[med-svn] [beast-mcmc] 01/05: Imported Upstream version 1.8.4+dfsg

Andreas Tille tille at debian.org
Tue Jun 28 08:03:39 UTC 2016


This is an automated email from the git hooks/post-receive script.

tille pushed a commit to branch master
in repository beast-mcmc.

commit b477f08dde06e2ccaddc63008b562a6de494a26a
Author: Andreas Tille <tille at debian.org>
Date:   Tue Jun 28 09:23:18 2016 +0200

    Imported Upstream version 1.8.4+dfsg
---
 .idea/uiDesigner.xml                               | 124 ++++
 README.md                                          |   2 +-
 build.xml                                          |   7 +-
 build_tempest.xml                                  |  18 +-
 release/common/README.txt                          |   4 +-
 release/common/VERSION HISTORY.txt                 |  28 +-
 release_tempest/Linux/icons/pathogen.png           | Bin 34709 -> 0 bytes
 release_tempest/Linux/scripts/pathogen             |  27 -
 release_tempest/common/README.txt                  |  71 ++-
 .../evomodel/branchmodel/EpochBranchModel.java     |  24 +-
 .../branchmodel/ExternalInternalBranchModel.java   |  23 +-
 .../lineagespecific/BeagleBranchLikelihood.java    |   2 +-
 .../LineageSpecificBranchModel.java                |  25 +-
 .../NewBeagleSequenceLikelihood.java               |   4 +-
 .../evomodel/parsers/ALSTreeLikelihoodParser.java  |   2 +
 .../AncestralStateTreeLikelihoodParser.java        |   7 +-
 .../BalancedBeagleTreeLikelihoodParser.java        |  21 +-
 .../parsers/BeagleTreeLikelihoodParser.java        |   9 +
 .../parsers/MarkovJumpsTreeLikelihoodParser.java   |   2 +
 .../OptimizedBeagleTreeLikelihoodParser.java       | 103 +++-
 .../sitemodel/EpochBranchSubstitutionModel.java    |  21 +-
 .../ExternalInternalBranchSubstitutionModel.java   |  19 +-
 .../evomodel/sitemodel/GammaSiteRateModel.java     |  41 +-
 .../substmodel/ComplexSubstitutionModel.java       |  21 +-
 .../evomodel/substmodel/GLMSubstitutionModel.java  |  15 +
 src/dr/app/beagle/evomodel/substmodel/GTR.java     |  33 +-
 .../beagle/evomodel/substmodel/GY94CodonModel.java |  35 +-
 src/dr/app/beagle/evomodel/substmodel/HKY.java     |  38 +-
 .../beagle/evomodel/substmodel/MG94CodonModel.java |  34 +-
 .../MarkovModulatedSubstitutionModel.java          |  20 +-
 .../substmodel/ProductChainSubstitutionModel.java  |  19 +-
 .../substmodel/SVSComplexSubstitutionModel.java    |   2 +-
 .../substmodel/SVSGeneralSubstitutionModel.java    |  31 +-
 src/dr/app/beagle/evomodel/substmodel/TN93.java    |  34 +-
 .../treelikelihood/ALSBeagleTreeLikelihood.java    |   3 +-
 .../AncestralStateBeagleTreeLikelihood.java        |  28 +-
 .../treelikelihood/BeagleTreeLikelihood.java       | 142 +++--
 .../MarkovJumpsBeagleTreeLikelihood.java           |  16 +-
 .../treelikelihood/OldBeagleTreeLikelihood.java    |  12 +-
 .../treelikelihood/SplitBySiteTraitLogger.java     |  22 +-
 .../evomodel/utilities/CompleteHistoryLogger.java  |  22 +-
 src/dr/app/beagle/mm/MMAlgorithm.java              | 125 ++++
 .../app/beagle/mm/MultiDimensionalScalingMM.java   | 269 ++++++++
 .../MassivelyParallelMDSImpl.java                  |  21 +-
 .../MultiDimensionalScalingCore.java               |   5 +
 .../MultiDimensionalScalingCoreImpl2.java          | 294 +++------
 .../MultiDimensionalScalingLikelihood.java         |  34 +-
 .../NativeMDSSingleton.java                        |  15 +-
 src/dr/app/beast/BeastMain.java                    |  29 +-
 src/dr/app/beast/BeastParser.java                  |  47 +-
 src/dr/app/beast/BeastVersion.java                 |  43 +-
 src/dr/app/beast/beast.properties                  |   4 +-
 src/dr/app/beast/development_parsers.properties    |  24 +-
 src/dr/app/beast/release_parsers.properties        |  11 +-
 src/dr/app/beauti/BeautiApp.java                   |  33 +-
 src/dr/app/beauti/BeautiFrame.java                 |   2 -
 .../clockModelsPanel/OldClockModelsPanel.java      | 681 ---------------------
 .../AncestralStatesComponentGenerator.java         |   2 +-
 .../continuous/ContinuousComponentOptions.java     |   3 +-
 .../discrete/DiscreteTraitsComponentGenerator.java |  12 +-
 .../components/dollo/DolloComponentGenerator.java  |   2 +-
 .../hpm/HierarchicalModelComponentGenerator.java   |   4 +-
 .../MarginalLikelihoodEstimationGenerator.java     |   4 +-
 src/dr/app/beauti/datapanel/DataPanel.java         |  12 +-
 src/dr/app/beauti/generator/BeastGenerator.java    |  56 +-
 ...odelGenerator.java => ClockModelGenerator.java} |  52 +-
 src/dr/app/beauti/generator/Generator.java         |  12 +-
 .../app/beauti/generator/InitialTreeGenerator.java |   2 +-
 src/dr/app/beauti/generator/LogGenerator.java      | 142 ++---
 .../app/beauti/generator/OperatorsGenerator.java   | 230 ++++---
 .../beauti/generator/ParameterPriorGenerator.java  |  10 +-
 .../app/beauti/generator/STARBEASTGenerator.java   |   4 +-
 .../generator/SubstitutionModelGenerator.java      |  49 +-
 .../beauti/generator/TreeLikelihoodGenerator.java  |   6 +-
 .../app/beauti/generator/TreePriorGenerator.java   |  12 +-
 src/dr/app/beauti/mcmcpanel/MCMCPanel.java         |   2 +
 .../app/beauti/operatorspanel/OperatorsPanel.java  |  33 +-
 .../app/beauti/options/AbstractPartitionData.java  |   2 +-
 src/dr/app/beauti/options/BeautiOptions.java       |  84 +--
 src/dr/app/beauti/options/ClockModelGroup.java     |  96 ---
 src/dr/app/beauti/options/ClockModelOptions.java   | 622 +------------------
 src/dr/app/beauti/options/ModelOptions.java        |  20 +-
 src/dr/app/beauti/options/Operator.java            | 111 +++-
 src/dr/app/beauti/options/Parameter.java           |  64 +-
 src/dr/app/beauti/options/PartitionClockModel.java | 243 +++-----
 .../options/PartitionClockModelTreeModelLink.java  |  78 +--
 src/dr/app/beauti/options/PartitionData.java       |   5 +-
 src/dr/app/beauti/options/PartitionOptions.java    | 294 ++++-----
 .../beauti/options/PartitionSubstitutionModel.java |  83 +--
 src/dr/app/beauti/options/PartitionTreeModel.java  |  54 +-
 src/dr/app/beauti/options/PartitionTreePrior.java  |   6 +-
 src/dr/app/beauti/options/TraitGuesser.java        |   2 +-
 src/dr/app/beauti/options/TreeModelOptions.java    |  22 +-
 .../priorsPanel/HierarchicalPriorDialog.java       |   6 +-
 .../app/beauti/priorsPanel/PriorOptionsPanel.java  |  12 +-
 .../app/beauti/priorsPanel/PriorSettingsPanel.java |   8 +-
 src/dr/app/beauti/priorsPanel/PriorsPanel.java     |  24 +-
 .../siteModelsPanel/PartitionModelPanel.java       |   2 +-
 src/dr/app/beauti/tipdatepanel/TipDatesPanel.java  |   5 +-
 .../beauti/treespanel/PartitionTreeModelPanel.java |  11 -
 src/dr/app/beauti/treespanel/TreesPanel.java       |   5 -
 src/dr/app/beauti/types/FixRateType.java           |   2 +-
 src/dr/app/beauti/types/OldClockType.java          |  95 ---
 src/dr/app/beauti/types/PriorType.java             |  18 +-
 src/dr/app/beauti/util/BEAUTiImporter.java         |   2 +-
 src/dr/app/beauti/util/BeautiTemplate.java         | 434 -------------
 src/dr/app/beauti/util/CommandLineBeauti.java      | 267 --------
 src/dr/app/beauti/util/XMLWriter.java              |  24 +-
 src/dr/app/bss/test/BeagleSeqSimTest.java          |   2 +-
 src/dr/app/tempest/TempEstApp.java                 |   5 +-
 src/dr/app/tempest/TempestFrame.java               |  19 +-
 src/dr/app/tools/AncestralSequenceAnnotator.java   |   1 +
 src/dr/app/tools/TreeAnnotatorDialog.java          |   2 +-
 .../alignment/AscertainedSitePatterns.java         |  19 +-
 src/dr/evolution/alignment/UncertainSiteList.java  | 101 +++
 .../coalescent/MultiEpochExponential.java          |  16 +-
 src/dr/evomodel/MSSD/CTMCScalePrior.java           |  37 +-
 src/dr/evomodel/antigenic/AntigenicDriftPrior.java |  45 +-
 src/dr/evomodel/antigenic/AntigenicLikelihood.java |  30 +-
 src/dr/evomodel/antigenic/AntigenicSplitPrior.java |  30 +-
 .../ContinuousAntigenicTraitLikelihood.java        |  24 +-
 .../DiscreteAntigenicTraitLikelihood.java          |  24 +-
 .../misc/obsolete/AGLikelihoodCluster.java         | 435 ++++++-------
 .../misc/obsolete/AGLikelihoodTreeCluster.java     |  43 +-
 .../branchratemodel/CompoundBranchRateModel.java   |   6 +-
 .../CountableBranchCategoryProvider.java           |  59 +-
 .../branchratemodel/DiscretizedBranchRates.java    |  37 +-
 .../evomodel/branchratemodel/LocalClockModel.java  |  31 +-
 .../branchratemodel/RandomLocalClockModel.java     |  34 +-
 .../coalescent/BayesianSkylineLikelihood.java      |  34 +-
 .../coalescent/CataclysmicDemographicModel.java    |  34 +-
 src/dr/evomodel/coalescent/ConstExpConstModel.java |  35 +-
 .../coalescent/ConstantExponentialModel.java       |  34 +-
 .../evomodel/coalescent/ConstantLogisticModel.java |  34 +-
 .../coalescent/ExpConstExpDemographicModel.java    |  34 +-
 src/dr/evomodel/coalescent/ExpansionModel.java     |  34 +-
 .../coalescent/ExponentialConstantModel.java       |  34 +-
 .../coalescent/ExponentialLogisticModel.java       |  34 +-
 .../GMRFMultilocusSkyrideLikelihood.java           | 402 ++++++++++--
 .../evomodel/coalescent/GMRFSkyrideLikelihood.java |  78 ++-
 .../evomodel/coalescent/LogisticGrowthModel.java   |  34 +-
 src/dr/evomodel/coalescent/SkylineLikelihood.java  |  36 +-
 .../coalescent/VariableDemographicModel.java       |  35 +-
 .../coalescent/VariableSkylineLikelihood.java      |  33 +-
 .../GMRFMultilocusSkyrideBlockUpdateOperator.java  |  64 +-
 .../AbstractMultivariateTraitLikelihood.java       | 258 ++++----
 .../BinaryLatentLiabilityLikelihood.java           |  17 +-
 .../continuous/ContinuousDiffusionStatistic.java   | 119 +++-
 .../FullyConjugateMultivariateTraitLikelihood.java | 264 ++++++--
 .../continuous/GaussianProcessFromTree.java        |  55 ++
 .../IntegratedMultivariateTraitLikelihood.java     | 229 +++----
 .../IntervalLatentLiabilityLikelihood.java         |  17 +-
 .../MultinomialLatentLiabilityLikelihood.java      |  17 +-
 ...NonPhylogeneticMultivariateTraitLikelihood.java |  17 +-
 .../OrderedLatentLiabilityLikelihood.java          |  17 +-
 .../SampledMultivariateTraitLikelihood.java        |   5 +-
 .../SemiConjugateMultivariateTraitLikelihood.java  |   4 +-
 .../evomodel/continuous/plink/PlinkImporter.java   |  15 +-
 .../epidemiology/casetocase/AbstractCase.java      |  13 +-
 .../epidemiology/casetocase/AbstractOutbreak.java  |  10 +-
 .../epidemiology/casetocase/BranchMapModel.java    |   2 +-
 .../CaseToCaseTransmissionLikelihood.java          |  49 +-
 .../casetocase/CaseToCaseTreeLikelihood.java       | 514 +++++-----------
 .../epidemiology/casetocase/CategoryOutbreak.java  |  90 ++-
 .../casetocase/PartitionedTreeLogger.java          |   5 +-
 .../casetocase/PartitionedTreeModel.java           | 493 +++++++++++++--
 .../casetocase/PartitionedTreeModelParser.java     |  21 +-
 .../casetocase/WithinCaseCoalescent.java           | 164 +----
 .../operators/InfectionBranchMovementOperator.java | 105 ++--
 .../operators/TransmissionSubtreeSlideA.java       |  15 +-
 .../operators/TransmissionSubtreeSlideB.java       |  11 +-
 .../operators/TransmissionWilsonBaldingA.java      |  21 +-
 .../operators/TransmissionWilsonBaldingB.java      |  10 +-
 .../operators/LatentFactorHamiltonianMC.java       | 102 +--
 .../evomodel/operators/LoadingsHamiltonianMC.java  | 155 +++++
 src/dr/evomodel/operators/SubtreeLeapOperator.java |   6 +-
 .../speciation/AlloppSpeciesNetworkModel.java      |  57 +-
 .../speciation/BirthDeathCollapseModel.java        |  45 +-
 .../speciation/BirthDeathGernhard08Model.java      |  52 +-
 .../speciation/BirthDeathSerialSamplingModel.java  |  32 +-
 .../evomodel/speciation/MulSpeciesTreeModel.java   | 210 ++++---
 .../speciation/PopsIOSpeciesTreeModel.java         |  13 +-
 src/dr/evomodel/speciation/SpeciesTreeModel.java   |  33 +-
 src/dr/evomodel/substmodel/Blosum62.java           |  22 +-
 src/dr/evomodel/substmodel/CPREV.java              |  20 +-
 .../substmodel/ComplexSubstitutionModel.java       |  24 +-
 src/dr/evomodel/substmodel/Dayhoff.java            |  22 +-
 src/dr/evomodel/substmodel/FLU.java                |  25 +-
 .../evomodel/substmodel/GLMSubstitutionModel.java  |  18 +-
 src/dr/evomodel/substmodel/GTR.java                |  34 +-
 src/dr/evomodel/substmodel/HKY.java                |  40 +-
 src/dr/evomodel/substmodel/JTT.java                |  21 +-
 src/dr/evomodel/substmodel/LG.java                 |  23 +-
 src/dr/evomodel/substmodel/MTREV.java              |  28 +-
 .../substmodel/MarginalVarianceStatistic.java      |   2 +-
 .../substmodel/SVSGeneralSubstitutionModel.java    |  28 +-
 src/dr/evomodel/substmodel/TN93.java               |  34 +-
 src/dr/evomodel/substmodel/WAG.java                |  22 +-
 .../tree}/ConditionalCladeProbability.java         |   4 +-
 .../tree/ProgressiveScalarTreeTransform.java       |   7 +
 src/dr/evomodel/tree/TerminalBranchStatistic.java  |  73 +++
 src/dr/evomodel/tree/TransformedTreeModel.java     |  30 +-
 src/dr/evomodel/tree/TreeModel.java                |  56 +-
 src/dr/evomodel/tree/TreeTransform.java            |  28 +-
 .../treelikelihood/HypermutantErrorModel.java      |  44 +-
 .../treelikelihood/SequenceErrorModel.java         |  46 +-
 src/dr/evomodelxml/MSSD/CTMCScalePriorParser.java  |   4 +-
 src/dr/evomodelxml/TreeWorkingPriorParsers.java    |   2 +-
 .../branchratemodel/LocalClockModelParser.java     |   4 +-
 .../coalescent/GMRFSkyrideLikelihoodParser.java    |  84 ++-
 .../operators/LoadingsHamiltonianMCParser.java     |  65 ++
 .../operators/SubtreeLeapOperatorParser.java       |  12 +-
 .../substmodel/MultivariateOUModelParser.java      |   2 +-
 .../evomodelxml/tree/MonophylyStatisticParser.java |   6 +-
 .../tree/TerminalBranchStatisticParser.java        |  73 +++
 src/dr/evoxml/NewickParser.java                    |  10 +-
 .../evoxml/UncertainAttributePatternsParser.java   | 230 +++++++
 .../distribution/GeneralizedLinearModel.java       |  16 +-
 .../distribution/MomentDistributionModel.java      |  27 +-
 .../MultivariateDistributionLikelihood.java        |   1 -
 .../MultivariateNormalDistributionModel.java       |  10 +
 .../distribution}/MultivariateOUModel.java         |   6 +-
 .../distribution/NormalDistributionModel.java      |  12 +-
 .../mcmc/MarginalLikelihoodEstimator.java          |  85 ++-
 src/dr/inference/mcmcmc/MCMCMC.java                |  15 -
 src/dr/inference/model/ComplementParameter.java    | 112 ++++
 src/dr/inference/model/CompoundLikelihood.java     |  23 +-
 .../ElementWiseMatrixMultiplicationParameter.java  |  69 +++
 .../model/EqualityConstrainedParameter.java        |  17 +-
 src/dr/inference/model/FastBUTMP.java              | 226 +++++++
 src/dr/inference/model/FastMatrixParameter.java    |   7 +
 src/dr/inference/model/FastTransposedBUTMP.java    | 125 ++++
 .../inference/model/IndianBuffetProcessPrior.java  |   4 +-
 src/dr/inference/model/LatentFactorModel.java      |  93 ++-
 src/dr/inference/model/MaskedParameter.java        |  34 +-
 .../inference/model/MatrixParameterInterface.java  |   2 +-
 src/dr/inference/model/MixtureModelLikelihood.java |  19 +-
 src/dr/inference/model/Parameter.java              |   4 +-
 .../inference/model/TransposedMatrixParameter.java |   5 +
 src/dr/inference/model/WeightedMixtureModel.java   |  18 +-
 .../operators/EllipticalSliceOperator.java         |   8 +-
 .../inference/operators/FactorGibbsOperator.java   |  13 +-
 .../operators/FactorIndependenceOperator.java      |  13 +-
 src/dr/inference/operators/FactorOperator.java     |  13 +-
 src/dr/inference/operators/GibbsOperator.java      |   4 +-
 .../LatentFactorModelPrecisionGibbsOperator.java   |   7 +-
 .../inference/operators/LoadingsGibbsOperator.java |   9 +-
 ...or.java => LoadingsGibbsTruncatedOperator.java} | 131 ++--
 .../operators/LoadingsIndependenceOperator.java    |   7 +-
 src/dr/inference/operators/MaskMoveOperator.java   | 182 ++++++
 .../operators/ModeIndependenceOperator.java        | 258 ++++++++
 .../operators/PathDependentOperator.java}          |  23 +-
 src/dr/inference/trace/CnCsPerSiteAnalysis.java    |  87 ++-
 .../inference/trace/CnCsToDnDsPerSiteAnalysis.java |  28 +-
 src/dr/inference/trace/DnDsPerSiteAnalysis.java    |  29 +-
 .../distribution/GammaDistributionModelParser.java |  44 +-
 .../distribution/GeneralizedLinearModelParser.java |  15 +-
 .../MomentDistributionModelParser.java             |   8 +-
 .../model/ComplementParameterParser.java           |  67 ++
 .../model/CompoundLikelihoodParser.java            |   6 +-
 .../ElementWiseMatrixMultiplicationParser.java     |  48 ++
 src/dr/inferencexml/model/FastBUTMPParser.java     | 108 ++++
 .../model/ImmutableParameterParser.java            |   5 +
 .../model/IndianBuffetProcessPriorParser.java      |   2 +-
 .../model/LatentFactorModelParser.java             |  23 +-
 .../operators/LoadingsGibbsOperatorParser.java     |  15 +
 .../operators/MaskMoveOperatorParser.java          | 129 ++++
 .../distributions/CompoundGaussianProcess.java     | 154 ++++-
 .../math/distributions/DirichletDistribution.java  |  33 +-
 .../GaussianProcessRandomGenerator.java            |   4 +
 .../MultivariateNormalDistribution.java            |   9 +
 src/dr/math/distributions/NormalDistribution.java  |   1 +
 src/dr/util/Citable.java                           |  14 +-
 src/dr/util/Citation.java                          | 168 ++++-
 src/dr/util/CommonCitations.java                   | 201 +++++-
 src/dr/util/MessageLogHandler.java                 |   8 +-
 .../RelativeRatesType.java => util/Pair.java}      |  37 +-
 src/dr/util/Transform.java                         |  30 +-
 src/dr/xml/AbstractXMLObjectParser.java            |   7 +-
 src/dr/xml/XMLObjectParser.java                    |   4 +-
 src/dr/xml/XMLParser.java                          | 126 ++--
 .../AncestralStateBeagleTreeLikelihoodTest.java    |   1 +
 src/test/dr/app/beagle/MarkovJumpsTest.java        |   1 +
 283 files changed, 9422 insertions(+), 6080 deletions(-)

diff --git a/.idea/uiDesigner.xml b/.idea/uiDesigner.xml
new file mode 100644
index 0000000..e96534f
--- /dev/null
+++ b/.idea/uiDesigner.xml
@@ -0,0 +1,124 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+  <component name="Palette2">
+    <group name="Swing">
+      <item class="com.intellij.uiDesigner.HSpacer" tooltip-text="Horizontal Spacer" icon="/com/intellij/uiDesigner/icons/hspacer.png" removable="false" auto-create-binding="false" can-attach-label="false">
+        <default-constraints vsize-policy="1" hsize-policy="6" anchor="0" fill="1" />
+      </item>
+      <item class="com.intellij.uiDesigner.VSpacer" tooltip-text="Vertical Spacer" icon="/com/intellij/uiDesigner/icons/vspacer.png" removable="false" auto-create-binding="false" can-attach-label="false">
+        <default-constraints vsize-policy="6" hsize-policy="1" anchor="0" fill="2" />
+      </item>
+      <item class="javax.swing.JPanel" icon="/com/intellij/uiDesigner/icons/panel.png" removable="false" auto-create-binding="false" can-attach-label="false">
+        <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3" />
+      </item>
+      <item class="javax.swing.JScrollPane" icon="/com/intellij/uiDesigner/icons/scrollPane.png" removable="false" auto-create-binding="false" can-attach-label="true">
+        <default-constraints vsize-policy="7" hsize-policy="7" anchor="0" fill="3" />
+      </item>
+      <item class="javax.swing.JButton" icon="/com/intellij/uiDesigner/icons/button.png" removable="false" auto-create-binding="true" can-attach-label="false">
+        <default-constraints vsize-policy="0" hsize-policy="3" anchor="0" fill="1" />
+        <initial-values>
+          <property name="text" value="Button" />
+        </initial-values>
+      </item>
+      <item class="javax.swing.JRadioButton" icon="/com/intellij/uiDesigner/icons/radioButton.png" removable="false" auto-create-binding="true" can-attach-label="false">
+        <default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
+        <initial-values>
+          <property name="text" value="RadioButton" />
+        </initial-values>
+      </item>
+      <item class="javax.swing.JCheckBox" icon="/com/intellij/uiDesigner/icons/checkBox.png" removable="false" auto-create-binding="true" can-attach-label="false">
+        <default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
+        <initial-values>
+          <property name="text" value="CheckBox" />
+        </initial-values>
+      </item>
+      <item class="javax.swing.JLabel" icon="/com/intellij/uiDesigner/icons/label.png" removable="false" auto-create-binding="false" can-attach-label="false">
+        <default-constraints vsize-policy="0" hsize-policy="0" anchor="8" fill="0" />
+        <initial-values>
+          <property name="text" value="Label" />
+        </initial-values>
+      </item>
+      <item class="javax.swing.JTextField" icon="/com/intellij/uiDesigner/icons/textField.png" removable="false" auto-create-binding="true" can-attach-label="true">
+        <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
+          <preferred-size width="150" height="-1" />
+        </default-constraints>
+      </item>
+      <item class="javax.swing.JPasswordField" icon="/com/intellij/uiDesigner/icons/passwordField.png" removable="false" auto-create-binding="true" can-attach-label="true">
+        <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
+          <preferred-size width="150" height="-1" />
+        </default-constraints>
+      </item>
+      <item class="javax.swing.JFormattedTextField" icon="/com/intellij/uiDesigner/icons/formattedTextField.png" removable="false" auto-create-binding="true" can-attach-label="true">
+        <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
+          <preferred-size width="150" height="-1" />
+        </default-constraints>
+      </item>
+      <item class="javax.swing.JTextArea" icon="/com/intellij/uiDesigner/icons/textArea.png" removable="false" auto-create-binding="true" can-attach-label="true">
+        <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
+          <preferred-size width="150" height="50" />
+        </default-constraints>
+      </item>
+      <item class="javax.swing.JTextPane" icon="/com/intellij/uiDesigner/icons/textPane.png" removable="false" auto-create-binding="true" can-attach-label="true">
+        <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
+          <preferred-size width="150" height="50" />
+        </default-constraints>
+      </item>
+      <item class="javax.swing.JEditorPane" icon="/com/intellij/uiDesigner/icons/editorPane.png" removable="false" auto-create-binding="true" can-attach-label="true">
+        <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
+          <preferred-size width="150" height="50" />
+        </default-constraints>
+      </item>
+      <item class="javax.swing.JComboBox" icon="/com/intellij/uiDesigner/icons/comboBox.png" removable="false" auto-create-binding="true" can-attach-label="true">
+        <default-constraints vsize-policy="0" hsize-policy="2" anchor="8" fill="1" />
+      </item>
+      <item class="javax.swing.JTable" icon="/com/intellij/uiDesigner/icons/table.png" removable="false" auto-create-binding="true" can-attach-label="false">
+        <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
+          <preferred-size width="150" height="50" />
+        </default-constraints>
+      </item>
+      <item class="javax.swing.JList" icon="/com/intellij/uiDesigner/icons/list.png" removable="false" auto-create-binding="true" can-attach-label="false">
+        <default-constraints vsize-policy="6" hsize-policy="2" anchor="0" fill="3">
+          <preferred-size width="150" height="50" />
+        </default-constraints>
+      </item>
+      <item class="javax.swing.JTree" icon="/com/intellij/uiDesigner/icons/tree.png" removable="false" auto-create-binding="true" can-attach-label="false">
+        <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
+          <preferred-size width="150" height="50" />
+        </default-constraints>
+      </item>
+      <item class="javax.swing.JTabbedPane" icon="/com/intellij/uiDesigner/icons/tabbedPane.png" removable="false" auto-create-binding="true" can-attach-label="false">
+        <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
+          <preferred-size width="200" height="200" />
+        </default-constraints>
+      </item>
+      <item class="javax.swing.JSplitPane" icon="/com/intellij/uiDesigner/icons/splitPane.png" removable="false" auto-create-binding="false" can-attach-label="false">
+        <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
+          <preferred-size width="200" height="200" />
+        </default-constraints>
+      </item>
+      <item class="javax.swing.JSpinner" icon="/com/intellij/uiDesigner/icons/spinner.png" removable="false" auto-create-binding="true" can-attach-label="true">
+        <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
+      </item>
+      <item class="javax.swing.JSlider" icon="/com/intellij/uiDesigner/icons/slider.png" removable="false" auto-create-binding="true" can-attach-label="false">
+        <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
+      </item>
+      <item class="javax.swing.JSeparator" icon="/com/intellij/uiDesigner/icons/separator.png" removable="false" auto-create-binding="false" can-attach-label="false">
+        <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3" />
+      </item>
+      <item class="javax.swing.JProgressBar" icon="/com/intellij/uiDesigner/icons/progressbar.png" removable="false" auto-create-binding="true" can-attach-label="false">
+        <default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1" />
+      </item>
+      <item class="javax.swing.JToolBar" icon="/com/intellij/uiDesigner/icons/toolbar.png" removable="false" auto-create-binding="false" can-attach-label="false">
+        <default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1">
+          <preferred-size width="-1" height="20" />
+        </default-constraints>
+      </item>
+      <item class="javax.swing.JToolBar$Separator" icon="/com/intellij/uiDesigner/icons/toolbarSeparator.png" removable="false" auto-create-binding="false" can-attach-label="false">
+        <default-constraints vsize-policy="0" hsize-policy="0" anchor="0" fill="1" />
+      </item>
+      <item class="javax.swing.JScrollBar" icon="/com/intellij/uiDesigner/icons/scrollbar.png" removable="false" auto-create-binding="true" can-attach-label="false">
+        <default-constraints vsize-policy="6" hsize-policy="0" anchor="0" fill="2" />
+      </item>
+    </group>
+  </component>
+</project>
\ No newline at end of file
diff --git a/README.md b/README.md
index 9b1e7c9..ecfc6a7 100644
--- a/README.md
+++ b/README.md
@@ -9,7 +9,7 @@ We also include a program that can convert NEXUS files into this format.
 
 ## Download BEAST
 
-[Download BEAST v1.8.2 binaries for Mac, Windows and UNIX/Linux](http://tree.bio.ed.ac.uk/software/beast/)
+[Download BEAST v1.8.3 binaries for Mac, Windows and UNIX/Linux](http://tree.bio.ed.ac.uk/software/beast/)
 
 [Older BEAST Downloads](https://code.google.com/p/beast-mcmc/downloads)
 
diff --git a/build.xml b/build.xml
index fa496f5..422efd1 100644
--- a/build.xml
+++ b/build.xml
@@ -290,11 +290,12 @@
         <echo message="JUnit test report finished."/>
     </target>
 
-    <property name="version" value="1.8.3"/>
     <!--
-    <property name="version" value="1.8.3pre20160105"/>
+    <property name="version" value="1.8.4"/>
     -->
-    <property name="version_number" value="1.8.3"/>
+    <property name="version" value="1.8.4pre20160615"/>
+
+    <property name="version_number" value="1.8.4"/>
     <property name="release_dir" value="release"/>
     <property name="BEAST_name" value="BEAST"/>
     <property name="BEAUti_name" value="BEAUti"/>
diff --git a/build_tempest.xml b/build_tempest.xml
index b31d664..a4de060 100644
--- a/build_tempest.xml
+++ b/build_tempest.xml
@@ -82,7 +82,7 @@
         <jar jarfile="${dist}/tempest.jar">
             <manifest>
                 <attribute name="Built-By" value="${user.name}"/>
-                <attribute name="Main-Class" value="dr.app.tempest.TempestApp"/>
+                <attribute name="Main-Class" value="dr.app.tempest.TempEstApp"/>
             </manifest>
             <fileset dir="${build}">
                 <include name="dr/app/tempest/**/*.class"/>
@@ -144,9 +144,13 @@
             classpath="${launch4j.dir}/launch4j.jar :${launch4j.dir}/lib/xstream.jar" />
 
         <copy file="${dist}/tempest.jar" todir="${Windows_package_dir}/lib"/>
+
+        <!--
         <copy todir="${Windows_package_dir}/lib">
             <fileset dir="${Windows_dir}/lib"/>
         </copy>
+        -->
+
         <copy file="${common_dir}/README.txt" todir="${Windows_package_dir}"/>
 
         <launch4j configFile="${Windows_dir}/tempest_launch4j.xml"
@@ -175,11 +179,15 @@
 
         <chmod dir="${Linux_package_dir}/bin" perm="755" includes="**/**"/>
 
-        <copy file="${Linux_dir}/icons/pathogen.png" todir="${Linux_package_dir}/images"/>
+        <copy file="${Linux_dir}/icons/tempest.png" todir="${Linux_package_dir}/images"/>
         <copy file="${dist}/tempest.jar" todir="${Linux_package_dir}/lib"/>
+
+        <!-- no libraries to copy...
         <copy todir="${Linux_package_dir}/lib">
             <fileset dir="${Linux_dir}/lib"/>
         </copy>
+        -->
+
         <copy file="${common_dir}/README.txt" todir="${Linux_package_dir}"/>
 
         <tar destfile="${release_dir}/${name}_v${version}.tgz" compression="gzip">
@@ -201,15 +209,15 @@
         <!-- create a jar bundle for the mac -->
         <jarbundler dir="${Mac_package_dir}"
                         name="${name} v${version}"
-                        mainclass="dr.app.tempest.TempestApp"
+                        mainclass="dr.app.tempest.TempEstApp"
                         stubfile="${packaging_tools}/mac/universalJavaApplicationStub"
                         icon="${Mac_dir}/icons/TempEst.icns"
                         jvmversion="1.6+"
                         arguments=""
                         vmoptions="-Xmx1024M"
                         version="${version}"
-                        copyright="${name} v${version}, Copyright 2006-2015, Andrew Rambaut"
-                        bundleid="pathogen" >
+                        copyright="${name} v${version}, Copyright 2006-2016, Andrew Rambaut"
+                        bundleid="tempest" >
             <javaproperty name="apple.laf.useScreenMenuBar" value="true"/>
             <jarfileset dir="${dist}">
                 <include name="**/tempest.jar"/>
diff --git a/release/common/README.txt b/release/common/README.txt
index 3a84394..76faf1c 100644
--- a/release/common/README.txt
+++ b/release/common/README.txt
@@ -1,4 +1,4 @@
-                    BEAST v1.8.3 2002-2016
+                    BEAST v1.8.4 2002-2016
         Bayesian Evolutionary Analysis Sampling Trees
                               by
       Alexei J. Drummond, Andrew Rambaut & Marc Suchard
@@ -16,7 +16,7 @@
                       msuchard at ucla.edu
 
 
-Last updated: a.rambaut at ed.ac.uk - 13th February 2016
+Last updated: a.rambaut at ed.ac.uk - 17th June 2016
 
 Contents:
 1) INTRODUCTION
diff --git a/release/common/VERSION HISTORY.txt b/release/common/VERSION HISTORY.txt
index b2214b3..c3be82f 100644
--- a/release/common/VERSION HISTORY.txt	
+++ b/release/common/VERSION HISTORY.txt	
@@ -17,11 +17,37 @@
 
 
 Version History
-Last updated: a.rambaut at ed.ac.uk - 13th February 2016
+Last updated: a.rambaut at ed.ac.uk - 17th June 2016
 All issues can be viewed at https://github.com/beast-dev/beast-mcmc/issues
 
 ================================================================================
 
+Version 1.8.4 released 17th June 2016
+
+    New Features:
+        New structured list of citations printed to screen before running.
+        Option ('-citation_file') to write citation list to file.
+        Option in BEAUti Priors panel to set parameters to 'Fixed Value'
+
+	Bug Fixes:
+	    Issue 808: Set autoOptimize to false in the randomWalkOperator on 
+	               Pagel's lambda
+	    Issue 806: SRD06 in BEAUTi selecting incorrect options.
+	    Issue 799: Relative rate parameters for partitions were not being 
+	               created. All partitions within a clock model have a 
+	               relative rate if their substitution models are unlinked.
+	    Issue 798: Calculating pairwise distances was slow for big data sets -
+	               removed this (but initial values no longer suggested based
+	               on data).
+	    Issue 797: Removed 'meanRate' from Priors tab in BEAUti.
+	    Issue 794: Running with empty command line causes error.
+	    Issue 792: Check to see that the same likelihood isn't included multiple
+                   times into the density.
+	    
+
+
+================================================================================
+
 Version 1.8.3 released 13th February 2016
 
     New Features:
diff --git a/release_tempest/Linux/icons/pathogen.png b/release_tempest/Linux/icons/pathogen.png
deleted file mode 100644
index 90d8e42..0000000
Binary files a/release_tempest/Linux/icons/pathogen.png and /dev/null differ
diff --git a/release_tempest/Linux/scripts/pathogen b/release_tempest/Linux/scripts/pathogen
deleted file mode 100755
index 31e64b7..0000000
--- a/release_tempest/Linux/scripts/pathogen
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/sh
-
-if [ -z "$PATHOGEN" ]; then
-	## resolve links - $0 may be a link to application
-	PRG="$0"
-
-	# need this for relative symlinks
-	while [ -h "$PRG" ] ; do
-	    ls=`ls -ld "$PRG"`
-	    link=`expr "$ls" : '.*-> \(.*\)$'`
-	    if expr "$link" : '/.*' > /dev/null; then
-		PRG="$link"
-	    else
-		PRG="`dirname "$PRG"`/$link"
-	    fi
-	done
-
-	# make it fully qualified
-	saveddir=`pwd`
-	PATHOGEN0=`dirname "$PRG"`
-	PATHOGEN=`cd "$PATHOGEN0" && pwd`
-	cd "$saveddir"
-fi
-
-PATHOGEN_LIB=$PATHOGEN/lib
-java -Xms64m -Xmx128m -jar $PATHOGEN_LIB/pathogen.jar $*
-
diff --git a/release_tempest/common/README.txt b/release_tempest/common/README.txt
index 7bed074..6191f13 100644
--- a/release_tempest/common/README.txt
+++ b/release_tempest/common/README.txt
@@ -1,4 +1,4 @@
-                      Path-O-Gen v1.2 2009
+                      TempEst v1.5 2016
                 Temporal Signal Investigation Tool
                               by
                        Andrew Rambaut
@@ -8,7 +8,7 @@
                       a.rambaut at ed.ac.uk
 
 UNIX / Mac OS X / Linux / Windows README 
-a.rambaut at ed.ac.uk - 27 November 2009
+a.rambaut at ed.ac.uk - 21 April 2016
 
 Contents:
 1) INTRODUCTION
@@ -16,26 +16,28 @@ Contents:
 3) ANALYSING TREES
 4) VERSION HISTORY
 5) SUPPORT & LINKS
-6) ACKNOWLEDGMENTS
+6) CITATION
+7) ACKNOWLEDGMENTS
 
 ___________________________________________________________________________
 1) INTRODUCTION
 
-Path-O-Gen is a tool for investigating the temporal signal and 'clocklikeness' of molecular phylogenies. It can read and analyse contemporaneous trees (where all sequences have been collected at the same time) and dated-tip trees (where sequences have been collected at different dates). It is designed for analysing trees that have not been inferred under a molecular-clock assumption to see how valid this assumption may be. It can also root the tree at the position that is likely to be th [...]
+TempEst is a tool for investigating the temporal signal and 'clocklikeness' of molecular phylogenies. It can read and analyse contemporaneous trees (where all sequences have been collected at the same time) and dated-tip trees (where sequences have been collected at different dates). It is designed for analysing trees that have not been inferred under a molecular-clock assumption to see how valid this assumption may be. It can also root the tree at the position that is likely to be the m [...]
 
+This software was formerly known as "Path-O-Gen".
 ___________________________________________________________________________
-2) INSTALLING AND RUNNING PATH-O-GEN
+2) INSTALLING AND RUNNING TEMPEST
 
-Mac OS X: To install Path-O-Gen, simply drag the program file to where you normally put applications. Then double click to run.
+Mac OS X: To install TempEst, simply drag the program file to where you normally put applications. Then double click to run.
 
-Windows: To install Path-O-Gen, simply drag the program file to where you normally put applications. Then double click to run.
+Windows: To install TempEst, simply drag the program file to where you normally put applications. Then double click to run.
 
-Linux / UNIX: Copy or move the folder to where you normally put applications and then double click the "pathogen.jar" file (in the lib/ directory) to run or type "./pathogen" at the command-line. 
+Linux / UNIX: Copy or move the folder to where you normally put applications and then double click the "tempest.jar" file (in the lib/ directory) to run or type "./tempest" at the command-line. 
 
 ___________________________________________________________________________
 3) ANALYSING TREES
 
-Once Path-O-Gen is running it will ask for a tree file to load. This should be in NEXUS format and should have been constructed using a phylogenetic method that does not assume a molecular clock (such as Neighbor-Joining or Maximum Likelihood or Bayesian methods with the molecular clock option off. It is also important that the trees contain branch lengths as genetic distance (substitutions per site). 
+Once TempEst is running it will ask for a tree file to load. This should be in NEXUS format and should have been constructed using a phylogenetic method that does not assume a molecular clock (such as Neighbor-Joining or Maximum Likelihood or Bayesian methods with the molecular clock option off. It is also important that the trees contain branch lengths as genetic distance (substitutions per site). 
 
 When the tree is loaded you will see a table containg all the taxa (sequence labels). If the sequences are contemporaneous (i.e., not sampled through time) then you can leave this as it is. If the sequences have dates associated with them you can enter them into this table. If the taxon labels have the dates encoded in them, you can use the "Guess Dates" button to try and extract them. The final thing you need to set here is whether the dates are "Since some time in the past" - which the [...]
 
@@ -48,16 +50,31 @@ Finally, you can export the tree (rooted as displayed) using the "Export Tree...
 ___________________________________________________________________________
 4) VERSION HISTORY
 
+---Version 1.5 22 April 2015---
+
+* New name, TempEst (formerly Path-O-Gen)
+* Re-organization of user-interface
+
+---Version 1.4 23 April 2013---
+
+* Improved the date parsing. Will now read dates with variable precision.
+* Can specify date precision in table. These are shown as bars in the plot
+* Minor other tweaks
+
+---Version 1.3 3 November 2010---
+
+* Improved the root optimization to use linear regression - should be more robust
+* Can import Newick as well as NEXUS tree files
+* A new feature: MRCA trace which shows a trace from selected tips to the interpolated position of their MRCA. This can help find anomalous tips (perhaps indicating contamination or mislabelling).
+
 ---Version 1.2 27 November 2009---
 
 * Added the ability to select points in the plots and the equivalent taxa will be highlighted in the tree (and vice-versa).
-
 * Added a residual plot for time-sampled trees. This shows the distribution of residual from the regression line to look for outliers.
 
 ---Version 1.1 23 February 2009---
 
 * Added a more flexible tree viewing component (based on FigTree)
-
 * Tips of a dated tip tree are now shown coloured by their residual from the root to tip regression line (blue: above, red: below, black on the regression).
 
 ---Version 1.0 12 February 2009---
@@ -72,21 +89,17 @@ Please email me to discuss any problems:
 a.rambaut at ed.ac.uk
 
 ___________________________________________________________________________
-6) ACKNOWLEDGMENTS
-
-Thanks to the following for supplying code or assisting with the creation or testing of BEAST and its associated software:
-
-	Alexander Alekseyenko
-	Erik Bloomquist
-	Roald Forsberg
-	Joseph Heled
-	Simon Ho
-	Philippe Lemey
-	Gerton Lunter
-	Sidney Markowitz
-	Tulio de Oliveira
-	Oliver Pybus
-	Beth Shapiro
-	Korbinian Strimmer
-	Marc Suchard
-	+ numerous other users who have kindly helped make BEAST better.
+6) CITATION
+
+Rambaut, Lam, de Carvalho & Pybus (2016) Exploring the temporal structure of heterochronous sequences using TempEst. Virus Evolution, 2: vew007 
+DOI: http://dx.doi.org/10.1093/ve/vew007
+
+___________________________________________________________________________
+7) ACKNOWLEDGMENTS
+
+Tommy T. Lam, Luiz Max Carvalho & Oliver Pybus
+Co-authors of TempEst Manuscript http://dx.doi.org/10.1093/ve/vew007
+
+Alexei Drummond, Marc Suchard, Philippe Lemey, and Simon Frost 
+made significant contributions to development of Path-O-Gen/TempEst.
+
diff --git a/src/dr/app/beagle/evomodel/branchmodel/EpochBranchModel.java b/src/dr/app/beagle/evomodel/branchmodel/EpochBranchModel.java
index 2dcd8e5..9270308 100644
--- a/src/dr/app/beagle/evomodel/branchmodel/EpochBranchModel.java
+++ b/src/dr/app/beagle/evomodel/branchmodel/EpochBranchModel.java
@@ -37,8 +37,7 @@ import dr.util.Author;
 import dr.util.Citable;
 import dr.util.Citation;
 
-import java.util.ArrayList;
-import java.util.List;
+import java.util.*;
 
 /**
  * @author Filip Bielejec
@@ -171,16 +170,21 @@ public class EpochBranchModel extends AbstractModel implements BranchModel, Cita
     protected void acceptState() {
     }// END: acceptState
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.SUBSTITUTION_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Epoch Branch model";
+    }
 
-    /**
-     * @return a list of citations associated with this object
-     */
     public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(new Citation(new Author[]{new Author("F", "Bielejec"),
-                new Author("P", "Lemey"), new Author("G", "Baele"), new Author("A", "Rambaut"),
-                new Author("MA", "Suchard")}, Citation.Status.IN_PREPARATION));
-        return citations;
+        return Arrays.asList(
+                new Citation(new Author[]{new Author("F", "Bielejec"),
+                        new Author("P", "Lemey"), new Author("G", "Baele"), new Author("A", "Rambaut"),
+                        new Author("MA", "Suchard")}, Citation.Status.IN_PREPARATION));
     }// END: getCitations
 
     private final TreeModel tree;
diff --git a/src/dr/app/beagle/evomodel/branchmodel/ExternalInternalBranchModel.java b/src/dr/app/beagle/evomodel/branchmodel/ExternalInternalBranchModel.java
index 9b88ca1..842c491 100644
--- a/src/dr/app/beagle/evomodel/branchmodel/ExternalInternalBranchModel.java
+++ b/src/dr/app/beagle/evomodel/branchmodel/ExternalInternalBranchModel.java
@@ -37,7 +37,9 @@ import dr.util.Citable;
 import dr.util.Citation;
 
 import java.util.ArrayList;
+import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Map;
 
 /**
  * @author Andrew Rambaut
@@ -45,7 +47,7 @@ import java.util.List;
  * @author Marc A. Suchard
  * @version $Id$
  */
-public class ExternalInternalBranchModel extends AbstractModel implements BranchModel, Citable {
+public class ExternalInternalBranchModel extends AbstractModel implements BranchModel {
     public ExternalInternalBranchModel(TreeModel tree, SubstitutionModel externalSubstModel, SubstitutionModel internalSubstModel) {
         super("ExternalInternalBranchModel");
 
@@ -113,25 +115,6 @@ public class ExternalInternalBranchModel extends AbstractModel implements Branch
     protected void acceptState() {
     }
 
-    /**
-     * @return a list of citations associated with this object
-     */
-    public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(
-                new Citation(
-                        new Author[]{
-                                new Author("F", "Bielejec"),
-                                new Author("P", "Lemey"),
-                                new Author("A", "Rambaut"),
-                                new Author("MA", "Suchard")
-                        },
-                        Citation.Status.IN_PREPARATION
-                )
-        );
-        return citations;
-    }
-
     private final TreeModel tree;
     private final SubstitutionModel externalSubstModel;
     private final SubstitutionModel internalSubstModel;
diff --git a/src/dr/app/beagle/evomodel/branchmodel/lineagespecific/BeagleBranchLikelihood.java b/src/dr/app/beagle/evomodel/branchmodel/lineagespecific/BeagleBranchLikelihood.java
index 7aa2f1c..c6abd56 100644
--- a/src/dr/app/beagle/evomodel/branchmodel/lineagespecific/BeagleBranchLikelihood.java
+++ b/src/dr/app/beagle/evomodel/branchmodel/lineagespecific/BeagleBranchLikelihood.java
@@ -512,7 +512,7 @@ public class BeagleBranchLikelihood implements Likelihood {
 			BeagleTreeLikelihood btl = new BeagleTreeLikelihood(alignment,
 					treeModel, homogeneousBranchModel, siteRateModel,
 					branchRateModel, null, false,
-					PartialsRescalingScheme.DEFAULT);
+					PartialsRescalingScheme.DEFAULT, true);
 
 			System.out.println("BTL(homogeneous) = " + btl.getLogLikelihood());
 
diff --git a/src/dr/app/beagle/evomodel/branchmodel/lineagespecific/LineageSpecificBranchModel.java b/src/dr/app/beagle/evomodel/branchmodel/lineagespecific/LineageSpecificBranchModel.java
index ec917a5..b349c2e 100644
--- a/src/dr/app/beagle/evomodel/branchmodel/lineagespecific/LineageSpecificBranchModel.java
+++ b/src/dr/app/beagle/evomodel/branchmodel/lineagespecific/LineageSpecificBranchModel.java
@@ -25,10 +25,7 @@
 
 package dr.app.beagle.evomodel.branchmodel.lineagespecific;
 
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 
 import dr.app.beagle.evomodel.branchmodel.BranchModel;
 import dr.app.beagle.evomodel.branchmodel.HomogeneousBranchModel;
@@ -60,6 +57,7 @@ import dr.math.MathUtils;
 import dr.util.Author;
 import dr.util.Citable;
 import dr.util.Citation;
+import dr.util.CommonCitations;
 
 /**
  * @author Filip Bielejec
@@ -289,7 +287,7 @@ public class LineageSpecificBranchModel extends AbstractModel implements BranchM
                     branchRateModel, //
                     null, //
                     false, //
-                    PartialsRescalingScheme.DEFAULT);
+                    PartialsRescalingScheme.DEFAULT, true);
 
             BeagleTreeLikelihood gold = new BeagleTreeLikelihood(convert, //
                     tree, //
@@ -298,7 +296,7 @@ public class LineageSpecificBranchModel extends AbstractModel implements BranchM
                     branchRateModel, //
                     null, //
                     false, //
-                    PartialsRescalingScheme.DEFAULT);
+                    PartialsRescalingScheme.DEFAULT, true);
             
             System.out.println("likelihood (gold) = " + gold.getLogLikelihood());
             System.out.println("likelihood = " + like.getLogLikelihood());
@@ -310,13 +308,20 @@ public class LineageSpecificBranchModel extends AbstractModel implements BranchM
     }// END: main
 
 	@Override
+	public Citation.Category getCategory() {
+		return Citation.Category.MOLECULAR_CLOCK;
+	}
+
+	@Override
+	public String getDescription() {
+		return "Lineage Specific Branch model";
+	}
+
 	public List<Citation> getCitations() {
-		
-		List<Citation> citations = new ArrayList<Citation>();
-		citations.add(new Citation(new Author[] { new Author("F", "Bielejec"),
+		return Collections.singletonList(
+				new Citation(new Author[] { new Author("F", "Bielejec"),
 				new Author("P", "Lemey"), new Author("G", "Baele"), new Author("A", "Rambaut"),
 				new Author("MA", "Suchard") }, Citation.Status.IN_PREPARATION));
-		return citations;
 	}// END: getCitations
 
 }// END: class
diff --git a/src/dr/app/beagle/evomodel/newtreelikelihood/NewBeagleSequenceLikelihood.java b/src/dr/app/beagle/evomodel/newtreelikelihood/NewBeagleSequenceLikelihood.java
index 4259e21..66876d3 100644
--- a/src/dr/app/beagle/evomodel/newtreelikelihood/NewBeagleSequenceLikelihood.java
+++ b/src/dr/app/beagle/evomodel/newtreelikelihood/NewBeagleSequenceLikelihood.java
@@ -1106,11 +1106,11 @@ public class NewBeagleSequenceLikelihood extends NewAbstractSequenceLikelihood i
             );
             Alignment alignment = simulator.simulate(false, false);
 
-            BeagleTreeLikelihood nbtl = new BeagleTreeLikelihood(alignment, treeModel, homogeneousBranchModel, siteRateModel, branchRateModel, null, false, PartialsRescalingScheme.DEFAULT);
+            BeagleTreeLikelihood nbtl = new BeagleTreeLikelihood(alignment, treeModel, homogeneousBranchModel, siteRateModel, branchRateModel, null, false, PartialsRescalingScheme.DEFAULT, true);
 
             System.out.println("nBTL(homogeneous) = " + nbtl.getLogLikelihood());
 
-            nbtl = new BeagleTreeLikelihood(alignment, treeModel, epochBranchModel, siteRateModel, branchRateModel, null, false, PartialsRescalingScheme.DEFAULT);
+            nbtl = new BeagleTreeLikelihood(alignment, treeModel, epochBranchModel, siteRateModel, branchRateModel, null, false, PartialsRescalingScheme.DEFAULT, true);
 
             System.out.println("nBTL(epoch) = " + nbtl.getLogLikelihood());
 
diff --git a/src/dr/app/beagle/evomodel/parsers/ALSTreeLikelihoodParser.java b/src/dr/app/beagle/evomodel/parsers/ALSTreeLikelihoodParser.java
index ce3f51d..96b15e3 100644
--- a/src/dr/app/beagle/evomodel/parsers/ALSTreeLikelihoodParser.java
+++ b/src/dr/app/beagle/evomodel/parsers/ALSTreeLikelihoodParser.java
@@ -77,6 +77,7 @@ public class ALSTreeLikelihoodParser extends BeagleTreeLikelihoodParser {
             TipStatesModel tipStatesModel, //
             boolean useAmbiguities, //
             PartialsRescalingScheme scalingScheme, //
+            boolean delayScaling,
             Map<Set<String>, //
                     Parameter> partialsRestrictions, //
             XMLObject xo //
@@ -130,6 +131,7 @@ public class ALSTreeLikelihoodParser extends BeagleTreeLikelihoodParser {
                 tipStatesModel,
                 useAmbiguities,
                 scalingScheme,
+                delayScaling,
                 partialsRestrictions
         );
     }
diff --git a/src/dr/app/beagle/evomodel/parsers/AncestralStateTreeLikelihoodParser.java b/src/dr/app/beagle/evomodel/parsers/AncestralStateTreeLikelihoodParser.java
index ded410e..19ebead 100644
--- a/src/dr/app/beagle/evomodel/parsers/AncestralStateTreeLikelihoodParser.java
+++ b/src/dr/app/beagle/evomodel/parsers/AncestralStateTreeLikelihoodParser.java
@@ -72,6 +72,7 @@ public class AncestralStateTreeLikelihoodParser extends BeagleTreeLikelihoodPars
             TipStatesModel tipStatesModel, //
             boolean useAmbiguities, //
             PartialsRescalingScheme scalingScheme, //
+            boolean delayScaling,
             Map<Set<String>, //
                     Parameter> partialsRestrictions, //
             XMLObject xo //
@@ -101,6 +102,7 @@ public class AncestralStateTreeLikelihoodParser extends BeagleTreeLikelihoodPars
                 tipStatesModel,
                 useAmbiguities,
                 scalingScheme,
+                delayScaling,
                 partialsRestrictions,
                 dataType,
                 tag,
@@ -111,7 +113,7 @@ public class AncestralStateTreeLikelihoodParser extends BeagleTreeLikelihoodPars
 
     public XMLSyntaxRule[] getSyntaxRules() {
         return new XMLSyntaxRule[] {
-                AttributeRule.newBooleanRule(OldTreeLikelihoodParser.USE_AMBIGUITIES, true),
+                AttributeRule.newBooleanRule(BeagleTreeLikelihoodParser.USE_AMBIGUITIES, true),
                 AttributeRule.newStringRule(RECONSTRUCTION_TAG_NAME, true),
                 new ElementRule(PatternList.class),
                 new ElementRule(TreeModel.class),
@@ -120,7 +122,8 @@ public class AncestralStateTreeLikelihoodParser extends BeagleTreeLikelihoodPars
                 new ElementRule(BranchRateModel.class, true),
                 new ElementRule(TipStatesModel.class, true),
                 new ElementRule(SubstitutionModel.class, true),
-                AttributeRule.newStringRule(OldTreeLikelihoodParser.SCALING_SCHEME,true),
+                AttributeRule.newStringRule(BeagleTreeLikelihoodParser.SCALING_SCHEME,true),
+                AttributeRule.newStringRule(BeagleTreeLikelihoodParser.DELAY_SCALING,true),
                 new ElementRule(PARTIALS_RESTRICTION, new XMLSyntaxRule[] {
                         new ElementRule(TaxonList.class),
                         new ElementRule(Parameter.class),
diff --git a/src/dr/app/beagle/evomodel/parsers/BalancedBeagleTreeLikelihoodParser.java b/src/dr/app/beagle/evomodel/parsers/BalancedBeagleTreeLikelihoodParser.java
index 06f1b4f..5ff18c6 100644
--- a/src/dr/app/beagle/evomodel/parsers/BalancedBeagleTreeLikelihoodParser.java
+++ b/src/dr/app/beagle/evomodel/parsers/BalancedBeagleTreeLikelihoodParser.java
@@ -59,9 +59,7 @@ public class BalancedBeagleTreeLikelihoodParser extends AbstractXMLObjectParser
     //public static final String BEAGLE_INSTANCE_COUNT = "beagle.instance.count";
 
     public static final String TREE_LIKELIHOOD = "balancedTreeLikelihood";
-    public static final String USE_AMBIGUITIES = "useAmbiguities";
     public static final String INSTANCE_COUNT = "instanceCount";
-    public static final String SCALING_SCHEME = "scalingScheme";
     public static final String PARTIALS_RESTRICTION = "partialsRestriction";
     
     public final int TEST_RUNS = 100;
@@ -77,6 +75,7 @@ public class BalancedBeagleTreeLikelihoodParser extends AbstractXMLObjectParser
                                                         BranchRateModel branchRateModel,
                                                         TipStatesModel tipStatesModel,
                                                         boolean useAmbiguities, PartialsRescalingScheme scalingScheme,
+                                                        boolean delayScaling,
                                                         Map<Set<String>, Parameter> partialsRestrictions,
                                                         XMLObject xo) throws XMLParseException {
         return new BeagleTreeLikelihood(
@@ -88,13 +87,14 @@ public class BalancedBeagleTreeLikelihoodParser extends AbstractXMLObjectParser
                 tipStatesModel,
                 useAmbiguities,
                 scalingScheme,
+                delayScaling,
                 partialsRestrictions
         );
     }
 
     public Object parseXMLObject(XMLObject xo) throws XMLParseException {
 
-        boolean useAmbiguities = xo.getAttribute(USE_AMBIGUITIES, false);
+        boolean useAmbiguities = xo.getAttribute(BeagleTreeLikelihoodParser.USE_AMBIGUITIES, false);
         /*int instanceCount = xo.getAttribute(INSTANCE_COUNT, 1);
         if (instanceCount < 1) {
             instanceCount = 1;
@@ -131,14 +131,15 @@ public class BalancedBeagleTreeLikelihoodParser extends AbstractXMLObjectParser
 //        }
 
         PartialsRescalingScheme scalingScheme = PartialsRescalingScheme.DEFAULT;
-        if (xo.hasAttribute(SCALING_SCHEME)) {
-            scalingScheme = PartialsRescalingScheme.parseFromString(xo.getStringAttribute(SCALING_SCHEME));
+        if (xo.hasAttribute(BeagleTreeLikelihoodParser.SCALING_SCHEME)) {
+//            scalingScheme = PartialsRescalingScheme.parseFromString(xo.getStringAttribute(BeagleTreeLikelihoodParser.SCALING_SCHEME));
             if (scalingScheme == null)
-                throw new XMLParseException("Unknown scaling scheme '"+xo.getStringAttribute(SCALING_SCHEME)+"' in "+
+                throw new XMLParseException("Unknown scaling scheme '"+xo.getStringAttribute(BeagleTreeLikelihoodParser.SCALING_SCHEME)+"' in "+
                         "OldBeagleTreeLikelihood object '"+xo.getId());
 
         }
 
+        boolean delayScaling = true;
         Map<Set<String>, Parameter> partialsRestrictions = null;
 
         if (xo.hasChildNamed(PARTIALS_RESTRICTION)) {
@@ -180,6 +181,7 @@ public class BalancedBeagleTreeLikelihoodParser extends AbstractXMLObjectParser
                 tipStatesModel,
                 useAmbiguities,
                 scalingScheme,
+                delayScaling,
                 partialsRestrictions,
                 xo
         );
@@ -226,6 +228,7 @@ public class BalancedBeagleTreeLikelihoodParser extends AbstractXMLObjectParser
                         null,
                         useAmbiguities,
                         scalingScheme,
+                        delayScaling,
                         partialsRestrictions,
                         xo);
                 treeLikelihood.setId(xo.getId() + "_" + instanceCount);
@@ -272,6 +275,7 @@ public class BalancedBeagleTreeLikelihoodParser extends AbstractXMLObjectParser
                             null,
                             useAmbiguities,
                             scalingScheme,
+                            delayScaling,
                             partialsRestrictions,
                             xo);
                     treeLikelihood.setId(xo.getId() + "_" + instanceCount);
@@ -322,7 +326,7 @@ public class BalancedBeagleTreeLikelihoodParser extends AbstractXMLObjectParser
     }
 
     public static final XMLSyntaxRule[] rules = {
-            AttributeRule.newBooleanRule(USE_AMBIGUITIES, true),
+            AttributeRule.newBooleanRule(BeagleTreeLikelihoodParser.USE_AMBIGUITIES, true),
             new ElementRule(PatternList.class),
             new ElementRule(TreeModel.class),
             new ElementRule(GammaSiteRateModel.class),
@@ -330,7 +334,8 @@ public class BalancedBeagleTreeLikelihoodParser extends AbstractXMLObjectParser
             new ElementRule(SubstitutionModel.class, true),
             new ElementRule(BranchRateModel.class, true),
             new ElementRule(TipStatesModel.class, true),
-            AttributeRule.newStringRule(SCALING_SCHEME,true),
+            AttributeRule.newStringRule(BeagleTreeLikelihoodParser.SCALING_SCHEME,true),
+            AttributeRule.newBooleanRule(BeagleTreeLikelihoodParser.DELAY_SCALING,true),
             new ElementRule(PARTIALS_RESTRICTION, new XMLSyntaxRule[] {
                     new ElementRule(TaxonList.class),
                     new ElementRule(Parameter.class),
diff --git a/src/dr/app/beagle/evomodel/parsers/BeagleTreeLikelihoodParser.java b/src/dr/app/beagle/evomodel/parsers/BeagleTreeLikelihoodParser.java
index eada33e..298837c 100644
--- a/src/dr/app/beagle/evomodel/parsers/BeagleTreeLikelihoodParser.java
+++ b/src/dr/app/beagle/evomodel/parsers/BeagleTreeLikelihoodParser.java
@@ -69,6 +69,7 @@ public class BeagleTreeLikelihoodParser extends AbstractXMLObjectParser {
     //    public static final String DEVICE_NUMBER = "deviceNumber";
 //    public static final String PREFER_SINGLE_PRECISION = "preferSinglePrecision";
     public static final String SCALING_SCHEME = "scalingScheme";
+    public static final String DELAY_SCALING = "delayScaling";
     public static final String PARTIALS_RESTRICTION = "partialsRestriction";
 
     public String getParserName() {
@@ -81,6 +82,7 @@ public class BeagleTreeLikelihoodParser extends AbstractXMLObjectParser {
                                                         BranchRateModel branchRateModel,
                                                         TipStatesModel tipStatesModel,
                                                         boolean useAmbiguities, PartialsRescalingScheme scalingScheme,
+                                                        boolean delayRescalingUntilUnderflow,
                                                         Map<Set<String>, Parameter> partialsRestrictions,
                                                         XMLObject xo) throws XMLParseException {
         return new BeagleTreeLikelihood(
@@ -92,6 +94,7 @@ public class BeagleTreeLikelihoodParser extends AbstractXMLObjectParser {
                 tipStatesModel,
                 useAmbiguities,
                 scalingScheme,
+                delayRescalingUntilUnderflow,
                 partialsRestrictions
         );
     }
@@ -135,6 +138,7 @@ public class BeagleTreeLikelihoodParser extends AbstractXMLObjectParser {
 //        }
 
         PartialsRescalingScheme scalingScheme = PartialsRescalingScheme.DEFAULT;
+        boolean delayScaling = true;
         if (xo.hasAttribute(SCALING_SCHEME)) {
             scalingScheme = PartialsRescalingScheme.parseFromString(xo.getStringAttribute(SCALING_SCHEME));
             if (scalingScheme == null)
@@ -142,6 +146,9 @@ public class BeagleTreeLikelihoodParser extends AbstractXMLObjectParser {
                         "OldBeagleTreeLikelihood object '"+xo.getId());
 
         }
+        if (xo.hasAttribute(DELAY_SCALING)) {
+            delayScaling = xo.getBooleanAttribute(DELAY_SCALING);
+        }
 
         Map<Set<String>, Parameter> partialsRestrictions = null;
 
@@ -169,6 +176,7 @@ public class BeagleTreeLikelihoodParser extends AbstractXMLObjectParser {
                     tipStatesModel,
                     useAmbiguities,
                     scalingScheme,
+                    delayScaling,
                     partialsRestrictions,
                     xo
             );
@@ -198,6 +206,7 @@ public class BeagleTreeLikelihoodParser extends AbstractXMLObjectParser {
                     null,
                     useAmbiguities,
                     scalingScheme,
+                    delayScaling,
                     partialsRestrictions,
                     xo);
             treeLikelihood.setId(xo.getId() + "_" + instanceCount);
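
For orientation, the optional-attribute handling that BeagleTreeLikelihoodParser (and the balanced variant above) now applies can be sketched as the fragment below. It assumes the surrounding parseXMLObject context of these parsers and the constants defined in BeagleTreeLikelihoodParser; it is a sketch, not standalone code.

    // Shared attribute-parsing pattern (assumes the parseXMLObject context above).
    boolean useAmbiguities = xo.getAttribute(BeagleTreeLikelihoodParser.USE_AMBIGUITIES, false);

    PartialsRescalingScheme scalingScheme = PartialsRescalingScheme.DEFAULT;
    boolean delayScaling = true;   // new attribute; rescaling is delayed until underflow by default

    if (xo.hasAttribute(BeagleTreeLikelihoodParser.SCALING_SCHEME)) {
        scalingScheme = PartialsRescalingScheme.parseFromString(
                xo.getStringAttribute(BeagleTreeLikelihoodParser.SCALING_SCHEME));
        if (scalingScheme == null) {
            throw new XMLParseException("Unknown scaling scheme '"
                    + xo.getStringAttribute(BeagleTreeLikelihoodParser.SCALING_SCHEME)
                    + "' in " + xo.getId());
        }
    }
    if (xo.hasAttribute(BeagleTreeLikelihoodParser.DELAY_SCALING)) {
        delayScaling = xo.getBooleanAttribute(BeagleTreeLikelihoodParser.DELAY_SCALING);
    }

Both flags are then handed to createTreeLikelihood(...) and on to the BeagleTreeLikelihood constructor, whose new boolean parameter appears in the hunks above.
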
diff --git a/src/dr/app/beagle/evomodel/parsers/MarkovJumpsTreeLikelihoodParser.java b/src/dr/app/beagle/evomodel/parsers/MarkovJumpsTreeLikelihoodParser.java
index d5102c1..f24aa60 100644
--- a/src/dr/app/beagle/evomodel/parsers/MarkovJumpsTreeLikelihoodParser.java
+++ b/src/dr/app/beagle/evomodel/parsers/MarkovJumpsTreeLikelihoodParser.java
@@ -76,6 +76,7 @@ public class MarkovJumpsTreeLikelihoodParser extends AncestralStateTreeLikelihoo
                                                         BranchRateModel branchRateModel,
                                                         TipStatesModel tipStatesModel,
                                                         boolean useAmbiguities, PartialsRescalingScheme scalingScheme,
+                                                        boolean delayScaling,
                                                         Map<Set<String>, Parameter> partialsRestrictions,
                                                         XMLObject xo) throws XMLParseException {
 
@@ -106,6 +107,7 @@ public class MarkovJumpsTreeLikelihoodParser extends AncestralStateTreeLikelihoo
                 tipStatesModel,
                 useAmbiguities,
                 scalingScheme,
+                delayScaling,
                 partialsRestrictions,
                 dataType,
                 stateTag,
diff --git a/src/dr/app/beagle/evomodel/parsers/OptimizedBeagleTreeLikelihoodParser.java b/src/dr/app/beagle/evomodel/parsers/OptimizedBeagleTreeLikelihoodParser.java
index 52b04d7..0eb5cbd 100644
--- a/src/dr/app/beagle/evomodel/parsers/OptimizedBeagleTreeLikelihoodParser.java
+++ b/src/dr/app/beagle/evomodel/parsers/OptimizedBeagleTreeLikelihoodParser.java
@@ -40,9 +40,7 @@ import dr.evolution.alignment.SitePatterns;
 import dr.evomodel.branchratemodel.BranchRateModel;
 import dr.evomodel.tree.TreeModel;
 import dr.evomodel.treelikelihood.TipStatesModel;
-import dr.inference.model.Likelihood;
-import dr.inference.model.Parameter;
-import dr.inference.model.TestThreadedCompoundLikelihood;
+import dr.inference.model.*;
 import dr.xml.AbstractXMLObjectParser;
 import dr.xml.AttributeRule;
 import dr.xml.ElementRule;
@@ -60,20 +58,21 @@ public class OptimizedBeagleTreeLikelihoodParser extends AbstractXMLObjectParser
     public static final String CALIBRATE = "calibrate";
     public static final String RETRY = "retry";
 
-    public static final boolean DEBUG = true;
+    public static final boolean DEBUG = false;
 
     public String getParserName() {
         return OPTIMIZED_BEAGLE_TREE_LIKELIHOOD;
     }
 
     protected BeagleTreeLikelihood createTreeLikelihood(PatternList patternList, TreeModel treeModel,
-            BranchModel branchModel,
-            GammaSiteRateModel siteRateModel,
-            BranchRateModel branchRateModel,
-            TipStatesModel tipStatesModel,
-            boolean useAmbiguities, PartialsRescalingScheme scalingScheme,
-            Map<Set<String>, Parameter> partialsRestrictions,
-            XMLObject xo) throws XMLParseException {
+                                                        BranchModel branchModel,
+                                                        GammaSiteRateModel siteRateModel,
+                                                        BranchRateModel branchRateModel,
+                                                        TipStatesModel tipStatesModel,
+                                                        boolean useAmbiguities, PartialsRescalingScheme scalingScheme,
+                                                        boolean delayRescalingUntilUnderflow,
+                                                        Map<Set<String>, Parameter> partialsRestrictions,
+                                                        XMLObject xo) throws XMLParseException {
         return new BeagleTreeLikelihood(
                 patternList,
                 treeModel,
@@ -83,8 +82,9 @@ public class OptimizedBeagleTreeLikelihoodParser extends AbstractXMLObjectParser
                 tipStatesModel,
                 useAmbiguities,
                 scalingScheme,
+                delayRescalingUntilUnderflow,
                 partialsRestrictions
-                );
+        );
     }
 
     public Object parseXMLObject(XMLObject xo) throws XMLParseException {
@@ -104,8 +104,12 @@ public class OptimizedBeagleTreeLikelihoodParser extends AbstractXMLObjectParser
         int childCount = xo.getChildCount();
         List<Likelihood> likelihoods = new ArrayList<Likelihood>();
 
+        //TEST
+        List<Likelihood> originalLikelihoods = new ArrayList<Likelihood>();
+
         for (int i = 0; i < childCount; i++) {
             likelihoods.add((Likelihood)xo.getChild(i));
+            originalLikelihoods.add((Likelihood)xo.getChild(i));
         }
 
         if (DEBUG) {
@@ -131,23 +135,25 @@ public class OptimizedBeagleTreeLikelihoodParser extends AbstractXMLObjectParser
         BranchRateModel[] branchRateModels = new BranchRateModel[childCount];
         boolean[] ambiguities = new boolean[childCount];
         PartialsRescalingScheme[] rescalingSchemes = new PartialsRescalingScheme[childCount];
+        boolean[] isDelayRescalingUntilUnderflow = new boolean[childCount];
         List<Map<Set<String>, Parameter>> partialsRestrictions = new ArrayList<Map<Set<String>, Parameter>>();
         for (int i = 0; i < likelihoods.size(); i++) {
-            patterns[i] = (SitePatterns)((BeagleTreeLikelihood)likelihoods.get(i)).getPatternsList();
+            patterns[i] = (SitePatterns) ((BeagleTreeLikelihood) likelihoods.get(i)).getPatternsList();
             siteCounts[i] = patterns[i].getPatternCount();
-            treeModels[i] = ((BeagleTreeLikelihood)likelihoods.get(i)).getTreeModel();
-            branchModels[i] = ((BeagleTreeLikelihood)likelihoods.get(i)).getBranchModel();
-            siteRateModels[i] = (GammaSiteRateModel) ((BeagleTreeLikelihood)likelihoods.get(i)).getSiteRateModel();
-            branchRateModels[i] = ((BeagleTreeLikelihood)likelihoods.get(i)).getBranchRateModel();
-            ambiguities[i] = ((BeagleTreeLikelihood)likelihoods.get(i)).useAmbiguities();
-            rescalingSchemes[i] = ((BeagleTreeLikelihood)likelihoods.get(i)).getRescalingScheme();
-            partialsRestrictions.add(i, ((BeagleTreeLikelihood)likelihoods.get(i)).getPartialsRestrictions());
+            treeModels[i] = ((BeagleTreeLikelihood) likelihoods.get(i)).getTreeModel();
+            branchModels[i] = ((BeagleTreeLikelihood) likelihoods.get(i)).getBranchModel();
+            siteRateModels[i] = (GammaSiteRateModel) ((BeagleTreeLikelihood) likelihoods.get(i)).getSiteRateModel();
+            branchRateModels[i] = ((BeagleTreeLikelihood) likelihoods.get(i)).getBranchRateModel();
+            ambiguities[i] = ((BeagleTreeLikelihood) likelihoods.get(i)).useAmbiguities();
+            rescalingSchemes[i] = ((BeagleTreeLikelihood) likelihoods.get(i)).getRescalingScheme();
+            isDelayRescalingUntilUnderflow[i] = ((BeagleTreeLikelihood) likelihoods.get(i)).isDelayRescalingUntilUnderflow();
+            partialsRestrictions.add(i, ((BeagleTreeLikelihood) likelihoods.get(i)).getPartialsRestrictions());
         }
 
         if (DEBUG) {
             System.err.println("Pattern counts: ");
             for (int i = 0;i < siteCounts.length; i++) {
-                System.err.print(siteCounts[i] + " ");
+                System.err.println(siteCounts[i] + "   vs.    " + patterns[i].getPatternCount());
             }
             System.err.println();
             System.err.println("Instance counts: ");
@@ -164,6 +170,7 @@ public class OptimizedBeagleTreeLikelihoodParser extends AbstractXMLObjectParser
 
         TestThreadedCompoundLikelihood compound = new TestThreadedCompoundLikelihood(likelihoods);
         //CompoundLikelihood compound = new CompoundLikelihood(likelihoods);
+        //ThreadedCompoundLikelihood compound = new ThreadedCompoundLikelihood(likelihoods);
 
         if (DEBUG) {
             System.err.println("Timing estimates for each of the " + calibrate + " likelihood calculations:");
@@ -356,6 +363,9 @@ public class OptimizedBeagleTreeLikelihoodParser extends AbstractXMLObjectParser
         }
         for (int i = 0; i < likelihoods.size(); i++) {
             siteCounts[i] = patterns[i].getPatternCount();
+            if (DEBUG) {
+                System.err.println("Site count " + i + " = " + siteCounts[i]);
+            }
         }
 
         if (DEBUG) {
@@ -381,11 +391,13 @@ public class OptimizedBeagleTreeLikelihoodParser extends AbstractXMLObjectParser
 
                 BeagleTreeLikelihood treeLikelihood = createTreeLikelihood(
                         subPatterns, treeModels[longestIndex], branchModels[longestIndex], siteRateModels[longestIndex], branchRateModels[longestIndex],
-                        null, 
-                        ambiguities[longestIndex], rescalingSchemes[longestIndex], partialsRestrictions.get(longestIndex),
+                        null,
+                        ambiguities[longestIndex], rescalingSchemes[longestIndex], isDelayRescalingUntilUnderflow[longestIndex],
+                        partialsRestrictions.get(longestIndex),
                         xo);
 
-                treeLikelihood.setId(xo.getId() + "_" + instanceCount);
+                treeLikelihood.setId(xo.getId() + "_" + longestIndex + "_" + i);
+                System.err.println(treeLikelihood.getId() + " created.");
                 newList.add(treeLikelihood);
             }
             for (int i = 0; i < newList.size()-1; i++) {
@@ -401,6 +413,7 @@ public class OptimizedBeagleTreeLikelihoodParser extends AbstractXMLObjectParser
             }
             compound = new TestThreadedCompoundLikelihood(likelihoods);
             //compound = new CompoundLikelihood(likelihoods);
+            //compound = new ThreadedCompoundLikelihood(likelihoods);
             siteCounts[longestIndex] = (instanceCount-1)*siteCounts[longestIndex]/instanceCount;
             longestSize = (instanceCount-1)*longestSize/instanceCount;
 
@@ -541,17 +554,22 @@ public class OptimizedBeagleTreeLikelihoodParser extends AbstractXMLObjectParser
 
                         BeagleTreeLikelihood treeLikelihood = createTreeLikelihood(
                                 subPatterns, treeModels[longestIndex], branchModels[longestIndex], siteRateModels[longestIndex], branchRateModels[longestIndex],
-                                null, 
-                                ambiguities[longestIndex], rescalingSchemes[longestIndex], partialsRestrictions.get(longestIndex),
+                                null,
+                                ambiguities[longestIndex], rescalingSchemes[longestIndex], isDelayRescalingUntilUnderflow[longestIndex],
+                                partialsRestrictions.get(longestIndex),
                                 xo);
 
-                        treeLikelihood.setId(xo.getId() + "_" + instanceCount);
+                        treeLikelihood.setId(xo.getId() + "_" + longestIndex + "_" + i);
+                        System.err.println(treeLikelihood.getId() + " created.");
                         newList.add(treeLikelihood);
                     }
                     /*for (int i = 0; i < newList.size()+1; i++) {
                         likelihoods.remove(currentLocation[longestIndex]);
                     }*/
                     for (int i = 0; i < newList.size()+timesRetried+1; i++) {
+                        //TEST CODE START
+                        unregisterAllModels((BeagleTreeLikelihood)likelihoods.get(currentLocation[longestIndex]));
+                        //TEST CODE END
                         likelihoods.remove(currentLocation[longestIndex]);
                     }
                     for (int i = 0; i < newList.size(); i++) {
@@ -565,6 +583,7 @@ public class OptimizedBeagleTreeLikelihoodParser extends AbstractXMLObjectParser
 
                     compound = new TestThreadedCompoundLikelihood(likelihoods);
                     //compound = new CompoundLikelihood(likelihoods);
+                    //compound = new ThreadedCompoundLikelihood(likelihoods);
                     siteCounts[longestIndex] = (instanceCount+timesRetried+1)*siteCounts[longestIndex]/instanceCount;
                     longestSize = (instanceCount+timesRetried+1)*longestSize/instanceCount;
 
@@ -795,10 +814,34 @@ public class OptimizedBeagleTreeLikelihoodParser extends AbstractXMLObjectParser
 
             }*/
 
+        /*for (int i = 0; i < originalLikelihoods.size(); i++) {
+            ((BeagleTreeLikelihood)originalLikelihoods.get(i)).removeModel(((BeagleTreeLikelihood)originalLikelihoods.get(i)).getBranchModel());
+            ((BeagleTreeLikelihood)originalLikelihoods.get(i)).removeModel(((BeagleTreeLikelihood)originalLikelihoods.get(i)).getBranchRateModel());
+            ((BeagleTreeLikelihood)originalLikelihoods.get(i)).removeModel(((BeagleTreeLikelihood)originalLikelihoods.get(i)).getSiteRateModel());
+            ((BeagleTreeLikelihood)originalLikelihoods.get(i)).removeModel(((BeagleTreeLikelihood)originalLikelihoods.get(i)).getTreeModel());
+            if (((BeagleTreeLikelihood)originalLikelihoods.get(i)).getTipStatesModel() != null) {
+                ((BeagleTreeLikelihood) originalLikelihoods.get(i)).removeModel(((BeagleTreeLikelihood) originalLikelihoods.get(i)).getTipStatesModel());
+            }
+        }*/
+
+        for (int i = 0; i < originalLikelihoods.size(); i++) {
+            unregisterAllModels((BeagleTreeLikelihood)originalLikelihoods.get(i));
+        }
+
         return compound;
 
     }
 
+    private void unregisterAllModels(BeagleTreeLikelihood btl) {
+        btl.removeModel(btl.getTreeModel());
+        btl.removeModel(btl.getBranchRateModel());
+        btl.removeModel(btl.getBranchModel());
+        btl.removeModel(btl.getSiteRateModel());
+        if (btl.getTipStatesModel() != null) {
+            btl.removeModel(btl.getTipStatesModel());
+        }
+    }
+
     //************************************************************************
     // AbstractXMLObjectParser implementation
     //************************************************************************
@@ -812,9 +855,9 @@ public class OptimizedBeagleTreeLikelihoodParser extends AbstractXMLObjectParser
     }
 
     public static final XMLSyntaxRule[] rules = {
-        new ElementRule(BeagleTreeLikelihood.class, 1, Integer.MAX_VALUE),
-        AttributeRule.newIntegerRule(CALIBRATE, true),
-        AttributeRule.newIntegerRule(RETRY, true)
+            new ElementRule(BeagleTreeLikelihood.class, 1, Integer.MAX_VALUE),
+            AttributeRule.newIntegerRule(CALIBRATE, true),
+            AttributeRule.newIntegerRule(RETRY, true)
     };
 
     public XMLSyntaxRule[] getSyntaxRules() {
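
The partition-splitting bookkeeping in the parser above shrinks (and, on retry, regrows) the longest partition's site count with plain integer arithmetic. The standalone snippet below only illustrates that arithmetic; the expressions are copied from the hunks above, while the concrete values are made up.

    // Standalone illustration of the integer bookkeeping after splitting the longest
    // partition into `instanceCount` pieces; the numbers are invented for the demo.
    public class SplitBookkeepingDemo {
        public static void main(String[] args) {
            int siteCount = 1000;     // hypothetical pattern count of the longest partition
            int instanceCount = 4;    // number of pieces it is split into
            int timesRetried = 1;     // hypothetical retry counter

            // As in: siteCounts[longestIndex] = (instanceCount-1)*siteCounts[longestIndex]/instanceCount;
            int afterSplit = (instanceCount - 1) * siteCount / instanceCount;
            System.out.println("after split: " + afterSplit);   // 750

            // As in: siteCounts[longestIndex] = (instanceCount+timesRetried+1)*siteCounts[longestIndex]/instanceCount;
            int afterRetry = (instanceCount + timesRetried + 1) * siteCount / instanceCount;
            System.out.println("after retry: " + afterRetry);   // 1500
        }
    }
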
diff --git a/src/dr/app/beagle/evomodel/sitemodel/EpochBranchSubstitutionModel.java b/src/dr/app/beagle/evomodel/sitemodel/EpochBranchSubstitutionModel.java
index e5d30d5..9a76275 100644
--- a/src/dr/app/beagle/evomodel/sitemodel/EpochBranchSubstitutionModel.java
+++ b/src/dr/app/beagle/evomodel/sitemodel/EpochBranchSubstitutionModel.java
@@ -42,12 +42,14 @@ import dr.inference.model.Variable;
 import dr.util.Author;
 import dr.util.Citable;
 import dr.util.Citation;
+import dr.util.CommonCitations;
 
 import java.util.*;
 
 /**
  * @author Filip Bielejec
  * @author Marc A. Suchard
+ * @author Andrew Rambaut
  * @version $Id$
  */
 @SuppressWarnings("serial")
@@ -540,16 +542,21 @@ public class EpochBranchSubstitutionModel extends AbstractModel implements
 		}
 	}// END: checkBuffers
 
-	/**
-	 * @return a list of citations associated with this object
-	 */
+	@Override
+	public Citation.Category getCategory() {
+		return Citation.Category.SUBSTITUTION_MODELS;
+	}
+
+	@Override
+	public String getDescription() {
+		return "Epoch branch substitution model";
+	}
+
 	public List<Citation> getCitations() {
-		List<Citation> citations = new ArrayList<Citation>();
-		citations.add(new Citation(new Author[] { new Author("F", "Bielejec"),
+		return Arrays.asList(new Citation(new Author[] { new Author("F", "Bielejec"),
 				new Author("P", "Lemey"), new Author("G", "Baele"),
 				new Author("MA", "Suchard") }, Citation.Status.IN_PREPARATION));
-		return citations;
-	}// END: getCitations
+	}
 
 	// /////////////
 	// ---DEBUG---//
diff --git a/src/dr/app/beagle/evomodel/sitemodel/ExternalInternalBranchSubstitutionModel.java b/src/dr/app/beagle/evomodel/sitemodel/ExternalInternalBranchSubstitutionModel.java
index 52a3040..620f235 100644
--- a/src/dr/app/beagle/evomodel/sitemodel/ExternalInternalBranchSubstitutionModel.java
+++ b/src/dr/app/beagle/evomodel/sitemodel/ExternalInternalBranchSubstitutionModel.java
@@ -50,7 +50,7 @@ import java.util.List;
  */
 
 @Deprecated // Switching to BranchModel
-public class ExternalInternalBranchSubstitutionModel extends AbstractModel implements BranchSubstitutionModel, Citable {
+public class ExternalInternalBranchSubstitutionModel extends AbstractModel implements BranchSubstitutionModel {
     public ExternalInternalBranchSubstitutionModel(List<SubstitutionModel> substModelList, List<FrequencyModel> frequencyModelList) {
         super("ExternalInternalBranchSubstitutionModel");
 
@@ -147,23 +147,6 @@ public class ExternalInternalBranchSubstitutionModel extends AbstractModel imple
     private final List<SubstitutionModel> substModelList;
     private final List<FrequencyModel> frequencyModelList;
 
-    /**
-     * @return a list of citations associated with this object
-     */
-    public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(
-                new Citation(
-                        new Author[]{
-                                new Author("P", "Lemey"),
-                                new Author("MA", "Suchard")
-                        },
-                        Citation.Status.IN_PREPARATION
-                )
-        );
-        return citations;
-    }
-
 	@Override
 	public int getExtraBufferCount(TreeModel treeModel) {
 		// TODO Auto-generated method stub
diff --git a/src/dr/app/beagle/evomodel/sitemodel/GammaSiteRateModel.java b/src/dr/app/beagle/evomodel/sitemodel/GammaSiteRateModel.java
index 0569b90..a9689b0 100644
--- a/src/dr/app/beagle/evomodel/sitemodel/GammaSiteRateModel.java
+++ b/src/dr/app/beagle/evomodel/sitemodel/GammaSiteRateModel.java
@@ -31,6 +31,14 @@ import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
 import dr.math.distributions.GammaDistribution;
 import dr.app.beagle.evomodel.substmodel.SubstitutionModel;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+
+import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
 
 /**
  * GammaSiteModel - A SiteModel that has a gamma distributed rates across sites.
@@ -39,7 +47,7 @@ import dr.app.beagle.evomodel.substmodel.SubstitutionModel;
  * @version $Id: GammaSiteModel.java,v 1.31 2005/09/26 14:27:38 rambaut Exp $
  */
 
-public class GammaSiteRateModel extends AbstractModel implements SiteRateModel {
+public class GammaSiteRateModel extends AbstractModel implements SiteRateModel, Citable {
 
     public GammaSiteRateModel(String name) {
         this(   name,
@@ -274,6 +282,10 @@ public class GammaSiteRateModel extends AbstractModel implements SiteRateModel {
         ratesKnown = true;
     }
 
+    public boolean hasInvariantSites() {
+        return invarParameter != null;
+    }
+
     // *****************************************************************
     // Interface ModelComponent
     // *****************************************************************
@@ -342,5 +354,32 @@ public class GammaSiteRateModel extends AbstractModel implements SiteRateModel {
         this.substitutionModel = substitutionModel;
     }
 
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.SUBSTITUTION_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Discrete gamma-distributed rate heterogeneity model";
+    }
+
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+    public final static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("Z", "Yang")
+            },
+            "Maximum likelihood phylogenetic estimation from DNA sequences with variable rates over sites: approximate methods",
+            1994,
+            "J. Mol. Evol.",
+            39,
+            306, 314,
+            Citation.Status.PUBLISHED
+    );
+
     private SubstitutionModel substitutionModel;
 }
\ No newline at end of file
diff --git a/src/dr/app/beagle/evomodel/substmodel/ComplexSubstitutionModel.java b/src/dr/app/beagle/evomodel/substmodel/ComplexSubstitutionModel.java
index 545c21f..fc187ca 100644
--- a/src/dr/app/beagle/evomodel/substmodel/ComplexSubstitutionModel.java
+++ b/src/dr/app/beagle/evomodel/substmodel/ComplexSubstitutionModel.java
@@ -33,13 +33,17 @@ import dr.inference.model.Likelihood;
 import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 import dr.math.matrixAlgebra.Vector;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+import dr.util.CommonCitations;
 
 import java.util.*;
 
 /**
  * @author Marc Suchard
  */
-public class ComplexSubstitutionModel extends GeneralSubstitutionModel implements Likelihood {
+public class ComplexSubstitutionModel extends GeneralSubstitutionModel implements Likelihood, Citable {
 
     public ComplexSubstitutionModel(String name, DataType dataType, FrequencyModel freqModel, Parameter parameter) {
         super(name, dataType, freqModel, parameter, -1);
@@ -274,5 +278,20 @@ public class ComplexSubstitutionModel extends GeneralSubstitutionModel implement
         this.doNormalization = normalize;
     }
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.SUBSTITUTION_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Complex-diagonalizable, irreversible substitution model";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CommonCitations.EDWARDS_2011_ANCIENT);
+    }
+
     private boolean doNormalization = true;
 }
diff --git a/src/dr/app/beagle/evomodel/substmodel/GLMSubstitutionModel.java b/src/dr/app/beagle/evomodel/substmodel/GLMSubstitutionModel.java
index 95b2c4e..54f39a8 100644
--- a/src/dr/app/beagle/evomodel/substmodel/GLMSubstitutionModel.java
+++ b/src/dr/app/beagle/evomodel/substmodel/GLMSubstitutionModel.java
@@ -30,6 +30,11 @@ import dr.inference.distribution.LogLinearModel;
 import dr.inference.loggers.LogColumn;
 import dr.inference.model.BayesianStochasticSearchVariableSelection;
 import dr.inference.model.Model;
+import dr.util.Citation;
+import dr.util.CommonCitations;
+
+import java.util.Collections;
+import java.util.List;
 
 /**
  * @author Marc A. Suchard
@@ -73,6 +78,16 @@ public class GLMSubstitutionModel extends ComplexSubstitutionModel {
         return Double.NEGATIVE_INFINITY;
     }
 
+    @Override
+    public String getDescription() {
+        return "Generalized linear (model, GLM) substitution model"; // TODO Horrible; fix
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+
+        return Collections.singletonList(CommonCitations.LEMEY_2014_UNIFYING);
+    }
 
     private LogLinearModel glm;
     private double[] testProbabilities;
diff --git a/src/dr/app/beagle/evomodel/substmodel/GTR.java b/src/dr/app/beagle/evomodel/substmodel/GTR.java
index f87acb7..49ddf27 100644
--- a/src/dr/app/beagle/evomodel/substmodel/GTR.java
+++ b/src/dr/app/beagle/evomodel/substmodel/GTR.java
@@ -28,6 +28,12 @@ package dr.app.beagle.evomodel.substmodel;
 import dr.evolution.datatype.Nucleotides;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+
+import java.util.Collections;
+import java.util.List;
 
 /**
  * General Time Reversible model of nucleotide evolution
@@ -38,7 +44,7 @@ import dr.inference.model.Variable;
  * @author Alexei Drummond
  * @version $Id: GTR.java,v 1.19 2005/05/24 20:25:58 rambaut Exp $
  */
-public class GTR extends BaseSubstitutionModel {
+public class GTR extends BaseSubstitutionModel implements Citable {
 
     private Variable<Double> rateACVariable = null;
     private Variable<Double> rateAGVariable = null;
@@ -179,4 +185,29 @@ public class GTR extends BaseSubstitutionModel {
         return buffer.toString();
     }
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.SUBSTITUTION_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "HKY nucleotide substitution model";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+    public static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("S", "Tavaré")
+            },
+            "Some probabilistic and statistical problems in the analysis of DNA sequences.",
+            1985,
+            "In: Miura R. M., editor. Lectures on mathematics in the life sciences.",
+            17, 57, 86
+    );
+
 }
\ No newline at end of file
diff --git a/src/dr/app/beagle/evomodel/substmodel/GY94CodonModel.java b/src/dr/app/beagle/evomodel/substmodel/GY94CodonModel.java
index f54defa..a591d62 100644
--- a/src/dr/app/beagle/evomodel/substmodel/GY94CodonModel.java
+++ b/src/dr/app/beagle/evomodel/substmodel/GY94CodonModel.java
@@ -28,6 +28,12 @@ package dr.app.beagle.evomodel.substmodel;
 import dr.evolution.datatype.Codons;
 import dr.inference.model.Parameter;
 import dr.inference.model.Statistic;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+
+import java.util.Collections;
+import java.util.List;
 
 /**
  * Yang model of codon evolution
@@ -37,7 +43,7 @@ import dr.inference.model.Statistic;
  * @author Marc A. Suchard
  * @version $Id: YangCodonModel.java,v 1.21 2005/05/24 20:25:58 rambaut Exp $
  */
-public class GY94CodonModel extends AbstractCodonModel {
+public class GY94CodonModel extends AbstractCodonModel implements Citable {
     /**
      * kappa
      */
@@ -205,4 +211,31 @@ public class GY94CodonModel extends AbstractCodonModel {
 
     };*/
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.SUBSTITUTION_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Goldman-Yang codon substitution model";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+
+    public static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("N", "Goldman"),
+                    new Author("Z", "Yang")
+            },
+            "A codon-based model of nucleotide substitution for protein-coding DNA sequences",
+            1994,
+            "Mol Biol Evol",
+            11, 725, 736
+    );
+
 }
\ No newline at end of file
diff --git a/src/dr/app/beagle/evomodel/substmodel/HKY.java b/src/dr/app/beagle/evomodel/substmodel/HKY.java
index 249ef0f..ab37fa6 100644
--- a/src/dr/app/beagle/evomodel/substmodel/HKY.java
+++ b/src/dr/app/beagle/evomodel/substmodel/HKY.java
@@ -29,6 +29,12 @@ import dr.inference.model.Parameter;
 import dr.inference.model.Statistic;
 import dr.evolution.datatype.Nucleotides;
 import dr.math.matrixAlgebra.Vector;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+
+import java.util.Collections;
+import java.util.List;
 
 
 /**
@@ -38,7 +44,7 @@ import dr.math.matrixAlgebra.Vector;
  * @author Andrew Rambaut
  * @author Marc A. Suchard
  */
-public class HKY extends BaseSubstitutionModel {
+public class HKY extends BaseSubstitutionModel implements Citable {
 
     private Parameter kappaParameter = null;
 
@@ -222,6 +228,36 @@ public class HKY extends BaseSubstitutionModel {
 
     };
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.SUBSTITUTION_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "HKY nucleotide substitution model";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+    public static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("M", "Hasegowa"),
+                    new Author("H", "Kishino"),
+                    new Author("T", "Yano")
+            },
+            "Dating the human-ape splitting by a molecular clock of mitochondrial DNA",
+            1985,
+            "J. Mol. Evol.",
+            22,
+            160, 174,
+            Citation.Status.PUBLISHED
+    );
+
+
     public static void main(String[] args) {
 //        double kappa = 2.0;
 //        double[] pi = new double[]{0.15,0.30,0.20,0.35};
diff --git a/src/dr/app/beagle/evomodel/substmodel/MG94CodonModel.java b/src/dr/app/beagle/evomodel/substmodel/MG94CodonModel.java
index 07ebd5d..ed1aa41 100644
--- a/src/dr/app/beagle/evomodel/substmodel/MG94CodonModel.java
+++ b/src/dr/app/beagle/evomodel/substmodel/MG94CodonModel.java
@@ -27,6 +27,12 @@ package dr.app.beagle.evomodel.substmodel;
 
 import dr.evolution.datatype.Codons;
 import dr.inference.model.Parameter;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+
+import java.util.Collections;
+import java.util.List;
 
 /**
  * Muse-Gaut model of codon evolution
@@ -35,7 +41,7 @@ import dr.inference.model.Parameter;
  * @author Guy Baele
  * @author Philippe Lemey
  */
-public class MG94CodonModel extends AbstractCodonModel {
+public class MG94CodonModel extends AbstractCodonModel implements Citable {
 
     protected Parameter alphaParameter;
     protected Parameter betaParameter;
@@ -137,4 +143,30 @@ public class MG94CodonModel extends AbstractCodonModel {
     }
 
     private boolean doNormalization = true;
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.SUBSTITUTION_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Muse-Gaut codon substitution model";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+    public static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("SV", "Muse"),
+                    new Author("BS", "Gaut")
+            },
+            "A likelihood approach for comparing synonymous and nonsynonymous nucleotide substitution rates, with application to the chloroplast genome",
+            1994,
+            "Mol Biol Evol",
+            11, 715, 724
+    );
 }
\ No newline at end of file
diff --git a/src/dr/app/beagle/evomodel/substmodel/MarkovModulatedSubstitutionModel.java b/src/dr/app/beagle/evomodel/substmodel/MarkovModulatedSubstitutionModel.java
index d4fad96..c932108 100644
--- a/src/dr/app/beagle/evomodel/substmodel/MarkovModulatedSubstitutionModel.java
+++ b/src/dr/app/beagle/evomodel/substmodel/MarkovModulatedSubstitutionModel.java
@@ -37,9 +37,7 @@ import dr.util.Citable;
 import dr.util.Citation;
 import dr.util.CommonCitations;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
+import java.util.*;
 import java.util.logging.Logger;
 
 /**
@@ -223,12 +221,18 @@ public class MarkovModulatedSubstitutionModel extends ComplexSubstitutionModel i
 //        return pcFreqModel;
 //    }
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.SUBSTITUTION_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Markov modulated substitution model";
+    }
+
     public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(
-                CommonCitations.SUCHARD_2012
-        );
-        return citations;
+        return Collections.singletonList(CommonCitations.SUCHARD_2012);
     }
 
     @Override
diff --git a/src/dr/app/beagle/evomodel/substmodel/ProductChainSubstitutionModel.java b/src/dr/app/beagle/evomodel/substmodel/ProductChainSubstitutionModel.java
index 7329aa2..aa1110b 100644
--- a/src/dr/app/beagle/evomodel/substmodel/ProductChainSubstitutionModel.java
+++ b/src/dr/app/beagle/evomodel/substmodel/ProductChainSubstitutionModel.java
@@ -34,8 +34,7 @@ import dr.util.Citable;
 import dr.util.Citation;
 import dr.util.CommonCitations;
 
-import java.util.ArrayList;
-import java.util.List;
+import java.util.*;
 import java.util.logging.Logger;
 
 //import dr.math.matrixAlgebra.Vector;
@@ -110,12 +109,18 @@ public class ProductChainSubstitutionModel extends BaseSubstitutionModel impleme
                 + Citable.Utils.getCitationString(this));
     }
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.SUBSTITUTION_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Product chain substitution model";
+    }
+
     public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(
-                CommonCitations.OBRIEN_2009
-        );
-        return citations;
+        return Collections.singletonList(CommonCitations.OBRIEN_2009_LEARNING);
     }
 
     public EigenDecomposition getEigenDecomposition() {
diff --git a/src/dr/app/beagle/evomodel/substmodel/SVSComplexSubstitutionModel.java b/src/dr/app/beagle/evomodel/substmodel/SVSComplexSubstitutionModel.java
index 8c8a1c5..77a9f35 100644
--- a/src/dr/app/beagle/evomodel/substmodel/SVSComplexSubstitutionModel.java
+++ b/src/dr/app/beagle/evomodel/substmodel/SVSComplexSubstitutionModel.java
@@ -85,7 +85,7 @@ public class SVSComplexSubstitutionModel extends ComplexSubstitutionModel implem
 
     public boolean validState() {
         return !updateMatrix ||
-                Utils.connectedAndWellConditioned(probability,this);
+                BayesianStochasticSearchVariableSelection.Utils.connectedAndWellConditioned(probability,this);
     }
 
     protected void handleVariableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
diff --git a/src/dr/app/beagle/evomodel/substmodel/SVSGeneralSubstitutionModel.java b/src/dr/app/beagle/evomodel/substmodel/SVSGeneralSubstitutionModel.java
index 8bea2ab..917b821 100644
--- a/src/dr/app/beagle/evomodel/substmodel/SVSGeneralSubstitutionModel.java
+++ b/src/dr/app/beagle/evomodel/substmodel/SVSGeneralSubstitutionModel.java
@@ -1,7 +1,7 @@
 /*
  * SVSGeneralSubstitutionModel.java
  *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ * Copyright (c) 2002-2016 Alexei Drummond, Andrew Rambaut and Marc Suchard
  *
  * This file is part of BEAST.
  * See the NOTICE file distributed with this work for additional
@@ -25,10 +25,13 @@
 
 package dr.app.beagle.evomodel.substmodel;
 
-import dr.inference.model.*;
+import dr.evolution.datatype.DataType;
 import dr.inference.loggers.LogColumn;
 import dr.inference.loggers.NumberColumn;
-import dr.evolution.datatype.DataType;
+import dr.inference.model.*;
+import dr.util.Citable;
+import dr.util.Citation;
+import dr.util.CommonCitations;
 
 import java.util.*;
 
@@ -37,7 +40,7 @@ import java.util.*;
  */
 
 public class SVSGeneralSubstitutionModel extends GeneralSubstitutionModel implements Likelihood,
-        BayesianStochasticSearchVariableSelection {
+        BayesianStochasticSearchVariableSelection, Citable {
 
     public SVSGeneralSubstitutionModel(String name, DataType dataType, FrequencyModel freqModel,
                                        Parameter ratesParameter, Parameter indicatorsParameter) {
@@ -45,7 +48,6 @@ public class SVSGeneralSubstitutionModel extends GeneralSubstitutionModel implem
 
         if (indicatorsParameter == null) {
             this.indicatorsParameter = new Parameter.Default(ratesParameter.getDimension(), 1.0);
-            System.err.println("HERE AA");
         } else {
             this.indicatorsParameter  = indicatorsParameter;
             addVariable(indicatorsParameter);
@@ -81,7 +83,7 @@ public class SVSGeneralSubstitutionModel extends GeneralSubstitutionModel implem
     }
 
     public boolean validState() {
-        return !updateMatrix || Utils.connectedAndWellConditioned(probability,this);
+        return !updateMatrix || BayesianStochasticSearchVariableSelection.Utils.connectedAndWellConditioned(probability,this);
     }
 
     protected void handleVariableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
@@ -106,7 +108,7 @@ public class SVSGeneralSubstitutionModel extends GeneralSubstitutionModel implem
      */
     public double getLogLikelihood() {
         if (updateMatrix) {
-            if (!Utils.connectedAndWellConditioned(probability,this)) {
+            if (!BayesianStochasticSearchVariableSelection.Utils.connectedAndWellConditioned(probability,this)) {
                 return Double.NEGATIVE_INFINITY;
             }
         }
@@ -161,6 +163,21 @@ public class SVSGeneralSubstitutionModel extends GeneralSubstitutionModel implem
         };
     }
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.SUBSTITUTION_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Stochastic search variable selection, reversible substitution model";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CommonCitations.LEMEY_2009_BAYESIAN);
+    }
+
     protected class LikelihoodColumn extends NumberColumn {
         public LikelihoodColumn(String label) {
             super(label);
diff --git a/src/dr/app/beagle/evomodel/substmodel/TN93.java b/src/dr/app/beagle/evomodel/substmodel/TN93.java
index 0fc90d6..b5ade9b 100644
--- a/src/dr/app/beagle/evomodel/substmodel/TN93.java
+++ b/src/dr/app/beagle/evomodel/substmodel/TN93.java
@@ -27,13 +27,19 @@ package dr.app.beagle.evomodel.substmodel;
 
 import dr.evolution.datatype.Nucleotides;
 import dr.inference.model.Parameter;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+
+import java.util.Collections;
+import java.util.List;
 
 /**
  * Tamura-Nei model of nucleotide evolution
  *
  * @author Marc A. Suchard
  */
-public class TN93 extends BaseSubstitutionModel {
+public class TN93 extends BaseSubstitutionModel implements Citable {
 
     private Parameter kappaParameter1 = null;
     private Parameter kappaParameter2 = null;
@@ -149,4 +155,30 @@ public class TN93 extends BaseSubstitutionModel {
 
         return eigenDecomposition;
     }
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.SUBSTITUTION_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Tamura-Nei nucleotide substitution model";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+    public static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("K", "Tamura"),
+                    new Author("M", "Nei")
+            },
+            "Estimation of the number of nucleotide substitutions in the control region of mitochondrial DNA in humans and chimpanzees",
+            1993,
+            "Mol Biol Evol",
+            10, 512, 526
+    );
 }
\ No newline at end of file
diff --git a/src/dr/app/beagle/evomodel/treelikelihood/ALSBeagleTreeLikelihood.java b/src/dr/app/beagle/evomodel/treelikelihood/ALSBeagleTreeLikelihood.java
index 9cfb4a0..227ab35 100644
--- a/src/dr/app/beagle/evomodel/treelikelihood/ALSBeagleTreeLikelihood.java
+++ b/src/dr/app/beagle/evomodel/treelikelihood/ALSBeagleTreeLikelihood.java
@@ -62,9 +62,10 @@ public class ALSBeagleTreeLikelihood extends BeagleTreeLikelihood implements Lik
                                    TipStatesModel tipStatesModel,
                                    boolean useAmbiguities,
                                    PartialsRescalingScheme scalingScheme,
+                                   boolean delayScaling,
                                    Map<Set<String>, Parameter> partialsRestrictions) {
 
-        super(patternList, treeModel, branchModel, siteRateModel, branchRateModel, tipStatesModel, useAmbiguities, scalingScheme,
+        super(patternList, treeModel, branchModel, siteRateModel, branchRateModel, tipStatesModel, useAmbiguities, scalingScheme, delayScaling,
                 partialsRestrictions);
 //    }
 //
diff --git a/src/dr/app/beagle/evomodel/treelikelihood/AncestralStateBeagleTreeLikelihood.java b/src/dr/app/beagle/evomodel/treelikelihood/AncestralStateBeagleTreeLikelihood.java
index 3c04439..33e116f 100644
--- a/src/dr/app/beagle/evomodel/treelikelihood/AncestralStateBeagleTreeLikelihood.java
+++ b/src/dr/app/beagle/evomodel/treelikelihood/AncestralStateBeagleTreeLikelihood.java
@@ -28,6 +28,7 @@ package dr.app.beagle.evomodel.treelikelihood;
 import dr.app.beagle.evomodel.branchmodel.BranchModel;
 import dr.app.beagle.evomodel.sitemodel.SiteRateModel;
 import dr.evolution.alignment.PatternList;
+import dr.evolution.alignment.UncertainSiteList;
 import dr.evolution.datatype.Codons;
 import dr.evolution.datatype.DataType;
 import dr.evolution.datatype.GeneralDataType;
@@ -70,6 +71,7 @@ public class AncestralStateBeagleTreeLikelihood extends BeagleTreeLikelihood imp
                                               TipStatesModel tipStatesModel,
                                               boolean useAmbiguities,
                                               PartialsRescalingScheme scalingScheme,
+                                              boolean delayRescalingUntilUnderflow,
                                               Map<Set<String>, Parameter> partialsRestrictions,
                                               final DataType dataType,
                                               final String tag,
@@ -77,7 +79,7 @@ public class AncestralStateBeagleTreeLikelihood extends BeagleTreeLikelihood imp
                                               boolean useMAP,
                                               boolean returnML) {
 
-        super(patternList, treeModel, branchModel, siteRateModel, branchRateModel, tipStatesModel, useAmbiguities, scalingScheme,
+        super(patternList, treeModel, branchModel, siteRateModel, branchRateModel, tipStatesModel, useAmbiguities, scalingScheme, delayRescalingUntilUnderflow,
                 partialsRestrictions);
 
         this.dataType = dataType;
@@ -146,16 +148,24 @@ public class AncestralStateBeagleTreeLikelihood extends BeagleTreeLikelihood imp
         int v = 0;
         for (int i = 0; i < patternCount; i++) {
 
-            int state = patternList.getPatternState(sequenceIndex, i);
-            stateSet = dataType.getStateSet(state);
 
-            for (int j = 0; j < stateCount; j++) {
-                if (stateSet[j]) {
-                    partials[v] = 1.0;
-                } else {
-                    partials[v] = 0.0;
+            if (patternList instanceof UncertainSiteList) {
+                ((UncertainSiteList) patternList).fillPartials(sequenceIndex, i, partials, v);
+                v += stateCount;
+                // TODO Add this functionality to SimpleSiteList to avoid if statement here
+            } else {
+
+                int state = patternList.getPatternState(sequenceIndex, i);
+                stateSet = dataType.getStateSet(state);
+
+                for (int j = 0; j < stateCount; j++) {
+                    if (stateSet[j]) {
+                        partials[v] = 1.0;
+                    } else {
+                        partials[v] = 0.0;
+                    }
+                    v++;
                 }
-                v++;
             }
         }  // TODO Note code duplication with BTL, refactor when debugged
 
diff --git a/src/dr/app/beagle/evomodel/treelikelihood/BeagleTreeLikelihood.java b/src/dr/app/beagle/evomodel/treelikelihood/BeagleTreeLikelihood.java
index d9d516c..2a94e14 100644
--- a/src/dr/app/beagle/evomodel/treelikelihood/BeagleTreeLikelihood.java
+++ b/src/dr/app/beagle/evomodel/treelikelihood/BeagleTreeLikelihood.java
@@ -40,6 +40,7 @@ import dr.app.beagle.tools.Partition;
 import dr.evolution.alignment.Alignment;
 import dr.evolution.alignment.AscertainedSitePatterns;
 import dr.evolution.alignment.PatternList;
+import dr.evolution.alignment.UncertainSiteList;
 import dr.evolution.datatype.Nucleotides;
 import dr.evolution.io.NewickImporter;
 import dr.evolution.tree.NodeRef;
@@ -51,10 +52,15 @@ import dr.evomodel.branchratemodel.DefaultBranchRateModel;
 import dr.evomodel.branchratemodel.StrictClockBranchRates;
 import dr.evomodel.tree.TreeModel;
 import dr.evomodel.treelikelihood.TipStatesModel;
+import dr.inference.model.BooleanLikelihood;
 import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 import dr.inference.model.ThreadAwareLikelihood;
 import dr.math.MathUtils;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+import dr.util.CommonCitations;
 
 import java.util.*;
 import java.util.logging.Logger;
@@ -69,7 +75,7 @@ import java.util.logging.Logger;
  */
 
 @SuppressWarnings("serial")
-public class BeagleTreeLikelihood extends AbstractSinglePartitionTreeLikelihood implements ThreadAwareLikelihood {
+public class BeagleTreeLikelihood extends AbstractSinglePartitionTreeLikelihood implements ThreadAwareLikelihood, Citable {
 
     // This property is a comma-delimited list of resource numbers (0 == CPU) to
     // allocate each BEAGLE instance to. If less than the number of instances then
@@ -79,6 +85,7 @@ public class BeagleTreeLikelihood extends AbstractSinglePartitionTreeLikelihood
     private static final String REQUIRED_FLAGS_PROPERTY = "beagle.required.flags";
     private static final String SCALING_PROPERTY = "beagle.scaling";
     private static final String RESCALE_FREQUENCY_PROPERTY = "beagle.rescale";
+    private static final String DELAY_SCALING_PROPERTY = "beagle.delay.scaling";
     private static final String EXTRA_BUFFER_COUNT_PROPERTY = "beagle.extra.buffer.count";
     private static final String FORCE_VECTORIZATION = "beagle.force.vectorization";
 
@@ -103,9 +110,10 @@ public class BeagleTreeLikelihood extends AbstractSinglePartitionTreeLikelihood
                                 BranchRateModel branchRateModel,
                                 TipStatesModel tipStatesModel,
                                 boolean useAmbiguities,
-                                PartialsRescalingScheme rescalingScheme) {
+                                PartialsRescalingScheme rescalingScheme,
+                                boolean delayRescalingUntilUnderflow) {
 
-        this(patternList, treeModel, branchModel, siteRateModel, branchRateModel, tipStatesModel, useAmbiguities, rescalingScheme, null);
+        this(patternList, treeModel, branchModel, siteRateModel, branchRateModel, tipStatesModel, useAmbiguities, rescalingScheme, delayRescalingUntilUnderflow, null);
     }
 
     public BeagleTreeLikelihood(PatternList patternList,
@@ -116,6 +124,7 @@ public class BeagleTreeLikelihood extends AbstractSinglePartitionTreeLikelihood
                                 TipStatesModel tipStatesModel,
                                 boolean useAmbiguities,
                                 PartialsRescalingScheme rescalingScheme,
+                                boolean delayRescalingUntilUnderflow,
                                 Map<Set<String>, Parameter> partialsRestrictions) {
 
         super(BeagleTreeLikelihoodParser.TREE_LIKELIHOOD, patternList, treeModel);
@@ -184,6 +193,8 @@ public class BeagleTreeLikelihood extends AbstractSinglePartitionTreeLikelihood
 
             // first set the rescaling scheme to use from the parser
             this.rescalingScheme = rescalingScheme;
+            this.delayRescalingUntilUnderflow = delayRescalingUntilUnderflow;
+
             int[] resourceList = null;
             long preferenceFlags = 0;
             long requirementFlags = 0;
@@ -221,12 +232,19 @@ public class BeagleTreeLikelihood extends AbstractSinglePartitionTreeLikelihood
                 }
             }
 
+            // to keep behaviour of the delayed scheme (always + delay)...
+            if (this.rescalingScheme == PartialsRescalingScheme.DELAYED) {
+                this.delayRescalingUntilUnderflow = true;
+                this.rescalingScheme = PartialsRescalingScheme.ALWAYS;
+            }
+
             if (this.rescalingScheme == PartialsRescalingScheme.AUTO) {
                 preferenceFlags |= BeagleFlag.SCALING_AUTO.getMask();
                 useAutoScaling = true;
             } else {
 //                preferenceFlags |= BeagleFlag.SCALING_MANUAL.getMask();
             }
+
             String r = System.getProperty(RESCALE_FREQUENCY_PROPERTY);
             if (r != null) {
                 rescalingFrequency = Integer.parseInt(r);
@@ -235,6 +253,12 @@ public class BeagleTreeLikelihood extends AbstractSinglePartitionTreeLikelihood
                 }
             }
 
+            String d = System.getProperty(DELAY_SCALING_PROPERTY);
+            if (d != null) {
+                this.delayRescalingUntilUnderflow = Boolean.parseBoolean(d);
+            }
+
+
             if (preferenceFlags == 0 && resourceList == null) { // else determine dataset characteristics
                 if (stateCount == 4 && patternList.getPatternCount() < 10000) // TODO determine good cut-off
                     preferenceFlags |= BeagleFlag.PROCESSOR_CPU.getMask();
@@ -310,6 +334,11 @@ public class BeagleTreeLikelihood extends AbstractSinglePartitionTreeLikelihood
             } else {
                 logger.info("  No external BEAGLE resources available, or resource list/requirements not met, using Java implementation");
             }
+
+            if (patternList instanceof UncertainSiteList) {
+                useAmbiguities = true;
+            }
+
             logger.info("  " + (useAmbiguities ? "Using" : "Ignoring") + " ambiguities in tree likelihood.");
             logger.info("  With " + patternList.getPatternCount() + " unique site patterns.");
 
@@ -384,9 +413,16 @@ public class BeagleTreeLikelihood extends AbstractSinglePartitionTreeLikelihood
                 this.rescalingScheme = PartialsRescalingScheme.DYNAMIC;
                 rescaleMessage = "  Auto rescaling not supported in BEAGLE, using : " + this.rescalingScheme.getText();
             }
+            boolean parenthesis = false;
             if (this.rescalingScheme == PartialsRescalingScheme.DYNAMIC) {
-                rescaleMessage += " (rescaling every " + rescalingFrequency + " evaluations)";
+                rescaleMessage += " (rescaling every " + rescalingFrequency + " evaluations";
+                parenthesis = true;
+            }
+            if (this.delayRescalingUntilUnderflow) {
+                rescaleMessage += (parenthesis ? ", " : " (") + "delay rescaling until first underflow";
+                parenthesis = true;
             }
+            rescaleMessage += (parenthesis ? ")" : "");
             logger.info(rescaleMessage);
 
             if (this.rescalingScheme == PartialsRescalingScheme.DYNAMIC) {
@@ -467,6 +503,10 @@ public class BeagleTreeLikelihood extends AbstractSinglePartitionTreeLikelihood
         return rescalingScheme;
     }
 
+    public boolean isDelayRescalingUntilUnderflow() {
+        return delayRescalingUntilUnderflow;
+    }
+
     public Map<Set<String>, Parameter> getPartialsRestrictions() {
         return partialsRestrictions;
     }
@@ -498,16 +538,23 @@ public class BeagleTreeLikelihood extends AbstractSinglePartitionTreeLikelihood
         int v = 0;
         for (int i = 0; i < patternCount; i++) {
 
-            int state = patternList.getPatternState(sequenceIndex, i);
-            stateSet = dataType.getStateSet(state);
+            if (patternList instanceof UncertainSiteList) {
+                ((UncertainSiteList) patternList).fillPartials(sequenceIndex, i, partials, v);
+                v += stateCount;
+                // TODO Add this functionality to SimpleSiteList to avoid if statement here
+            } else {
+
+                int state = patternList.getPatternState(sequenceIndex, i);
+                stateSet = dataType.getStateSet(state);
 
-            for (int j = 0; j < stateCount; j++) {
-                if (stateSet[j]) {
-                    partials[v] = 1.0;
-                } else {
-                    partials[v] = 0.0;
+                for (int j = 0; j < stateCount; j++) {
+                    if (stateSet[j]) {
+                        partials[v] = 1.0;
+                    } else {
+                        partials[v] = 0.0;
+                    }
+                    v++;
                 }
-                v++;
             }
         }
 
@@ -750,30 +797,26 @@ public class BeagleTreeLikelihood extends AbstractSinglePartitionTreeLikelihood
 
         recomputeScaleFactors = false;
 
-        if (this.rescalingScheme == PartialsRescalingScheme.ALWAYS) {
-            useScaleFactors = true;
-            recomputeScaleFactors = true;
-        } else if (this.rescalingScheme == PartialsRescalingScheme.DYNAMIC && everUnderflowed) {
-            useScaleFactors = true;
+        if (!this.delayRescalingUntilUnderflow || everUnderflowed) {
+            if (this.rescalingScheme == PartialsRescalingScheme.ALWAYS || this.rescalingScheme == PartialsRescalingScheme.DELAYED) {
+                useScaleFactors = true;
+                recomputeScaleFactors = true;
+            } else if (this.rescalingScheme == PartialsRescalingScheme.DYNAMIC) {
+                useScaleFactors = true;
+
+                if (rescalingCount > rescalingFrequency) {
+                    rescalingCount = 0;
+                    rescalingCountInner = 0;
+                }
 
-            if (rescalingCount > rescalingFrequency) {
-                rescalingCount = 0;
-                rescalingCountInner = 0;
-            }
+                if (rescalingCountInner < RESCALE_TIMES) {
+                    recomputeScaleFactors = true;
+                    updateAllNodes();
+                    rescalingCountInner++;
+                }
 
-            if (rescalingCountInner < RESCALE_TIMES) {
-                recomputeScaleFactors = true;
-                updateAllNodes();
-//                makeDirty();
-//                System.err.println("Recomputing scale factors");
-                rescalingCountInner++;
+                rescalingCount++;
             }
-
-            rescalingCount++;
-        } else if (this.rescalingScheme == PartialsRescalingScheme.DELAYED && everUnderflowed) {
-            useScaleFactors = true;
-            recomputeScaleFactors = true;
-            rescalingCount++;
         }
 
         if (tipStatesModel != null) {
@@ -893,10 +936,18 @@ public class BeagleTreeLikelihood extends AbstractSinglePartitionTreeLikelihood
                 everUnderflowed = true;
                 logL = Double.NEGATIVE_INFINITY;
 
-                if (firstRescaleAttempt && (rescalingScheme == PartialsRescalingScheme.DYNAMIC || rescalingScheme == PartialsRescalingScheme.DELAYED)) {
+                if (firstRescaleAttempt && (delayRescalingUntilUnderflow || rescalingScheme == PartialsRescalingScheme.DELAYED)) {
                     // we have had a potential under/over flow so attempt a rescaling
                     if (rescalingScheme == PartialsRescalingScheme.DYNAMIC || (rescalingCount == 0)) {
-                        Logger.getLogger("dr.evomodel").info("Underflow calculating likelihood. Attempting a rescaling...");
+                        // show a message but only every 1000 rescales
+                        if (rescalingMessageCount % 1000 == 0) {
+                            if (rescalingMessageCount > 0) {
+                                Logger.getLogger("dr.evomodel").info("Underflow calculating likelihood (" + rescalingMessageCount + " messages not shown).");
+                            } else {
+                                Logger.getLogger("dr.evomodel").info("Underflow calculating likelihood. Attempting a rescaling...");
+                            }
+                        }
+                        rescalingMessageCount += 1;
                     }
                     useScaleFactors = true;
                     recomputeScaleFactors = true;
@@ -1171,6 +1222,7 @@ public class BeagleTreeLikelihood extends AbstractSinglePartitionTreeLikelihood
 
     private PartialsRescalingScheme rescalingScheme;
     private int rescalingFrequency = RESCALE_FREQUENCY;
+    private boolean delayRescalingUntilUnderflow = true;
 
     protected boolean useScaleFactors = false;
     private boolean useAutoScaling = false;
@@ -1180,6 +1232,8 @@ public class BeagleTreeLikelihood extends AbstractSinglePartitionTreeLikelihood
     private int rescalingCountInner = 0;
 //    private int storedRescalingCount;
 
+    private int rescalingMessageCount = 0;
+
     /**
      * the branch-site model for these sites
      */
@@ -1330,11 +1384,11 @@ public class BeagleTreeLikelihood extends AbstractSinglePartitionTreeLikelihood
             );
             Alignment alignment = simulator.simulate(false, false);
 
-            BeagleTreeLikelihood nbtl = new BeagleTreeLikelihood(alignment, treeModel, homogeneousBranchModel, siteRateModel, branchRateModel, null, false, PartialsRescalingScheme.DEFAULT);
+            BeagleTreeLikelihood nbtl = new BeagleTreeLikelihood(alignment, treeModel, homogeneousBranchModel, siteRateModel, branchRateModel, null, false, PartialsRescalingScheme.DEFAULT, false);
 
             System.out.println("nBTL(homogeneous) = " + nbtl.getLogLikelihood());
 
-            nbtl = new BeagleTreeLikelihood(alignment, treeModel, epochBranchModel, siteRateModel, branchRateModel, null, false, PartialsRescalingScheme.DEFAULT);
+            nbtl = new BeagleTreeLikelihood(alignment, treeModel, epochBranchModel, siteRateModel, branchRateModel, null, false, PartialsRescalingScheme.DEFAULT, false);
 
             System.out.println("nBTL(epoch) = " + nbtl.getLogLikelihood());
 
@@ -1369,4 +1423,18 @@ public class BeagleTreeLikelihood extends AbstractSinglePartitionTreeLikelihood
         return siteLogLikelihoods;
     }
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.FRAMEWORK;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Using BEAGLE likelihood calculation library";
+    }
+
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CommonCitations.AYRES_2012_BEAGLE);
+    }
+
 }//END: class
\ No newline at end of file
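
The rescaling hunks above reduce to one decision per likelihood evaluation: rescale up front unless rescaling is being delayed and no underflow has been seen yet (the old DELAYED scheme is mapped to ALWAYS plus the delay flag in the constructor). A minimal sketch of that decision, with names mirroring the patch but written as a hypothetical standalone helper rather than BEAST code:

    // Hypothetical helper illustrating the decision logic of the patch above;
    // neither type is part of BEAST.
    enum Scheme { NONE, DYNAMIC, ALWAYS }

    final class RescaleDecision {
        boolean useScaleFactors = false;
        boolean recomputeScaleFactors = false;

        static RescaleDecision decide(Scheme scheme, boolean delayUntilUnderflow, boolean everUnderflowed) {
            RescaleDecision d = new RescaleDecision();
            if (!delayUntilUnderflow || everUnderflowed) {   // while delaying, do nothing until underflow occurs
                if (scheme == Scheme.ALWAYS) {
                    d.useScaleFactors = true;
                    d.recomputeScaleFactors = true;          // recompute on every evaluation
                } else if (scheme == Scheme.DYNAMIC) {
                    d.useScaleFactors = true;                // recomputation is throttled by rescalingFrequency
                }
            }
            return d;
        }
    }
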
diff --git a/src/dr/app/beagle/evomodel/treelikelihood/MarkovJumpsBeagleTreeLikelihood.java b/src/dr/app/beagle/evomodel/treelikelihood/MarkovJumpsBeagleTreeLikelihood.java
index 335313c..6b11bfc 100644
--- a/src/dr/app/beagle/evomodel/treelikelihood/MarkovJumpsBeagleTreeLikelihood.java
+++ b/src/dr/app/beagle/evomodel/treelikelihood/MarkovJumpsBeagleTreeLikelihood.java
@@ -44,6 +44,8 @@ import dr.inference.markovjumps.MarkovJumpsRegisterAcceptor;
 import dr.inference.markovjumps.MarkovJumpsType;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
+import dr.util.Citation;
+import dr.util.CommonCitations;
 
 import java.util.*;
 
@@ -67,6 +69,7 @@ public class MarkovJumpsBeagleTreeLikelihood extends AncestralStateBeagleTreeLik
                                            TipStatesModel tipStatesModel,
                                            boolean useAmbiguities,
                                            PartialsRescalingScheme scalingScheme,
+                                           boolean delayScaling,
                                            Map<Set<String>, Parameter> partialsRestrictions,
                                            DataType dataType, String stateTag,
                                            boolean useMAP,
@@ -76,7 +79,7 @@ public class MarkovJumpsBeagleTreeLikelihood extends AncestralStateBeagleTreeLik
                                            int nSimulants) {
 
         super(patternList, treeModel, branchModel, siteRateModel, branchRateModel, tipStatesModel, useAmbiguities,
-                scalingScheme, partialsRestrictions, dataType, stateTag, useMAP, returnMarginalLikelihood);
+                scalingScheme, delayScaling, partialsRestrictions, dataType, stateTag, useMAP, returnMarginalLikelihood);
 
         this.useUniformization = useUniformization;
         this.reportUnconditionedColumns = reportUnconditionedColumns;
@@ -623,6 +626,17 @@ public class MarkovJumpsBeagleTreeLikelihood extends AncestralStateBeagleTreeLik
         }
     }
 
+    @Override
+    public String getDescription() {
+        return super.getDescription() + " and MarkovJumps inference techniques";
+    }
+
+    public List<Citation> getCitations() {
+        List<Citation> tmp = super.getCitations();
+        tmp.add(CommonCitations.MININ_2008_COUNTING);
+        return tmp;
+    }
+
     public static final String ALL_HISTORY = "history_all";
     public static final String HISTORY = "history";
     public static final String TOTAL_COUNTS = "allTransitions";
diff --git a/src/dr/app/beagle/evomodel/treelikelihood/OldBeagleTreeLikelihood.java b/src/dr/app/beagle/evomodel/treelikelihood/OldBeagleTreeLikelihood.java
index 360298a..6f912b9 100644
--- a/src/dr/app/beagle/evomodel/treelikelihood/OldBeagleTreeLikelihood.java
+++ b/src/dr/app/beagle/evomodel/treelikelihood/OldBeagleTreeLikelihood.java
@@ -140,12 +140,12 @@ public class OldBeagleTreeLikelihood extends AbstractSinglePartitionTreeLikeliho
             addModel(this.siteRateModel);
 
             this.branchSubstitutionModel = branchSubstitutionModel;
-            if (!(branchSubstitutionModel instanceof HomogenousBranchSubstitutionModel)) {
-                logger.info("  Branch site model used: " + branchSubstitutionModel.getModelName());
-                if (branchSubstitutionModel instanceof Citable) {
-                    logger.info("      Please cite: " + Citable.Utils.getCitationString((Citable) branchSubstitutionModel, "", ""));
-                }
-            }
+//            if (!(branchSubstitutionModel instanceof HomogenousBranchSubstitutionModel)) {
+//                logger.info("  Branch site model used: " + branchSubstitutionModel.getModelName());
+//                if (branchSubstitutionModel instanceof Citable) {
+//                    logger.info("      Please cite: " + Citable.Utils.getCitationString((Citable) branchSubstitutionModel, "", ""));
+//                }
+//            }
             eigenCount = this.branchSubstitutionModel.getEigenCount();
             addModel(branchSubstitutionModel);
 
diff --git a/src/dr/app/beagle/evomodel/treelikelihood/SplitBySiteTraitLogger.java b/src/dr/app/beagle/evomodel/treelikelihood/SplitBySiteTraitLogger.java
index bf7e4f7..bad1087 100644
--- a/src/dr/app/beagle/evomodel/treelikelihood/SplitBySiteTraitLogger.java
+++ b/src/dr/app/beagle/evomodel/treelikelihood/SplitBySiteTraitLogger.java
@@ -36,8 +36,7 @@ import dr.util.Citation;
 import dr.util.CommonCitations;
 import dr.xml.*;
 
-import java.util.ArrayList;
-import java.util.List;
+import java.util.*;
 import java.util.logging.Logger;
 
 /**
@@ -150,14 +149,17 @@ public class SplitBySiteTraitLogger extends TreeTraitProvider.Helper implements
         }
     };
 
-    /**
-     * @return a list of citations associated with this object
-     */
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.COUNTING_PROCESSES;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Split by site trait logger";
+    }
+
     public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(
-                CommonCitations.SUCHARD_2012
-        );
-        return citations;
+        return Collections.singletonList(CommonCitations.LEMEY_2012_RENAISSANCE);
     }
 }
diff --git a/src/dr/app/beagle/evomodel/utilities/CompleteHistoryLogger.java b/src/dr/app/beagle/evomodel/utilities/CompleteHistoryLogger.java
index 4b36da7..76f9c1c 100644
--- a/src/dr/app/beagle/evomodel/utilities/CompleteHistoryLogger.java
+++ b/src/dr/app/beagle/evomodel/utilities/CompleteHistoryLogger.java
@@ -39,7 +39,7 @@ import dr.util.CommonCitations;
 
 import java.awt.*;
 import java.io.Serializable;
-import java.util.ArrayList;
+import java.util.*;
 import java.util.List;
 import java.util.logging.Logger;
 
@@ -258,15 +258,19 @@ public class CompleteHistoryLogger implements Loggable, Citable {
         return columns;
     }
 
-    /**
-     * @return a list of citations associated with this object
-     */
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.COUNTING_PROCESSES;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Complete history logger";
+    }
+
     public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(CommonCitations.LEMEY_2012);
-        citations.add(CommonCitations.SHAPIRO_2012);
-        citations.add(CommonCitations.BLOOM_2012);
-        return citations;
+        return Arrays.asList(//CommonCitations.LEMEY_2012, // TODO Find published Lemey paper
+                CommonCitations.MININ_2008_FAST, CommonCitations.BLOOM_2013_STABILITY);
     }
 
     final private Tree tree;
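
The two logger diffs above replace hand-built citation lists with the three-method Citable pattern (category, description, citations) that the @Override annotations indicate the dr.util.Citable interface now declares. A minimal sketch of a class following that pattern; MyCountingLogger is hypothetical, while the dr.util types and the CommonCitations constant are the ones already imported in the patch:

    import dr.util.Citable;
    import dr.util.Citation;
    import dr.util.CommonCitations;

    import java.util.Collections;
    import java.util.List;

    // Hypothetical Citable implementation following the pattern used by
    // SplitBySiteTraitLogger and CompleteHistoryLogger above.
    public class MyCountingLogger implements Citable {

        @Override
        public Citation.Category getCategory() {
            return Citation.Category.COUNTING_PROCESSES;   // groups the entry in BeastParser's citation summary
        }

        @Override
        public String getDescription() {
            return "My counting logger";                   // one-line label printed with the citations
        }

        @Override
        public List<Citation> getCitations() {
            return Collections.singletonList(CommonCitations.MININ_2008_FAST);
        }
    }
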
diff --git a/src/dr/app/beagle/mm/MMAlgorithm.java b/src/dr/app/beagle/mm/MMAlgorithm.java
new file mode 100644
index 0000000..ebecfd6
--- /dev/null
+++ b/src/dr/app/beagle/mm/MMAlgorithm.java
@@ -0,0 +1,125 @@
+/*
+ * MMAlgorithm.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.app.beagle.mm;
+
+import dr.app.beagle.multidimensionalscaling.MultiDimensionalScalingLikelihood;
+import dr.evomodel.continuous.FullyConjugateMultivariateTraitLikelihood;
+import dr.xml.*;
+
+/**
+ * Created by msuchard on 12/15/15.
+ */
+public abstract class MMAlgorithm {
+
+    public static final double DEFAULT_TOLERANCE = 1E-1;
+    public static final int DEFAULT_MAX_ITERATIONS = 1000;
+
+    public double[] findMode(final double[] startingValue) throws NotConvergedException {
+        return findMode(startingValue, DEFAULT_TOLERANCE, DEFAULT_MAX_ITERATIONS);
+    }
+
+    public double[] findMode(final double[] startingValue, final double tolerance,
+                             final int maxIterations) throws NotConvergedException {
+
+        if (DEBUG) {
+            System.err.println("Starting findMode with " + tolerance + " " + maxIterations);
+        }
+
+        double[] buffer1 = new double[startingValue.length];
+        double[] buffer2 = new double[startingValue.length];
+
+        double[] current = buffer1;
+        double[] next = buffer2;
+
+        System.arraycopy(startingValue, 0, next, 0, startingValue.length);
+        int iteration = 0;
+
+        do {
+            // Move next -> current
+            double[] tmp = current;
+            current = next;
+            next = tmp;
+
+            if (DEBUG) {
+                System.err.println("Current: " + printArray(current));
+            }
+
+            mmUpdate(current, next);
+            ++iteration;
+
+            if (DEBUG) {
+                System.err.println("Finished iteration " + iteration);
+            }
+
+        } while (convergenceCriterion(next, current) > tolerance && iteration < maxIterations);
+
+        if (iteration >= maxIterations) {
+            throw new NotConvergedException();
+        }
+
+        if (DEBUG) {
+            System.err.println("Final  : " + printArray(next));
+        }
+
+        return next;
+    }
+
+    static private String format =  "%5.3e";
+
+    protected String printArray(double[] x) {
+        StringBuilder sb = new StringBuilder();
+        sb.append(String.format(format, x[0]));
+        for (int i = 1; i < x.length; ++i) {
+            sb.append(", ").append(String.format(format, x[i]));
+        }
+        return sb.toString();
+    }
+
+    protected abstract void mmUpdate(final double[] current, double[] next);
+
+    private double convergenceCriterion(final double[] current, final double[] previous) {
+        double norm = 0.0;
+
+        for (int i = 0; i < current.length; ++i) {
+            norm += (current[i] - previous[i]) * (current[i] - previous[i]);
+        }
+
+        double value = Math.sqrt(norm);
+
+        if (DEBUG) {
+            System.err.println("Convergence = " + value);
+        }
+
+        return value;
+    }
+
+
+    class NotConvergedException extends Exception {
+        // Nothing interesting
+    }
+
+    private static final boolean DEBUG = false;
+}
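
MMAlgorithm above drives a generic minorize-maximize (MM) loop: it swaps the current and next buffers, asks the subclass for one mmUpdate step, and stops when the Euclidean distance between successive iterates falls below the tolerance, throwing NotConvergedException if maxIterations is reached first. A toy subclass, purely illustrative and not part of the patch, shows the contract; it sits in the same package so the package-private exception type is accessible:

    package dr.app.beagle.mm;

    // Toy update that halves every coordinate, so findMode converges to the
    // origin; a real subclass would implement a proper MM surrogate step here.
    public class HalvingMM extends MMAlgorithm {

        @Override
        protected void mmUpdate(final double[] current, double[] next) {
            for (int i = 0; i < current.length; ++i) {
                next[i] = 0.5 * current[i];
            }
        }

        public static void main(String[] args) throws Exception {
            double[] mode = new HalvingMM().findMode(new double[]{1.0, -2.0, 3.0});
            System.out.println(java.util.Arrays.toString(mode));  // values close to zero
        }
    }
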
diff --git a/src/dr/app/beagle/mm/MultiDimensionalScalingMM.java b/src/dr/app/beagle/mm/MultiDimensionalScalingMM.java
new file mode 100644
index 0000000..7241f16
--- /dev/null
+++ b/src/dr/app/beagle/mm/MultiDimensionalScalingMM.java
@@ -0,0 +1,269 @@
+/*
+ * MultiDimensionalScalingMM.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.app.beagle.mm;
+
+import dr.app.beagle.multidimensionalscaling.MultiDimensionalScalingLikelihood;
+import dr.evomodel.continuous.FullyConjugateMultivariateTraitLikelihood;
+import dr.inference.model.MatrixParameterInterface;
+import dr.inference.model.Parameter;
+import dr.inference.operators.EllipticalSliceOperator;
+import dr.math.distributions.GaussianProcessRandomGenerator;
+import dr.math.matrixAlgebra.Matrix;
+import dr.xml.*;
+
+/**
+ * Created by msuchard on 12/15/15.
+ */
+public class MultiDimensionalScalingMM extends MMAlgorithm {
+
+    private final MultiDimensionalScalingLikelihood likelihood;
+    private final GaussianProcessRandomGenerator gp;
+
+    private final int P; // Embedding dimension
+    private final int Q; // Data dimension
+
+    private double[] XtX = null;
+    private double[] D = null;
+    private double[] distance = null;
+
+    public MultiDimensionalScalingMM(MultiDimensionalScalingLikelihood likelihood,
+                                     GaussianProcessRandomGenerator gp) {
+        super();
+
+        this.likelihood = likelihood;
+        this.gp = gp;
+
+        this.P = likelihood.getMdsDimension();
+        this.Q = likelihood.getLocationCount();
+
+        if (gp != null) {
+            double[][] precision = gp.getPrecisionMatrix();
+            setPrecision(precision);
+        }
+
+        double[] mode = null;
+
+        System.err.println("Start: " + printArray(likelihood.getMatrixParameter().getParameterValues()));
+
+        try {
+            mode = findMode(likelihood.getMatrixParameter().getParameterValues());
+        } catch (NotConvergedException e) {
+            e.printStackTrace();
+        }
+
+        System.err.println("Final: " + printArray(mode));
+
+        EllipticalSliceOperator.transformPoint(mode, true, true, P);
+
+        System.err.println("Final: " + printArray(mode));
+
+        setParameterValues(likelihood.getMatrixParameter(), mode);
+    }
+
+    private void setParameterValues(MatrixParameterInterface mat, double[] values) {
+
+        mat.setAllParameterValuesQuietly(values, 0);
+        mat.setParameterValueNotifyChangedAll(0, 0, values[0]); // Fire changed
+    }
+
+    private double[] getDistanceMatrix() {
+        return likelihood.getObservations();
+    }
+
+    private void setPrecision(double[][] matrix) {
+
+        if (!ignoreGP) {
+
+            final int QP = matrix.length;
+            if (QP != this.Q * this.P) throw new IllegalArgumentException("Invalid dimensions");
+
+            precision = matrix;
+//            precision = new double[QP * QP];
+//            precisionSign = new int[QP * QP];
+
+            precisionStatistics = new double[QP];
+
+            for (int ik = 0; ik < QP; ++ik) {
+                double sum = 0.0;
+                for (int jl = 0; jl < QP; ++jl) {
+//                    double value = weightTree * matrix[ik][jl];
+                    if (ik != jl) {
+                        sum += Math.abs(precision[ik][jl]);
+                    }
+//                    precisionSign[ik * QP + jl] = (value > 0.0) ? 1 : -1;
+//                    precision[ik * QP + jl] = Math.abs(value);
+                }
+                precisionStatistics[ik] = sum;
+            }
+
+        }
+
+    }
+
+    protected void mmUpdate(final double[] current, double[] next) {
+
+        if (XtX == null) {
+            XtX = new double[Q * Q];
+        }
+
+        if (D == null) {
+            D = new double[Q * Q];
+            for (int i = 0; i < Q; ++i) {
+                D[i * Q + i] = 1.0; // To protect against divide-by-zero
+            }
+        }
+
+        if (distance == null) {
+            distance = getDistanceMatrix();
+        }
+
+        // Compute XtX
+        for (int i = 0; i < Q; ++i) {
+            for (int j = i; j < Q; ++j) {
+                double innerProduct = 0.0;
+                for (int k = 0; k < P; ++k) {
+                    innerProduct += current[i * P + k] * current[j * P + k];
+                }
+                XtX[j * Q + i] = XtX[i * Q + j] = innerProduct;
+            }
+        }
+
+        // Compute D
+        for (int i = 0; i < Q; ++i) {
+            for (int j = i + 1; j < Q; ++j) { // TODO XtX is not a necessary intermediate
+                double norm2 = XtX[i * Q + i] + XtX[j * Q + j] - 2 * XtX[i * Q + j];
+                double norm = norm2 > 0.0 ? Math.sqrt(norm2) : 0.0;
+                D[j * Q + i] = D[i * Q + j] = Math.max(norm, 1E-3);
+
+                if (Double.isNaN(D[i * Q + j])) {
+                    System.err.println("D NaN");
+                    System.err.println(XtX[i * Q + i]);
+                    System.err.println(XtX[j * Q + j]);
+                    System.err.println(2 * XtX[i * Q + j]);
+                    System.err.println(norm2);
+                    System.err.println(norm);
+                    System.exit(-1);
+                }
+            }
+        }
+
+        // Compute update
+        for (int i = 0; i < Q; ++i) { // TODO Embarrassingly parallel
+            for (int k = 0; k < P; ++k) { // TODO Embarrassingly parallel
+
+                final int ik = i * P + k;
+                final int QP = Q * P;
+
+                double numerator = 0.0;
+                for (int j = 0; j < Q; ++j) {
+                    int add = (i != j)? 1 : 0;
+                    double inc = distance[i * Q + j] * (current[i * P + k] - current[j * P + k]) / D[i * Q + j]
+                                                + (current[i * P + k] + current[j * P + k]);
+                    if (Double.isNaN(inc)) {
+                        System.err.println("Bomb at " + i + " " + k + " " + j);
+                        System.err.println("Distance = " + distance[i * Q + j]);
+                        System.err.println("Ci = " + current[i * P + k]);
+                        System.err.println("Cj = " + current[j * P + k]);
+                        System.err.println("D = " + D[i * Q + j]);
+                        System.exit(-1);
+                    }
+
+                    if (precision != null) {
+                        for (int l = 0; l < P; ++l) {
+                            final int jl = j * P + l;
+                            final double prec = precision[ik][jl];
+                            final int sign = (prec > 0.0) ? 1 : -1;
+                            inc += weightTree * prec * (current[i * P + k] - sign * current[j * P + k]);
+                        }
+                    }
+
+                    numerator += add * inc;
+                }
+                double denominator = 2 * (Q - 1);
+
+                if (precision != null) {
+                    denominator += weightTree * (2 * precision[ik][ik] + precisionStatistics[ik]);
+                }
+
+                next[i * P + k] = numerator / denominator;
+            }
+        }
+    }
+
+   // **************************************************************
+    // XMLObjectParser
+    // **************************************************************
+
+    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+
+        public static final String MDS_STARTING_VALUES = "mdsModeFinder";
+
+        public String getParserName() {
+            return MDS_STARTING_VALUES;
+        }
+
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+            MultiDimensionalScalingLikelihood likelihood =
+                    (MultiDimensionalScalingLikelihood) xo.getChild(MultiDimensionalScalingLikelihood.class);
+
+            GaussianProcessRandomGenerator gp =
+                    (GaussianProcessRandomGenerator) xo.getChild(GaussianProcessRandomGenerator.class);
+
+            return new MultiDimensionalScalingMM(likelihood, gp);
+        }
+
+        //************************************************************************
+        // AbstractXMLObjectParser implementation
+        //************************************************************************
+
+        public String getParserDescription() {
+            return "Provides a mode finder for a MDS model on a tree";
+        }
+
+        public XMLSyntaxRule[] getSyntaxRules() {
+            return rules;
+        }
+
+        private final XMLSyntaxRule[] rules = {
+                new ElementRule(MultiDimensionalScalingLikelihood.class),
+                new ElementRule(GaussianProcessRandomGenerator.class, true),
+        };
+
+        public Class getReturnType() {
+            return MultiDimensionalScalingMM.class;
+        }
+    };
+
+    private double[][] precision = null;
+
+    private double[] precisionStatistics = null;
+//    private int[] precisionSign = null;
+
+    private boolean ignoreGP = false;
+
+    private double weightTree = 0.00001;  // TODO Formally compute
+}
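
Read as a formula, the mmUpdate loop above moves each coordinate to

    x_{ik} \leftarrow \frac{\sum_{j \ne i} \left[ \delta_{ij} \, (x_{ik} - x_{jk}) / d_{ij} + (x_{ik} + x_{jk}) \right]}{2 (Q - 1)}

where \delta_{ij} are the observed distances, d_{ij} the current pairwise distances (floored at 10^{-3}) and Q the number of locations; when a Gaussian-process precision matrix is supplied, weightTree-scaled penalty terms are added to both numerator and denominator. This restatement is offered only as a reading aid for the loop; the derivation of the surrogate behind it is not part of the patch.
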
diff --git a/src/dr/app/beagle/multidimensionalscaling/MassivelyParallelMDSImpl.java b/src/dr/app/beagle/multidimensionalscaling/MassivelyParallelMDSImpl.java
index 350bfe2..611486b 100644
--- a/src/dr/app/beagle/multidimensionalscaling/MassivelyParallelMDSImpl.java
+++ b/src/dr/app/beagle/multidimensionalscaling/MassivelyParallelMDSImpl.java
@@ -43,8 +43,6 @@ public class MassivelyParallelMDSImpl implements MultiDimensionalScalingCore {
     private NativeMDSSingleton singleton = null;
     private int instance = -1; // Get instance # via initialization
 
-//    private static final long LEFT_TRUNCATION = 1 << 5;
-
     public MassivelyParallelMDSImpl() {
         singleton = NativeMDSSingleton.loadLibrary();
     }
@@ -68,22 +66,24 @@ public class MassivelyParallelMDSImpl implements MultiDimensionalScalingCore {
     }
 
     @Override
+    public double[] getPairwiseData() {
+        return singleton.getPairwiseData(instance);
+    }
+
+    @Override
     public void updateLocation(int locationIndex, double[] location) {
         singleton.updateLocations(instance, locationIndex, location);
     }
 
     @Override
     public double calculateLogLikelihood() {
-        double sumOfSquaredResiduals = singleton.getSumOfSquaredResiduals(instance);
-
-        // TODO Missing - n / 2 * log(2 * pi)
+        double sumOfIncrements = singleton.getSumOfIncrements(instance);
 
-        double logLikelihood = 0.5 * (Math.log(precision) - Math.log(2 * Math.PI)) * observationCount -
-                        (0.5 * precision * sumOfSquaredResiduals);
+        double logLikelihood = 0.5 * (Math.log(precision) - Math.log(2 * Math.PI)) * observationCount - sumOfIncrements;
 
-        if (isLeftTruncated) {
-            logLikelihood -= singleton.getSumOfLogTruncations(instance);
-        }
+//        if (isLeftTruncated) {
+//            logLikelihood -= singleton.getSumOfLogTruncations(instance);
+//        }
 
         return logLikelihood;
     }
@@ -114,5 +114,4 @@ public class MassivelyParallelMDSImpl implements MultiDimensionalScalingCore {
     private double precision;
     private double storedPrecision;
     private boolean isLeftTruncated;
-
 }
diff --git a/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingCore.java b/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingCore.java
index 0a9d09d..6dc8c2f 100644
--- a/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingCore.java
+++ b/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingCore.java
@@ -86,4 +86,9 @@ public interface MultiDimensionalScalingCore {
      * Accept the proposed state
      */
     void acceptState();
+
+    /**
+     * Get pair-wise data
+     */
+    double[] getPairwiseData();
 }
diff --git a/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingCoreImpl2.java b/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingCoreImpl2.java
index dfab459..5f553c5 100644
--- a/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingCoreImpl2.java
+++ b/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingCoreImpl2.java
@@ -48,23 +48,15 @@ public class MultiDimensionalScalingCoreImpl2 implements MultiDimensionalScaling
         this.embeddingDimension = embeddingDimension;
         this.locationCount = locationCount;
         this.observationCount = (locationCount * (locationCount - 1)) / 2;
-        this.isLeftTruncated = isLeftTruncated;
 
         observations = new double[locationCount][locationCount];
-        squaredResiduals = new double[locationCount][locationCount];
-        storedSquaredResiduals = null;
-        residualsKnown = false;
-        sumOfSquaredResidualsKnown = false;
+        increments = new double[locationCount][locationCount];
+        storedIncrements = null;
+        incrementsKnown = false;
+        sumOfIncrementsKnown = false;
 
         isLeftTruncated = (flags & MultiDimensionalScalingCore.LEFT_TRUNCATION) != 0;
 
-        if (isLeftTruncated) {
-            truncations = new double[locationCount][locationCount];
-            storedTruncations = null;
-            truncationsKnown = false;
-            sumOfTruncationsKnown = false;
-        }
-
         updatedLocation = -1;
 
         locations = new double[locationCount][embeddingDimension];
@@ -85,25 +77,34 @@ public class MultiDimensionalScalingCoreImpl2 implements MultiDimensionalScaling
     }
 
     @Override
+    public double[] getPairwiseData() {
+        double[] data = new double[locationCount * locationCount];
+        int k = 0;
+        for (int i = 0; i < locationCount; ++i) {
+            System.arraycopy(observations[i], 0, data, k, locationCount);
+            k += locationCount;
+        }
+        return data;
+    }
+
+    @Override
     public void setParameters(double[] parameters) {
         precision = parameters[0];
 
         // Handle truncations
-        truncationsKnown = false;
-        sumOfTruncationsKnown = false;
+        if (isLeftTruncated) {
+            incrementsKnown = false;
+            sumOfIncrementsKnown = false;
+        }
     }
 
     @Override
     public void updateLocation(int locationIndex, double[] location) {
         if (updatedLocation != -1 || locationIndex == -1) {
             // more than one location updated - do a full recomputation
-            residualsKnown = false;
-            storedSquaredResiduals = null;
-
-            // Handle truncations
-            truncationsKnown = false;
-            storedTruncations = null;
-        }
+            incrementsKnown = false;
+            storedIncrements = null;
+         }
 
         if (locationIndex != -1) {
             updatedLocation = locationIndex;
@@ -126,49 +127,38 @@ public class MultiDimensionalScalingCoreImpl2 implements MultiDimensionalScaling
             }
         }
 
-        sumOfSquaredResidualsKnown = false;
-
-        // Handle truncation
-        sumOfTruncationsKnown = false;
+        sumOfIncrementsKnown = false;
     }
 
     @Override
     public double calculateLogLikelihood() {
-        if (!sumOfSquaredResidualsKnown) {
+        if (!sumOfIncrementsKnown) {
 
-            if (!residualsKnown) {
+            if (!incrementsKnown) {
                 computeSumOfSquaredResiduals();
             } else {
                 updateSumOfSquaredResiduals();
                 if (REPORT_ROUNDOFF) {
                     // Report round-off error
-                    double storedSumOfSquaredResults = sumOfSquaredResiduals;
+                    double storedSumOfSquaredResults = sumOfIncrements;
                     computeSumOfSquaredResiduals();
-                    if (Math.abs(storedSumOfSquaredResults - sumOfSquaredResiduals) > 1E-6) {
+                    if (Math.abs(storedSumOfSquaredResults - sumOfIncrements) > 1E-6) {
                         System.err.println(storedSumOfSquaredResults);
-                        System.err.println(sumOfSquaredResiduals);
-                        System.err.println(storedSumOfSquaredResults - sumOfSquaredResiduals);
+                        System.err.println(sumOfIncrements);
+                        System.err.println(storedSumOfSquaredResults - sumOfIncrements);
                         System.err.println("");
                     }
                 }
             }
-            sumOfSquaredResidualsKnown = true;
+            sumOfIncrementsKnown = true;
         }
 
-        double logLikelihood = 0.5 * (Math.log(precision) - Math.log(2 * Math.PI)) * observationCount -
-                (0.5 * precision * sumOfSquaredResiduals);
+        double logLikelihood = 0.5 * (Math.log(precision) - Math.log(2 * Math.PI)) * observationCount;
 
         if (isLeftTruncated) {
-            if (!sumOfTruncationsKnown) {
-
-                if (!truncationsKnown) {
-                    computeSumOfTruncations();
-                } else {
-                    updateSumOfTruncations();
-                }
-                sumOfTruncationsKnown = true;
-            }
-            logLikelihood -= truncationSum;
+            logLikelihood -= sumOfIncrements; // If truncated, the increments already include the precision and truncation terms
+        } else {
+            logLikelihood -= 0.5 * precision * sumOfIncrements;
         }
 
         return logLikelihood;
@@ -177,8 +167,8 @@ public class MultiDimensionalScalingCoreImpl2 implements MultiDimensionalScaling
     @Override
     public void storeState() {
         // Handle residuals
-        storedSumOfSquaredResiduals = sumOfSquaredResiduals;
-        storedSquaredResiduals = null;
+        storedSumOfIncrements = sumOfIncrements;
+        storedIncrements = null;
 
         // Handle locations
         for (int i = 0; i < locationCount; i++) {
@@ -188,28 +178,22 @@ public class MultiDimensionalScalingCoreImpl2 implements MultiDimensionalScaling
 
         // Handle precision
         storedPrecision = precision;
-
-        // Handle truncations
-        if (isLeftTruncated) {
-            storedTruncationSum = truncationSum;
-            storedTruncations = null;
-        }
     }
 
     @Override
     public void restoreState() {
         // Handle residuals
-        sumOfSquaredResiduals = storedSumOfSquaredResiduals;
-        sumOfSquaredResidualsKnown = true;
+        sumOfIncrements = storedSumOfIncrements;
+        sumOfIncrementsKnown = true;
 
-        if (storedSquaredResiduals != null) {
-            System.arraycopy(storedSquaredResiduals, 0 , squaredResiduals[updatedLocation], 0, locationCount);
+        if (storedIncrements != null) {
+            System.arraycopy(storedIncrements, 0 , increments[updatedLocation], 0, locationCount);
 //            for (int j = 0; j < locationCount; j++) { // Do not write transposed values
-//                squaredResiduals[j][updatedLocation] = storedSquaredResiduals[j];
+//                increments[j][updatedLocation] = storedIncrements[j];
 //            }
-            residualsKnown = true;
+            incrementsKnown = true;
         } else {
-            residualsKnown = false;
+            incrementsKnown = false;
         }
 
         // Handle locations
@@ -219,151 +203,86 @@ public class MultiDimensionalScalingCoreImpl2 implements MultiDimensionalScaling
 
         // Handle precision
         precision = storedPrecision;
-
-        // Handle truncations
-        if (isLeftTruncated) {
-            truncationSum = storedTruncationSum;
-            sumOfTruncationsKnown = true;
-
-            if (storedTruncations != null) {
-                System.arraycopy(storedTruncations, 0, truncations[updatedLocation], 0, locationCount);
-//                for (int j = 0; j < locationCount; ++j) { // Do not write transposed values
-//                    truncations[j][updatedLocation] = storedTruncations[j];
-//                }
-                truncationsKnown = true;
-            } else {
-                truncationsKnown = false;
-            }
-        }
     }
 
     @Override
     public void acceptState() {
-        if (storedSquaredResiduals != null) {
+        if (storedIncrements != null) {
             for (int j = 0; j < locationCount; ++j) {
-                squaredResiduals[j][updatedLocation] = squaredResiduals[updatedLocation][j];
-            }
-        }
-
-        if (isLeftTruncated) {
-            if (storedTruncations != null) {
-                for (int j = 0; j < locationCount; ++j) { // Do not write transposed values
-                    truncations[j][updatedLocation] = truncations[updatedLocation][j];
-                }
+                increments[j][updatedLocation] = increments[updatedLocation][j];
             }
         }
     }
 
     @Override
     public void makeDirty() {
-        sumOfSquaredResidualsKnown = false;
-        residualsKnown = false;
-
-        sumOfTruncationsKnown = false;
-        truncationsKnown = false;
+        sumOfIncrementsKnown = false;
+        incrementsKnown = false;
     }
 
     protected void computeSumOfSquaredResiduals() {
 
+        final double oneOverSd = Math.sqrt(precision);
+        final double scale = 0.5 * precision;
+
         // OLD
-        sumOfSquaredResiduals = 0.0;
+        sumOfIncrements = 0.0;
         for (int i = 0; i < locationCount; i++) {
 
             for (int j = 0; j < locationCount; j++) {
                 double distance = calculateDistance(locations[i], locations[j]);
                 double residual = distance - observations[i][j];
-                double squaredResidual = residual * residual;
-                squaredResiduals[i][j] = squaredResidual;
-//                squaredResiduals[j][i] = squaredResidual; // Do not write transposed values
-                sumOfSquaredResiduals += squaredResidual;
+                double increment = residual * residual;
+                if (isLeftTruncated) {
+                    increment = scale * increment;
+                    if (i != j) {
+                        increment += computeTruncation(distance, precision, oneOverSd);
+//                        increment += computeTruncation(Math.sqrt(residual * residual), precision, oneOverSd); // OLD .. believed incorrect
+                    }
+                }
+                increments[i][j] = increment;
+//                increments[j][i] = increment; // Do not write transposed values
+                sumOfIncrements += increment;
             }
         }
 
-        sumOfSquaredResiduals /= 2;
-
-        // New   TODO
-//        sumOfSquaredResiduals = 0.0;
-//         for (int i = 0; i < locationCount; i++) {
-//
-//             for (int j = i + 1; j < locationCount; j++) {
-//                 double distance = calculateDistance(locations[i], locations[j]);
-//                 double residual = distance - observations[i][j];
-//                 double squaredResidual = residual * residual;
-//                 squaredResiduals[i][j] = squaredResidual;
-//                 squaredResiduals[j][i] = squaredResidual;
-//                 sumOfSquaredResiduals += squaredResidual;
-//             }
-//         }
-
-        residualsKnown = true;
-        sumOfSquaredResidualsKnown = true;
+        sumOfIncrements /= 2;
+
+        incrementsKnown = true;
+        sumOfIncrementsKnown = true;
     }
 
-    protected void computeSumOfTruncations() {
+    protected void updateSumOfSquaredResiduals() {
 
         final double oneOverSd = Math.sqrt(precision);
+        final double scale = 0.5 * precision;
 
-        truncationSum = 0.0;
-        for (int i = 0; i < locationCount; i++) {
-
-            for (int j = 0; j < locationCount; j++) {
-                double squaredResidual = squaredResiduals[i][j]; // Note just written above, save transaction
-                double truncation = (i == j) ? 0.0 : computeTruncation(squaredResidual, precision, oneOverSd);
-                truncations[i][j] =  truncation;
-//                truncations[j][i] = truncation;
-                truncationSum += truncation;
-            }
-        }
-
-        truncationSum /= 2;
-
-        truncationsKnown = true;
-        sumOfTruncationsKnown = true;
-    }
-
-    protected void updateSumOfSquaredResiduals() {
         double delta = 0.0;
 
-        int i = updatedLocation;
+        final int i = updatedLocation;
 
-        storedSquaredResiduals = new double[locationCount];
-        System.arraycopy(squaredResiduals[i], 0, storedSquaredResiduals, 0, locationCount);
+        storedIncrements = new double[locationCount];
+        System.arraycopy(increments[i], 0, storedIncrements, 0, locationCount);
 
         for (int j = 0; j < locationCount; j++) {
             double distance = calculateDistance(locations[i], locations[j]);
             double residual = distance - observations[i][j];
-            double squaredResidual = residual * residual;
-
-            delta += squaredResidual - squaredResiduals[i][j];
-
-            squaredResiduals[i][j] = squaredResidual;
-//            squaredResiduals[j][i] = squaredResidual; // Do not write transposed values
-        }
+            double increment = residual * residual;
 
-        sumOfSquaredResiduals += delta;
-    }
-
-    protected void updateSumOfTruncations() {
-        final double oneOverSd = Math.sqrt(precision);
-        double delta = 0.0;
-
-        int i = updatedLocation;
-
-        storedTruncations = new double[locationCount];
-        System.arraycopy(truncations[i], 0, storedTruncations, 0, locationCount);
-
-        for (int j = 0; j < locationCount; j++) {
-
-            double squaredResidual = squaredResiduals[i][j];
-            double truncation = (i == j) ? 0.0 : computeTruncation(squaredResidual, precision, oneOverSd);
-
-            delta += truncation - truncations[i][j];
+            if (isLeftTruncated) {
+                increment = scale * increment;
+                if (i != j) {
+                    increment += computeTruncation(distance, precision, oneOverSd);
+//                    increment += computeTruncation(Math.sqrt(residual * residual), precision, oneOverSd); // OLD .. believed incorrect
+                }
+            }
 
-            truncations[i][j] = truncation;
-//            truncations[j][i] = truncation; // Do not write transposed values
+            delta += increment - increments[i][j];
+            increments[i][j] = increment;
+//            increments[j][i] = increment; // Do not write transposed values
         }
 
-        truncationSum += delta;
+        sumOfIncrements += delta;
     }
 
     protected double calculateDistance(double[] X, double[] Y) {
@@ -375,33 +294,10 @@ public class MultiDimensionalScalingCoreImpl2 implements MultiDimensionalScaling
         return Math.sqrt(sum);
     }
 
-    protected double computeTruncation(double squaredResidual, double precision, double oneOverSd) {
-        return NormalDistribution.standardCDF(Math.sqrt(squaredResidual) * oneOverSd, true);
+    protected double computeTruncation(double mean, double precision, double oneOverSd) {
+        return NormalDistribution.standardCDF(mean * oneOverSd, true); // equivalent to standardCDF(mean / sd, true), since oneOverSd == 1 / sd
     }
 
-//    protected void calculateTruncations(double precision) {
-//        double sd = 1.0 / Math.sqrt(precision);
-//        for (int i = 0; i < distanceCount; i++) {
-//            if (distanceUpdated[i]) {
-//                truncations[i] = NormalDistribution.cdf(distances[i], 0.0, sd, true);
-//            }
-//        }
-//        truncationsKnown = true;
-//    }
-//
-//    protected double calculateTruncationSum() {
-//        double sum = 0.0;
-//        for (int i = 0; i < observationCount; i++) {
-//            int dist = getDistanceIndexForObservation(i);
-//            if (dist != -1) {
-//                sum += truncations[dist];
-//            } else {
-//                sum += Math.log(0.5);
-//            }
-//        }
-//        return sum;
-//    }
-
     private int embeddingDimension;
     private boolean isLeftTruncated = false;
     private int locationCount;
@@ -415,23 +311,15 @@ public class MultiDimensionalScalingCoreImpl2 implements MultiDimensionalScaling
     private double[][] locations;
     private double[][] storedLocations;
 
-    private boolean residualsKnown = false;
-
-    private boolean sumOfSquaredResidualsKnown = false;
-    private double[][] squaredResiduals;
-
-    private double[] storedSquaredResiduals;
+    private boolean incrementsKnown = false;
 
-    private double sumOfSquaredResiduals;
-    private double storedSumOfSquaredResiduals;
+    private boolean sumOfIncrementsKnown = false;
+    private double[][] increments;
 
-    private boolean truncationsKnown = false;
-    private boolean sumOfTruncationsKnown = false;
+    private double[] storedIncrements;
 
-    private double truncationSum;
-    private double storedTruncationSum;
-    private double[][] truncations;
-    private double[] storedTruncations;
+    private double sumOfIncrements;
+    private double storedSumOfIncrements;
 
     private static boolean REPORT_ROUNDOFF = false;
 
diff --git a/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingLikelihood.java b/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingLikelihood.java
index fc9506e..3aadf54 100644
--- a/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingLikelihood.java
+++ b/src/dr/app/beagle/multidimensionalscaling/MultiDimensionalScalingLikelihood.java
@@ -40,10 +40,17 @@ import java.util.Map;
  * @author Marc Suchard
  * @version $Id$
  */
-public class MultiDimensionalScalingLikelihood extends AbstractModelLikelihood {
+public class MultiDimensionalScalingLikelihood extends AbstractModelLikelihood implements Reportable {
 
     public static final String REQUIRED_FLAGS_PROPERTY = "mds.required.flags";
 
+    @Override
+    public String getReport() {
+        StringBuilder sb = new StringBuilder();
+        sb.append(getId() + ": " + getLogLikelihood());
+        return sb.toString();
+    }
+
     public enum ObservationType {
         POINT,
         UPPER_BOUND,
@@ -104,7 +111,8 @@ public class MultiDimensionalScalingLikelihood extends AbstractModelLikelihood {
         String[] rowLabels = new String[locationCount];
 
         int observationCount = rowCount * rowCount;
-        double[] observations = new double[observationCount];
+//        double[] observations = new double[observationCount];
+        observations = new double[observationCount];
         ObservationType[] observationTypes = new ObservationType[observationCount];
 
         double[][] tmp = new double[rowCount][rowCount];
@@ -132,6 +140,22 @@ public class MultiDimensionalScalingLikelihood extends AbstractModelLikelihood {
                 rowLabels, observations, observationTypes);
     }
 
+//    private class Data {
+//        int observationCount;
+//        double[] observations;
+//        ObservationType[] observationTypes;
+//
+//        Data(int observationCount, double[] observations, ObservationType[] observationTypes) {
+//            this.observationCount = observationCount;
+//            this.observations = observations;
+//            this.observationTypes = observationTypes;
+//        }
+//    }
+
+    public double[] getObservations() { return observations; }    // TODO Grab from core when needed to save space
+
+    public MatrixParameterInterface getMatrixParameter() { return locationsParameter; }
+
     private int[] getPermutation(String[] source, MatrixParameterInterface destination) {
 
         if (source.length != destination.getColumnDimension()) {
@@ -183,6 +207,10 @@ public class MultiDimensionalScalingLikelihood extends AbstractModelLikelihood {
         return core;
     }
 
+    public int getMdsDimension() { return mdsDimension; }
+
+    public int getLocationCount() { return locationCount; }
+
     protected void initialize(
             final int mdsDimension,
             final Parameter mdsPrecision,
@@ -421,4 +449,6 @@ public class MultiDimensionalScalingLikelihood extends AbstractModelLikelihood {
     private double storedLogLikelihood;
 
     private long flags = 0;
+
+    private double[] observations;
 }
diff --git a/src/dr/app/beagle/multidimensionalscaling/NativeMDSSingleton.java b/src/dr/app/beagle/multidimensionalscaling/NativeMDSSingleton.java
index 82b421f..bcd1c75 100644
--- a/src/dr/app/beagle/multidimensionalscaling/NativeMDSSingleton.java
+++ b/src/dr/app/beagle/multidimensionalscaling/NativeMDSSingleton.java
@@ -108,9 +108,7 @@ public class NativeMDSSingleton {
 
     public native void updateLocations(int instance, int updateCount, double[] locations);
 
-    public native double getSumOfSquaredResiduals(int instance);
-
-    public native double getSumOfLogTruncations(int instance);
+    public native double getSumOfIncrements(int instance);
 
     public native void storeState(int instance);
 
@@ -124,4 +122,15 @@ public class NativeMDSSingleton {
 
     public native void setParameters(int instance, double[] parameters);
 
+    public native double[] getPairwiseData(int instance);
+
+//jsize size = env->GetArrayLength( arr );
+//std::vector<double> input( size );
+//env->GetDoubleArrayRegion( arr, 0, size, &input[0] );
+//
+////  ...
+//
+//jdoubleArray output = env->NewDoubleArray( results.size() );
+//env->SetDoubleArrayRegion( output, 0, results.size(), &results[0] );
+
 }
diff --git a/src/dr/app/beast/BeastMain.java b/src/dr/app/beast/BeastMain.java
index b3c20f3..247eee0 100644
--- a/src/dr/app/beast/BeastMain.java
+++ b/src/dr/app/beast/BeastMain.java
@@ -44,6 +44,7 @@ import jam.util.IconUtils;
 
 import javax.swing.*;
 import java.io.File;
+import java.io.FileOutputStream;
 import java.io.FileReader;
 import java.io.IOException;
 import java.util.*;
@@ -124,6 +125,12 @@ public class BeastMain {
             });
             infoLogger.addHandler(errorHandler);
 
+            if (System.getProperty("citations.filename") != null) {
+                FileOutputStream citationStream = new FileOutputStream(System.getProperty("citations.filename"));
+                Handler citationHandler = new MessageLogHandler(citationStream);
+                Logger.getLogger("dr.apps.beast").addHandler(citationHandler);
+            }
+
             logger.setUseParentHandlers(false);
 
             infoLogger.info("Parsing XML file: " + fileName);
@@ -237,7 +244,7 @@ public class BeastMain {
                 infoLogger.severe("Error running file: " + fileName);
                 infoLogger.severe(
                         "The initial model is invalid because state has a zero probability.\n\n" +
-                                "If the log likelihood of the tree is -Inf, his may be because the\n" +
+                                "If the log likelihood of the tree is -Inf, this may be because the\n" +
                                 "initial, random tree is so large that it has an extremely bad\n" +
                                 "likelihood which is being rounded to zero.\n\n" +
                                 "Alternatively, it may be that the product of starting mutation rate\n" +
@@ -341,6 +348,7 @@ public class BeastMain {
                         new Arguments.Option("beagle_async", "BEAGLE: use asynchronous kernels if available"),
                         new Arguments.StringOption("beagle_scaling", new String[]{"default", "dynamic", "delayed", "always", "none"},
                                 false, "BEAGLE: specify scaling scheme to use"),
+                        new Arguments.Option("beagle_delay_scaling_off", "BEAGLE: don't wait until underflow for scaling option"),
                         new Arguments.LongOption("beagle_rescale", "BEAGLE: frequency of rescaling (dynamic scaling only)"),
                         new Arguments.Option("mpi", "Use MPI rank to label output"),
 
@@ -353,15 +361,19 @@ public class BeastMain {
                         new Arguments.LongOption("dump_state", "Specify a state at which to write a dump file"),
                         new Arguments.LongOption("dump_every", "Specify a frequency to write a dump file"),
 
+                        new Arguments.StringOption("citations_file", "FILENAME", "Specify a filename to write a citation list to"),
+
                         new Arguments.Option("version", "Print the version and credits and stop"),
                         new Arguments.Option("help", "Print this information and stop"),
                 });
 
         int argumentCount = 0;
 
-        StringBuilder commandLine = new StringBuilder(args[0]);
-        for (int i = 1; i < args.length; i++) {
-            commandLine.append(" ");
+        StringBuilder commandLine = new StringBuilder();
+        for (int i = 0; i < args.length; i++) {
+            if (i > 0) {
+                commandLine.append(" ");
+            }
             commandLine.append(args[i]);
         }
         System.setProperty("command_line", commandLine.toString());
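
[Illustration, not part of the patch: the loop above was rewritten because the old code seeded the StringBuilder with args[0], which throws when BEAST is launched with an empty argument list. The snippet below shows the equivalent behaviour with String.join.]

    public class CommandLineJoinDemo {
        public static void main(String[] args) {
            // With no arguments this yields "" instead of an
            // ArrayIndexOutOfBoundsException from args[0].
            String commandLine = String.join(" ", args);
            System.setProperty("command_line", commandLine);
            System.out.println("command_line=\"" + commandLine + "\"");
        }
    }
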
@@ -505,6 +517,10 @@ public class BeastMain {
             System.setProperty("beagle.scaling", arguments.getStringOption("beagle_scaling"));
         }
 
+        if (arguments.hasOption("beagle_delay_scaling_off")) {
+            System.setProperty("beagle.delay.scaling", Boolean.FALSE.toString());
+        }
+
         if (arguments.hasOption("beagle_rescale")) {
             System.setProperty("beagle.rescale", Long.toString(arguments.getLongOption("beagle_rescale")));
         }
@@ -544,6 +560,11 @@ public class BeastMain {
             System.setProperty(MCMC.DUMP_EVERY, Long.toString(debugWriteEvery));
         }
 
+        if (arguments.hasOption("citations_file")) {
+            String citationsFileName = arguments.getStringOption("citations_file");
+            System.setProperty("citations.filename", citationsFileName);
+        }
+
         if (useMPI) {
             String[] nullArgs = new String[0];
             try {
diff --git a/src/dr/app/beast/BeastParser.java b/src/dr/app/beast/BeastParser.java
index 871d8b8..98b3c47 100644
--- a/src/dr/app/beast/BeastParser.java
+++ b/src/dr/app/beast/BeastParser.java
@@ -25,6 +25,8 @@
 
 package dr.app.beast;
 
+import dr.util.Citation;
+import dr.util.Pair;
 import dr.xml.PropertyParser;
 import dr.xml.UserInput;
 import dr.xml.XMLObjectParser;
@@ -35,9 +37,8 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.lang.reflect.Field;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Properties;
+import java.util.*;
+import java.util.logging.Logger;
 
 /**
  * @author Alexei Drummond
@@ -55,6 +56,8 @@ public class BeastParser extends XMLParser {
     public BeastParser(String[] args, List<String> additionalParsers, boolean verbose, boolean parserWarnings, boolean strictXML) {
         super(parserWarnings, strictXML);
 
+        addCitable(BeastVersion.INSTANCE);
+
         setup(args);
 
         if (verbose) {
@@ -199,6 +202,44 @@ public class BeastParser extends XMLParser {
         }
     }
 
+    @Override
+    protected void executingRunnable() {
+        Logger.getLogger("dr.apps.beast").info("\n\nCitations for this analysis: ");
+
+        Map<String, Set<Pair<String, String>>> categoryMap = new LinkedHashMap<String, Set<Pair<String, String>>>();
+
+        // force the Framework category to be first...
+        categoryMap.put("Framework", new LinkedHashSet<Pair<String, String>>());
+
+        for (Pair<String, String> keyPair : getCitationStore().keySet()) {
+            Set<Pair<String, String>> pairSet = categoryMap.get(keyPair.fst);
+            if (pairSet == null) {
+                pairSet = new LinkedHashSet<Pair<String, String>>();
+                categoryMap.put(keyPair.fst, pairSet);
+            }
+            pairSet.add(keyPair);
+        }
+
+        for (String category : categoryMap.keySet()) {
+            Logger.getLogger("dr.apps.beast").info("\n"+category.toUpperCase());
+            Set<Pair<String, String>> pairSet = categoryMap.get(category);
+
+            for (Pair<String, String>keyPair : pairSet) {
+                Logger.getLogger("dr.apps.beast").info(keyPair.snd + ":");
+
+                for (Citation citation : getCitationStore().get(keyPair)) {
+                    Logger.getLogger("dr.apps.beast").info("\t" + citation.toString());
+                }
+            }
+        }
+
+        // clear the citation store so all the same citations don't get cited again
+        getCitationStore().clear();
+
+        Logger.getLogger("dr.apps.beast").info("\n");
+
+    }
+
     private void setup(String[] args) {
 
         for (int i = 0; i < args.length; i++) {
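
[Illustration, not part of the patch: executingRunnable() above groups the citation store's (category, description) keys in a LinkedHashMap, pre-seeding a "Framework" entry so that category is always printed first. The simplified sketch below reproduces that grouping with plain JDK types -- Map.Entry stands in for dr.util.Pair.]

    import java.util.AbstractMap;
    import java.util.Arrays;
    import java.util.LinkedHashMap;
    import java.util.LinkedHashSet;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    public class CitationGroupingDemo {
        public static void main(String[] args) {
            // (category, description) keys as they might come out of a citation store.
            List<Map.Entry<String, String>> keys = Arrays.asList(
                    new AbstractMap.SimpleEntry<String, String>("Substitution Models", "HKY substitution model"),
                    new AbstractMap.SimpleEntry<String, String>("Framework", "BEAST primary citation"));

            Map<String, Set<Map.Entry<String, String>>> categoryMap = new LinkedHashMap<>();
            categoryMap.put("Framework", new LinkedHashSet<>()); // force Framework to print first

            for (Map.Entry<String, String> key : keys) {
                categoryMap.computeIfAbsent(key.getKey(), k -> new LinkedHashSet<>()).add(key);
            }

            for (Map.Entry<String, Set<Map.Entry<String, String>>> category : categoryMap.entrySet()) {
                System.out.println("\n" + category.getKey().toUpperCase());
                for (Map.Entry<String, String> key : category.getValue()) {
                    System.out.println(key.getValue() + ":");
                }
            }
        }
    }
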
diff --git a/src/dr/app/beast/BeastVersion.java b/src/dr/app/beast/BeastVersion.java
index dcbb55f..cb1c0b1 100644
--- a/src/dr/app/beast/BeastVersion.java
+++ b/src/dr/app/beast/BeastVersion.java
@@ -25,8 +25,14 @@
 
 package dr.app.beast;
 
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
 import dr.util.Version;
 
+import java.util.Collections;
+import java.util.List;
+
 /**
  * This class provides a mechanism for returning the version number of the
  * dr software.
@@ -39,12 +45,14 @@ import dr.util.Version;
  *
  * $Id$
  */
-public class BeastVersion implements Version {
+public class BeastVersion implements Version, Citable {
+
+    public static final BeastVersion INSTANCE = new BeastVersion();
 
     /**
      * Version string: assumed to be in format x.x.x
      */
-    private static final String VERSION = "1.8.3";
+    private static final String VERSION = "1.8.4";
 
     private static final String DATE_STRING = "2002-2016";
 
@@ -52,7 +60,7 @@ public class BeastVersion implements Version {
 
     // this is now being manually updated since the move to GitHub. Using date in yyyymmdd format (suffix
     // with b,c,d etc if multiple revisions in a day.
-    private static final String REVISION = "GitHub 20160213";
+    private static final String REVISION = "GitHub 20160615";
 
     public String getVersion() {
         return VERSION;
@@ -128,4 +136,33 @@ public class BeastVersion implements Version {
             return "Invalid Revision String : " + REVISION;
         }
     }
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.FRAMEWORK;
+    }
+
+    @Override
+    public String getDescription() {
+        return "BEAST primary citation";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+    public static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("AJ", "Drummond"),
+                    new Author("MA", "Suchard"),
+                    new Author("Dong", "Xie"),
+                    new Author("A", "Rambaut")
+            },
+            "Bayesian phylogenetics with BEAUti and the BEAST 1.7",
+            2012,
+            "Mol Biol Evol",
+            29, 1969, 1973,
+            "10.1093/molbev/mss075");
+
 }
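
[Illustration, not part of the patch: the Citable implementation above publishes the primary BEAST reference through the fields passed to the Citation constructor (authors, title, year, journal, volume, page range, DOI). The sketch below is not dr.util.Citation's real formatting; it only shows how those fields might be assembled into a printable reference line.]

    public class CitationFormatDemo {
        public static void main(String[] args) {
            String[] authors = {"AJ Drummond", "MA Suchard", "Dong Xie", "A Rambaut"};
            String title = "Bayesian phylogenetics with BEAUti and the BEAST 1.7";
            String journal = "Mol Biol Evol";
            int year = 2012, volume = 29, firstPage = 1969, lastPage = 1973;
            String doi = "10.1093/molbev/mss075";

            // One plausible rendering of the citation fields above.
            String reference = String.join(", ", authors)
                    + " (" + year + ") " + title + ". "
                    + journal + " " + volume + ": " + firstPage + "-" + lastPage
                    + ". DOI: " + doi;
            System.out.println(reference);
        }
    }
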
diff --git a/src/dr/app/beast/beast.properties b/src/dr/app/beast/beast.properties
index 4dfb32a..95bc133 100644
--- a/src/dr/app/beast/beast.properties
+++ b/src/dr/app/beast/beast.properties
@@ -23,5 +23,5 @@
 # Boston, MA  02110-1301  USA
 #
 
-parsers=development
-#parsers=release
+#parsers=development
+parsers=release
diff --git a/src/dr/app/beast/development_parsers.properties b/src/dr/app/beast/development_parsers.properties
index 0242195..398e114 100644
--- a/src/dr/app/beast/development_parsers.properties
+++ b/src/dr/app/beast/development_parsers.properties
@@ -49,12 +49,13 @@ dr.inference.model.ParameterIntegerParser
 dr.inference.model.LikelihoodProfile
 dr.evomodelxml.continuous.BuildCompoundSymmetricMatrix
 dr.inferencexml.model.BlockUpperTriangularMatrixParameterParser
+dr.inferencexml.model.FastBUTMPParser
 dr.inferencexml.model.MatrixMatrixProductParser
 dr.inferencexml.model.MatrixVectorProductParameterParser
 dr.inferencexml.model.DifferenceMatrixParameterParser
-dr.inferencexml.model.DifferenceParameterParser
-dr.inferencexml.model.SumParameterParser
+dr.inferencexml.model.ElementWiseMatrixMultiplicationParser
 dr.inferencexml.model.ImmutableParameterParser
+dr.inferencexml.model.ComplementParameterParser
 
 # DISTRIBUTIONS
 
@@ -75,7 +76,6 @@ dr.evomodel.operators.FixedColouredOperator
 dr.evomodel.approxPopTree.PopTreeModel
 
 # TREE OPERATORS
-dr.evomodelxml.operators.SubtreeJumpOperatorParser
 dr.evomodelxml.operators.LatentFactorHamiltonianMCParser
 
 # TREE LIKELIHOOD
@@ -102,8 +102,6 @@ dr.evomodelxml.coalescent.operators.GaussianProcessSkytrackTreeOperatorParser
 # TREE SUMMARY STATISTICS
 dr.evomodelxml.WanderingTaxonLoggerParser
 
-dr.evomodel.treelikelihood.RootDiscreteStateStatistic
-
 # CONTINUOUS DIFFUSION
 dr.evomodel.continuous.VonMisesFisherDiffusionModel
 dr.evoxml.BifractionalDiffusionModelParser
@@ -145,6 +143,7 @@ dr.inferencexml.operators.FactorOperatorParser
 dr.inferencexml.operators.LoadingsGibbsOperatorParser
 dr.inferencexml.operators.LatentFactorModelPrecisionGibbsOperatorParser
 dr.inferencexml.operators.LoadingsIndependenceOperatorParser
+dr.evomodelxml.operators.LoadingsHamiltonianMCParser
 
 dr.inferencexml.operators.EllipticalSliceOperatorParser
 
@@ -169,8 +168,6 @@ dr.evomodelxml.substmodel.GeneralF81ModelParser
 # EPIDEMIOLOGY
 dr.evomodel.epidemiology.SIRModelParser
 dr.evomodel.epidemiology.SIRepidemicModelParser
-dr.evomodel.epidemiology.casetocase.operators.InfectionBranchGibbsOperator
-dr.evomodel.epidemiology.casetocase.operators.TransmissionTreeOperator
 
 # DISTRIBUTIONS
 dr.inferencexml.distribution.TruncatedNormalDistributionModelParser
@@ -212,11 +209,8 @@ dr.app.beagle.evomodel.branchmodel.lineagespecific.DirichletProcessOperatorParse
 dr.app.beagle.evomodel.branchmodel.lineagespecific.DirichletProcessPriorLoggerParser
 dr.app.beagle.evomodel.branchmodel.lineagespecific.RatioParameterParser
 
-# NEW Bayesian MDS
-dr.app.beagle.multidimensionalscaling.MultiDimensionalScalingLikelihood
-dr.inference.model.CompoundMatrixParameter
-dr.inference.model.CompoundFastMatrixParameter
-dr.inference.model.CopyParameterValuesParser
+
+dr.inference.operators.ModeIndependenceOperator
 dr.inferencexml.distribution.CompoundGaussianProcessParser
 dr.app.beagle.evomodel.parsers.NewBeagleTreeLikelihoodParser
 dr.inference.model.FastMatrixParameter
@@ -253,3 +247,9 @@ dr.evomodel.antigenic.phyloClustering.statistics.ClusterLabelsVirusesTreeTrait
 #Simulate clusters and HI
 dr.evomodel.antigenic.phyloClustering.misc.simulateClusters
 ###############################################################################################
+
+# GLM
+dr.inferencexml.operators.MaskMoveOperatorParser
+
+# Uncertain attributes:
+dr.evoxml.UncertainAttributePatternsParser
\ No newline at end of file
diff --git a/src/dr/app/beast/release_parsers.properties b/src/dr/app/beast/release_parsers.properties
index 14d9a47..01ed4c5 100644
--- a/src/dr/app/beast/release_parsers.properties
+++ b/src/dr/app/beast/release_parsers.properties
@@ -220,6 +220,7 @@ dr.evomodelxml.tree.MicrosatelliteSamplerTreeModelParser
 dr.evomodelxml.tree.TipHeightLikelihoodParser
 dr.evomodelxml.tree.TreeMetricStatisticParser
 dr.evomodelxml.tree.TreeLengthStatisticParser
+dr.evomodelxml.tree.TerminalBranchStatisticParser
 dr.evomodelxml.tree.NodeHeightsStatisticParser
 dr.evomodelxml.tree.TreeShapeStatisticParser
 dr.evomodelxml.tree.TMRCAStatisticParser
@@ -394,8 +395,6 @@ dr.inferencexml.trace.MarginalLikelihoodAnalysisParser
 dr.inferencexml.trace.HarmonicMeanAnalysisParser
 dr.inferencexml.trace.AICMAnalysisParser
 dr.inferencexml.trace.ArithmeticMeanAnalysisParser
-dr.inferencexml.trace.GeneralizedHarmonicMeanAnalysisParser
-
 
 #GMRF
 dr.evomodelxml.coalescent.operators.GMRFSkyrideFixedEffectsGibbsOperatorParser
@@ -598,6 +597,13 @@ dr.evomodel.epidemiology.casetocase.PartitionedTreeLoggerParser
 dr.evomodel.epidemiology.casetocase.PartitionedTreeModelParser
 dr.evomodel.epidemiology.casetocase.CategoryOutbreak
 
+# NEW Bayesian MDS
+dr.inference.model.CompoundMatrixParameter
+dr.inference.model.CompoundFastMatrixParameter
+dr.inference.model.CopyParameterValuesParser
+dr.app.beagle.mm.MultiDimensionalScalingMM
+dr.evomodel.antigenic.MultidimensionalScalingLikelihood
+
 # ANTIGENIC EVOLUTION/EVOLUTIONARY CARTOGRAPHY
 dr.evomodel.antigenic.AntigenicLikelihood
 dr.evomodel.antigenic.DriftedLocationsStatistic
@@ -608,4 +614,3 @@ dr.evomodel.antigenic.ClusterSplitMergeOperator
 dr.evomodel.antigenic.ClusterSingleMoveOperator
 dr.evomodel.antigenic.DistanceDependentCRPGibbsOperator
 dr.evomodel.antigenic.NPAntigenicLikelihood
-dr.evomodel.antigenic.MultidimensionalScalingLikelihood
diff --git a/src/dr/app/beauti/BeautiApp.java b/src/dr/app/beauti/BeautiApp.java
index b63cfbe..77e6d7a 100644
--- a/src/dr/app/beauti/BeautiApp.java
+++ b/src/dr/app/beauti/BeautiApp.java
@@ -26,7 +26,6 @@
 package dr.app.beauti;
 
 import dr.app.beast.BeastVersion;
-import dr.app.beauti.util.CommandLineBeauti;
 import dr.app.util.Arguments;
 import dr.app.util.OSType;
 import dr.util.Version;
@@ -145,21 +144,21 @@ public class BeautiApp extends MultiDocApplication {
             return;
         }
 
-        if (args2.length > 1) {
-
-            if (args.length != 3) {
-                printTitle();
-                printUsage(arguments);
-                System.exit(1);
-            }
-
-            String inputFileName = args[0];
-            String templateFileName = args[1];
-            String outputFileName = args[2];
-
-            new CommandLineBeauti(inputFileName, templateFileName, outputFileName);
-
-        } else {
+//        if (args2.length > 1) {
+//
+//            if (args.length != 3) {
+//                printTitle();
+//                printUsage(arguments);
+//                System.exit(1);
+//            }
+//
+//            String inputFileName = args[0];
+//            String templateFileName = args[1];
+//            String outputFileName = args[2];
+//
+//            new CommandLineBeauti(inputFileName, templateFileName, outputFileName);
+//
+//        } else {
             String inputFileName = null;
             if (args2.length == 1) {
                 inputFileName = args2[0];
@@ -275,7 +274,7 @@ public class BeautiApp extends MultiDocApplication {
                         JOptionPane.ERROR_MESSAGE);
                 e.printStackTrace();
             }
-        }
+//        }
     }
 
     public static boolean advanced = false;
diff --git a/src/dr/app/beauti/BeautiFrame.java b/src/dr/app/beauti/BeautiFrame.java
index f32f7f7..a51ded7 100644
--- a/src/dr/app/beauti/BeautiFrame.java
+++ b/src/dr/app/beauti/BeautiFrame.java
@@ -35,7 +35,6 @@ package dr.app.beauti;
 
 import dr.app.beauti.ancestralStatesPanel.AncestralStatesPanel;
 import dr.app.beauti.clockModelsPanel.ClockModelsPanel;
-import dr.app.beauti.clockModelsPanel.OldClockModelsPanel;
 import dr.app.beauti.components.ComponentFactory;
 import dr.app.beauti.components.ancestralstates.AncestralStatesComponentFactory;
 import dr.app.beauti.components.continuous.ContinuousComponentFactory;
@@ -48,7 +47,6 @@ import dr.app.beauti.components.sequenceerror.SequenceErrorModelComponentFactory
 import dr.app.beauti.components.tipdatesampling.TipDateSamplingComponentFactory;
 import dr.app.beauti.datapanel.DataPanel;
 import dr.app.beauti.generator.BeastGenerator;
-import dr.app.beauti.generator.ComponentGenerator;
 import dr.app.beauti.generator.Generator;
 import dr.app.beauti.mcmcpanel.MCMCPanel;
 import dr.app.beauti.operatorspanel.OperatorsPanel;
diff --git a/src/dr/app/beauti/clockModelsPanel/OldClockModelsPanel.java b/src/dr/app/beauti/clockModelsPanel/OldClockModelsPanel.java
deleted file mode 100644
index 310b815..0000000
--- a/src/dr/app/beauti/clockModelsPanel/OldClockModelsPanel.java
+++ /dev/null
@@ -1,681 +0,0 @@
-/*
- * OldClockModelsPanel.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.app.beauti.clockModelsPanel;
-
-import dr.app.beauti.BeautiFrame;
-import dr.app.beauti.BeautiPanel;
-import dr.app.beauti.ComboBoxRenderer;
-import dr.app.beauti.options.BeautiOptions;
-import dr.app.beauti.options.ClockModelGroup;
-import dr.app.beauti.options.PartitionClockModel;
-import dr.app.beauti.types.OldClockType;
-import dr.app.gui.table.RealNumberCellEditor;
-import dr.app.gui.table.TableEditorStopper;
-import dr.evolution.datatype.DataType;
-import jam.framework.Exportable;
-import jam.panels.ActionPanel;
-import jam.table.TableRenderer;
-
-import javax.swing.*;
-import javax.swing.border.TitledBorder;
-import javax.swing.plaf.BorderUIResource;
-import javax.swing.table.AbstractTableModel;
-import javax.swing.table.JTableHeader;
-import javax.swing.table.TableCellRenderer;
-import javax.swing.table.TableColumn;
-import java.awt.*;
-import java.awt.event.ActionEvent;
-import java.awt.event.MouseEvent;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * @author Andrew Rambaut
- * @author Alexei Drummond
- * @author Walter Xie
- * @version $Id: ClockModelPanel.java,v 1.17 2006/09/05 13:29:34 rambaut Exp $
- * @deprecated
- */
-public class OldClockModelsPanel extends BeautiPanel implements Exportable {
-
-    private static final long serialVersionUID = 2945922234432540027L;
-    private final String[] columnToolTips = {"Name", "Clock model",
-            "Decide whether to estimate this clock model",
-            "Provide the rate if it is fixed",
-            "The group which the clock model is belonging to"};
-    private final String[] columnToolTips2 = {"A group of clock models",
-            "<html>Fix mean rate of this group of clock models." +
-                    "<br>Select this option to fix the mean substitution rate,<br>" +
-                    "rather than try to infer it. If this option is turned off, then<br>" +
-                    "either the sequences should have dates or the tree should have<br>" +
-                    "sufficient calibration informations specified as priors.<br>" +
-                    "In addition, it is only available for multi-clock partitions.</html>",
-            "Enter the fixed mean rate here."};
-    private static final int MINIMUM_TABLE_HEIGHT = 400;
-
-    JTable clockModelTable = null;
-    ClockModelTableModel clockModelTableModel = null;
-    JScrollPane scrollPane;
-//    JCheckBox fixedMeanRateCheck = new JCheckBox("Fix mean rate of molecular clock model to: ");
-//    RealNumberField meanRateField = new RealNumberField(Double.MIN_VALUE, Double.MAX_VALUE);
-
-    JTable clockGroupTable = null;
-    ClockGroupTableModel clockGroupTableModel = null;
-
-    BeautiFrame frame = null;
-    BeautiOptions options = null;
-    boolean settingOptions = false;
-
-    public List<ClockModelGroup> clockModelGroupList = new ArrayList<ClockModelGroup>();
-
-    public OldClockModelsPanel(BeautiFrame parent) {
-
-        this.frame = parent;
-
-        clockModelTableModel = new ClockModelTableModel();
-        clockModelTable = new JTable(clockModelTableModel) {
-            //Implement table header tool tips.
-            protected JTableHeader createDefaultTableHeader() {
-                return new JTableHeader(columnModel) {
-                    public String getToolTipText(MouseEvent e) {
-                        Point p = e.getPoint();
-                        int index = columnModel.getColumnIndexAtX(p.x);
-                        int realIndex = columnModel.getColumn(index).getModelIndex();
-                        return columnToolTips[realIndex];
-                    }
-                };
-            }
-        };
-
-        initTable(clockModelTable);
-
-        scrollPane = new JScrollPane(clockModelTable,
-                JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED,
-                JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED);
-        scrollPane.setOpaque(false);
-
-        // PanelUtils.setupComponent(clockModelCombo);
-        // clockModelCombo.setToolTipText("<html>Select either a strict molecular clock or<br>or a relaxed clock model.</html>");
-        // clockModelCombo.addItemListener(comboListener);
-
-//        PanelUtils.setupComponent(fixedMeanRateCheck);
-//        fixedMeanRateCheck.setSelected(false); // default to FixRateType.ESTIMATE
-//        fixedMeanRateCheck.addItemListener(new ItemListener() {
-//            public void itemStateChanged(ItemEvent ev) {
-//                meanRateField.setEnabled(fixedMeanRateCheck.isSelected());
-//                if (fixedMeanRateCheck.isSelected()) {
-//                    options.clockModelOptions.fixMeanRate();
-//                } else {
-//                    options.clockModelOptions.fixRateOfFirstClockPartition();
-//                }
-//
-//                clockModelTableModel.fireTableDataChanged();
-//                fireModelsChanged();
-//            }
-//        });
-//        fixedMeanRateCheck.setToolTipText("<html>Select this option to fix the mean substitution rate,<br>"
-//                + "rather than try to infer it. If this option is turned off, then<br>"
-//                + "either the sequences should have dates or the tree should have<br>"
-//                + "sufficient calibration informations specified as priors.<br>"
-//                + "In addition, it is only available for multi-clock partitions." + "</html>");// TODO Alexei
-//
-//        PanelUtils.setupComponent(meanRateField);
-//        meanRateField.setEnabled(fixedMeanRateCheck.isSelected());
-//        meanRateField.setValue(1.0);
-//        meanRateField.addKeyListener(new java.awt.event.KeyAdapter() {
-//            public void keyTyped(java.awt.event.KeyEvent ev) {
-//                frame.setDirty();
-//            }
-//        });
-//        meanRateField.setToolTipText("<html>Enter the fixed mean rate here.</html>");
-//        meanRateField.setColumns(10);
-//		meanRateField.setEnabled(true);
-
-        JPanel modelPanelParent = new JPanel(new BorderLayout(12, 12));
-//        modelPanelParent.setLayout(new BoxLayout(modelPanelParent, BoxLayout.Y_AXIS));
-        modelPanelParent.setOpaque(false);
-        TitledBorder modelBorder = new TitledBorder("Clock Model : ");
-        modelPanelParent.setBorder(modelBorder);
-
-//        OptionsPanel panel = new OptionsPanel(12, 12);
-//        panel.addComponents(fixedMeanRateCheck, meanRateField);
-
-
-        // The bottom panel is now small enough that this is not necessary
-//        JScrollPane scrollPane2 = new JScrollPane(panel);
-//        scrollPane2.setOpaque(false);
-//        scrollPane2.setPreferredSize(new Dimension(400, 150));
-
-        modelPanelParent.add(scrollPane, BorderLayout.CENTER);
-//        modelPanelParent.add(panel, BorderLayout.SOUTH);
-
-        //=======  Clock Model Group for Fix Mean function ==========
-        clockGroupTableModel = new ClockGroupTableModel();
-        clockGroupTable = new JTable(clockGroupTableModel) {
-            //Implement table header tool tips.
-            protected JTableHeader createDefaultTableHeader() {
-                return new JTableHeader(columnModel) {
-                    public String getToolTipText(MouseEvent e) {
-                        Point p = e.getPoint();
-                        int index = columnModel.getColumnIndexAtX(p.x);
-                        int realIndex = columnModel.getColumn(index).getModelIndex();
-                        return columnToolTips2[realIndex];
-                    }
-                };
-            }
-        };
-        clockGroupTable.getSelectionModel().setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
-        clockGroupTable.setAutoResizeMode(JTable.AUTO_RESIZE_OFF);
-        clockGroupTable.getTableHeader().setReorderingAllowed(false);
-
-        TableColumn col = clockGroupTable.getColumnModel().getColumn(0);
-        col.setMinWidth(200);
-        col = clockGroupTable.getColumnModel().getColumn(1);
-        col.setMinWidth(40);
-        col.setCellRenderer(new GrayableCheckboxCellRenderer());
-        col = clockGroupTable.getColumnModel().getColumn(2);
-        col.setCellEditor(new RealNumberCellEditor(0, Double.POSITIVE_INFINITY));
-        col.setMinWidth(80);
-        TableEditorStopper.ensureEditingStopWhenTableLosesFocus(clockGroupTable);
-
-        JScrollPane d_scrollPane = new JScrollPane(clockGroupTable,
-                JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED,
-                JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED);
-        d_scrollPane.setOpaque(false);
-
-        ActionPanel actionPanel1 = new ActionPanel(false);
-        actionPanel1.setAddAction(addClockGroupAction);
-        actionPanel1.setRemoveAction(removeClockGroupAction);
-        addClockGroupAction.setEnabled(false);
-        removeClockGroupAction.setEnabled(false);
-
-        JPanel groupPanel = new JPanel(new BorderLayout(12, 12));
-        groupPanel.add(d_scrollPane, BorderLayout.CENTER);
-        groupPanel.add(actionPanel1, BorderLayout.SOUTH);
-        TitledBorder traitClockBorder = new TitledBorder("Clock Model Group: ");
-        groupPanel.setBorder(traitClockBorder);
-
-        JSplitPane splitPane = new JSplitPane(JSplitPane.VERTICAL_SPLIT, modelPanelParent, groupPanel);
-        splitPane.setDividerLocation(MINIMUM_TABLE_HEIGHT);
-
-        setOpaque(false);
-        setLayout(new BorderLayout(12, 12));
-        setBorder(new BorderUIResource.EmptyBorderUIResource(new Insets(12, 12, 12, 12)));
-        add(splitPane, BorderLayout.CENTER);
-    }
-
-    Action addClockGroupAction = new AbstractAction("+") {
-        public void actionPerformed(ActionEvent ae) {
-            String s = JOptionPane.showInputDialog(frame,
-                    "Please input the new group name below:",
-                    "Add A New Clock Model Group Dialog",
-                    JOptionPane.PLAIN_MESSAGE, null, null,
-                    clockModelGroupList.size() + "_group").toString().trim();
-
-            if ((s != null) && (s.length() > 0)) {
-                if (options.clockModelOptions.containsGroup(s, clockModelGroupList)) {
-                    errorMessageDialog("This name has been used already,\nplease input a new name.");
-                } else {
-                    clockModelGroupList.add(new ClockModelGroup(s));
-                    modelsChanged();
-                }
-
-            } else {
-                errorMessageDialog("Please input a name properly.");
-            }
-        }
-    };
-
-    Action removeClockGroupAction = new AbstractAction("-") {
-        public void actionPerformed(ActionEvent ae) {
-            int row = clockGroupTable.getSelectedRow();
-            if (row >= 0) {
-                ClockModelGroup group = clockModelGroupList.get(row);
-                if (options.getPartitionClockModels(group).size() > 0) {
-                    errorMessageDialog("Cannot remove the group " + group.getName() +
-                            ",\nwhich contains clock model(s)." +
-                            "\nPlease assign model(s) to a different group first.");
-                } else {
-                    clockModelGroupList.remove(row);
-                    modelsChanged();
-                }
-            } else {
-                errorMessageDialog("Please select a group properly.");
-            }
-        }
-    };
-
-    private void errorMessageDialog(String e) {
-        JOptionPane.showMessageDialog(this, e, "Clock Model Panel Error", JOptionPane.ERROR_MESSAGE);
-    }
-
-    private void initTable(JTable dataTable) {
-        dataTable.getSelectionModel().setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
-        dataTable.setAutoResizeMode(JTable.AUTO_RESIZE_OFF);
-        dataTable.getTableHeader().setReorderingAllowed(false);
-//        clockModelTable.getTableHeader().setDefaultRenderer(
-//              new HeaderRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
-
-        TableColumn col = dataTable.getColumnModel().getColumn(0);
-        col.setCellRenderer(new ClockTableCellRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
-        col.setMinWidth(200);
-
-        col = dataTable.getColumnModel().getColumn(1);
-        ComboBoxRenderer comboBoxRenderer = new ComboBoxRenderer();
-        comboBoxRenderer.putClientProperty("JComboBox.isTableCellEditor", Boolean.TRUE);
-        col.setCellRenderer(comboBoxRenderer);
-        col.setMinWidth(260);
-
-        col = dataTable.getColumnModel().getColumn(2);
-        col.setMinWidth(40);
-        col.setCellRenderer(new GrayableCheckboxCellRenderer());
-
-        col = dataTable.getColumnModel().getColumn(3);
-        col.setCellRenderer(new ClockTableCellRenderer(SwingConstants.LEFT, new Insets(0, 4, 0, 4)));
-        col.setCellEditor(new RealNumberCellEditor(0, Double.POSITIVE_INFINITY));
-        col.setMinWidth(80);
-
-        col = dataTable.getColumnModel().getColumn(4);
-        col.setCellRenderer(comboBoxRenderer);
-        col.setMinWidth(200);
-
-        TableEditorStopper.ensureEditingStopWhenTableLosesFocus(dataTable);
-    }
-
-    private void modelsChanged() {
-        TableColumn col = clockModelTable.getColumnModel().getColumn(1);
-        col.setCellEditor(new DefaultCellEditor(new JComboBox(new OldClockType[]{
-                OldClockType.STRICT_CLOCK,
-                OldClockType.UNCORRELATED_LOGNORMAL,
-//                OldClockType.UNCORRELATED_CAUCHY,
-//                OldClockType.UNCORRELATED_GAMMA,
-                OldClockType.UNCORRELATED_EXPONENTIAL,
-//                OldClockType.AUTOCORRELATED
-                OldClockType.RANDOM_LOCAL_CLOCK,
-                OldClockType.FIXED_LOCAL_CLOCK
-        })));
-
-        col = clockModelTable.getColumnModel().getColumn(4);
-        col.setCellEditor(new DefaultCellEditor(new JComboBox(options.clockModelOptions.getClockModelGroupNames(clockModelGroupList))));
-
-        addClockGroupAction.setEnabled(clockModelGroupList.size() > 0);
-        removeClockGroupAction.setEnabled(clockModelGroupList.size() > 1);
-
-        clockGroupTableModel.fireTableDataChanged();
-    }
-
-    private void fireModelsChanged() {
-        options.updatePartitionAllLinks();
-        clockModelTableModel.fireTableDataChanged();
-        clockGroupTableModel.fireTableDataChanged();
-        frame.setStatusMessage();
-        frame.setDirty();
-    }
-
-//    private void updateModelPanelBorder() {
-//    	if (options.hasData()) {
-//    		modelBorder.setTitle(options.clockModelOptions.getRateOptionClockModel().toString());
-//    	} else {
-//    		modelBorder.setTitle("Overall clock model(s) parameters");
-//    	}
-//
-//        repaint();
-//    }
-
-    public void setOptions(BeautiOptions options) {
-
-        this.options = options;
-
-        settingOptions = true;
-
-        clockModelGroupList = options.clockModelOptions.getClockModelGroups();
-        addClockGroupAction.setEnabled(clockModelGroupList.size() > 0);
-        removeClockGroupAction.setEnabled(clockModelGroupList.size() > 1);
-
-//        fixedMeanRateCheck.setSelected(options.clockModelOptions.getRateOptionClockModel() == FixRateType.FIX_MEAN);
-//        fixedMeanRateCheck.setEnabled(!(options.clockModelOptions.getRateOptionClockModel() == FixRateType.TIP_CALIBRATED
-//                || options.clockModelOptions.getRateOptionClockModel() == FixRateType.NODE_CALIBRATED
-//                || options.clockModelOptions.getRateOptionClockModel() == FixRateType.RATE_CALIBRATED));
-//        meanRateField.setValue(options.clockModelOptions.getMeanRelativeRate());
-
-        settingOptions = false;
-
-        int selRow = clockModelTable.getSelectedRow();
-        clockModelTableModel.fireTableDataChanged();
-        if (options.getPartitionClockModels().size() > 0) {
-            if (selRow < 0) {
-                selRow = 0;
-            }
-            clockModelTable.getSelectionModel().setSelectionInterval(selRow, selRow);
-        }
-
-//        fireModelsChanged();
-
-        modelsChanged();
-
-        clockModelTableModel.fireTableDataChanged();
-    }
-
-    public void getOptions(BeautiOptions options) {
-        if (settingOptions) return;
-
-//        if (fixedMeanRateCheck.isSelected()) {
-//        	options.clockModelOptions.fixMeanRate();
-//        } else {
-//        	options.clockModelOptions.fixRateOfFirstClockPartition();
-//        }
-//        options.clockModelOptions.setMeanRelativeRate(meanRateField.getValue());
-
-//        fireModelsChanged();
-    }
-
-    public JComponent getExportableComponent() {
-        return clockModelTable;
-    }
-
-    class ClockModelTableModel extends AbstractTableModel {
-
-        private static final long serialVersionUID = -2852144669936634910L;
-
-        //        String[] columnNames = {"Clock Model Name", "Molecular Clock Model"};
-        final private String[] columnNames = new String[]{"Name", "Model", "Estimate", "Rate", "Group"};
-
-        public ClockModelTableModel() {
-        }
-
-        public int getColumnCount() {
-            return columnNames.length;
-        }
-
-        public int getRowCount() {
-            if (options == null) return 0;
-            return options.getPartitionClockModels().size();
-        }
-
-        public Object getValueAt(int row, int col) {
-            PartitionClockModel model = options.getPartitionClockModels().get(row);
-            switch (col) {
-                case 0:
-                    return model.getName();
-                case 1:
-                    return OldClockType.getType(model.getClockType(), model.getClockDistributionType());
-                case 2:
-                    return model.isEstimatedRate();
-                case 3:
-                    return model.getRate();
-                case 4:
-                    return model.getClockModelGroup().getName();
-            }
-            return null;
-        }
-
-        public void setValueAt(Object aValue, int row, int col) {
-            PartitionClockModel model = options.getPartitionClockModels().get(row);
-            switch (col) {
-                case 0:
-                    String name = ((String) aValue).trim();
-                    if (name.length() > 0) {
-                        model.setName(name);
-                    }
-                    break;
-                case 1:
-                    OldClockType type = (OldClockType) aValue;
-                    model.setClockType(type.getClockType());
-                    model.setClockDistributionType(type.getClockDistributionType());
-                    break;
-                case 2:
-                    model.setEstimatedRate((Boolean) aValue);
-//                    if (options.clockModelOptions.getRateOptionClockModel() == FixRateType.RElATIVE_TO) {
-//                        if (!options.clockModelOptions.validateRelativeTo()) {
-//                            JOptionPane.showMessageDialog(frame, "It must have at least one clock rate to be fixed !",
-//                                    "Validation Of Relative To ?th Rate", JOptionPane.WARNING_MESSAGE);
-//                            model.setEstimatedRate(false);
-//                        }
-//                    }
-                    break;
-                case 3:
-                    model.setRate((Double) aValue, true);
-//                    options.selectParameters();
-                    break;
-                case 4:
-                    model.setClockModelGroup(options.clockModelOptions.getGroup(aValue.toString(), clockModelGroupList));
-                    break;
-                default:
-                    throw new IllegalArgumentException("unknown column, " + col);
-            }
-            fireModelsChanged();
-        }
-
-        public boolean isCellEditable(int row, int col) {
-            boolean editable;
-            PartitionClockModel model = options.getPartitionClockModels().get(row);
-            ClockModelGroup group = model.getClockModelGroup();
-            switch (col) {
-                case 1:
-                    editable = !(model.getDataType().getType() == DataType.MICRO_SAT ||
-                            model.getDataType().getType() == DataType.GENERAL);
-                    break;
-                case 2:// Check box
-                case 4:
-                    editable = !group.isFixMean();
-                    break;
-                case 3:
-                    editable = !group.isFixMean();// && !((Boolean) getValueAt(row, 2));
-                    break;
-                default:
-                    editable = true;
-            }
-
-            return editable;
-        }
-
-        public String getColumnName(int column) {
-            return columnNames[column];
-        }
-
-        public Class getColumnClass(int c) {
-            if (getRowCount() == 0) {
-                return Object.class;
-            }
-            return getValueAt(0, c).getClass();
-        }
-
-        public String toString() {
-            StringBuffer buffer = new StringBuffer();
-
-            buffer.append(getColumnName(0));
-            for (int j = 1; j < getColumnCount(); j++) {
-                buffer.append("\t");
-                buffer.append(getColumnName(j));
-            }
-            buffer.append("\n");
-
-            for (int i = 0; i < getRowCount(); i++) {
-                buffer.append(getValueAt(i, 0));
-                for (int j = 1; j < getColumnCount(); j++) {
-                    buffer.append("\t");
-                    buffer.append(getValueAt(i, j));
-                }
-                buffer.append("\n");
-            }
-
-            return buffer.toString();
-        }
-    }
-
-    class ClockTableCellRenderer extends TableRenderer {
-
-        public ClockTableCellRenderer(int alignment, Insets insets) {
-            super(alignment, insets);
-        }
-
-        public Component getTableCellRendererComponent(JTable aTable,
-                                                       Object value,
-                                                       boolean aIsSelected,
-                                                       boolean aHasFocus,
-                                                       int aRow, int aColumn) {
-
-            if (value == null) return this;
-
-            Component renderer = super.getTableCellRendererComponent(aTable,
-                    value,
-                    aIsSelected,
-                    aHasFocus,
-                    aRow, aColumn);
-
-            ClockModelGroup group = options.getPartitionClockModels().get(aRow).getClockModelGroup();
-            if (group.isFixMean() && aColumn > 1) {
-                renderer.setForeground(Color.gray);
-//            } else if (!group.isFixMean() && aColumn == 3) {
-//                renderer.setForeground(Color.gray);
-            } else {
-                renderer.setForeground(Color.black);
-            }
-
-            return this;
-        }
-
-    }
-
-    public class GrayableCheckboxCellRenderer extends JCheckBox implements TableCellRenderer {
-        public Component getTableCellRendererComponent(JTable table, Object value,
-                                 boolean isSelected, boolean hasFocus, int vRowIndex, int vColIndex) {
-            boolean editable = table.getModel().isCellEditable(vRowIndex, vColIndex);
-            setBackground(editable ? Color.WHITE : Color.LIGHT_GRAY);
-            setEnabled(editable);
-            setSelected((Boolean) value);
-            setHorizontalAlignment(SwingConstants.CENTER);
-            return this;
-        }
-    }
-
-    class ClockGroupTableModel extends AbstractTableModel {
-
-        String[] columnNames = {"Group Name", "Fix Mean", "Rate"};
-
-        public ClockGroupTableModel() {
-        }
-
-        public int getColumnCount() {
-            return columnNames.length;
-        }
-
-        public int getRowCount() {
-            if (options == null) return 0;
-//            return options.clockModelOptions.getClockModelGroups().size();
-            return clockModelGroupList.size();
-        }
-
-        public Object getValueAt(int row, int col) {
-            switch (col) {
-                case 0:
-                    return clockModelGroupList.get(row).getName();
-                case 1:
-                    return clockModelGroupList.get(row).isFixMean();
-                case 2:
-                    if (!clockModelGroupList.get(row).isFixMean()) return "";
-                    return clockModelGroupList.get(row).getFixMeanRate();
-            }
-            return null;
-        }
-
-        public void setValueAt(Object aValue, int row, int col) {
-
-            switch (col) {
-                case 0:
-                    String name = ((String) aValue).trim();
-                    if (name.length() > 0) {
-                        clockModelGroupList.get(row).setName(name);
-                        modelsChanged();
-                    }
-                    break;
-                case 1:
-                    ClockModelGroup group = clockModelGroupList.get(row);
-                    group.setFixMean((Boolean) aValue);
-                    if ((Boolean) aValue) {
-                        options.clockModelOptions.fixMeanRate(group);
-                    } else {
-                        options.clockModelOptions.fixRateOfFirstClockPartition(group);
-                    }
-                    break;
-                case 2:
-                    clockModelGroupList.get(row).setFixMeanRate((Double) aValue, options);
-                    break;
-
-                default:
-                    throw new IllegalArgumentException("unknown column, " + col);
-            }
-
-            fireModelsChanged();
-        }
-
-        public boolean isCellEditable(int row, int col) {
-            switch (col) {
-                case 1:// Check box
-                    return options.getPartitionClockModels(options.clockModelOptions.
-                            getGroup(getValueAt(row, 0).toString(), clockModelGroupList)).size() > 1;
-                case 2:
-                    return /*!fixedMeanRateCheck.isSelected() &&*/ ((Boolean) getValueAt(row, 1));
-                default:
-                    return true;
-            }
-        }
-
-        public String getColumnName(int column) {
-            return columnNames[column];
-        }
-
-        public Class getColumnClass(int c) {
-            if (getRowCount() == 0) {
-                return Object.class;
-            }
-            return getValueAt(0, c).getClass();
-        }
-
-        public String toString() {
-            StringBuffer buffer = new StringBuffer();
-
-            buffer.append(getColumnName(0));
-            for (int j = 1; j < getColumnCount(); j++) {
-                buffer.append("\t");
-                buffer.append(getColumnName(j));
-            }
-            buffer.append("\n");
-
-            for (int i = 0; i < getRowCount(); i++) {
-                buffer.append(getValueAt(i, 0));
-                for (int j = 1; j < getColumnCount(); j++) {
-                    buffer.append("\t");
-                    buffer.append(getValueAt(i, j));
-                }
-                buffer.append("\n");
-            }
-
-            return buffer.toString();
-        }
-    }
-
-}
\ No newline at end of file
diff --git a/src/dr/app/beauti/components/ancestralstates/AncestralStatesComponentGenerator.java b/src/dr/app/beauti/components/ancestralstates/AncestralStatesComponentGenerator.java
index fa3e539..6bc61da 100644
--- a/src/dr/app/beauti/components/ancestralstates/AncestralStatesComponentGenerator.java
+++ b/src/dr/app/beauti/components/ancestralstates/AncestralStatesComponentGenerator.java
@@ -157,7 +157,7 @@ public class AncestralStatesComponentGenerator extends BaseComponentGenerator {
 
         writer.writeTag("parameter",
                 new Attribute[]{
-                        new Attribute.Default<String>("id", prefix + "count"),
+                        new Attribute.Default<String>("id", partition.getPrefix() + "count"),
                         new Attribute.Default<String>("value", matrix.toString())},
                 true);
 
diff --git a/src/dr/app/beauti/components/continuous/ContinuousComponentOptions.java b/src/dr/app/beauti/components/continuous/ContinuousComponentOptions.java
index 859c276..1c3427e 100644
--- a/src/dr/app/beauti/components/continuous/ContinuousComponentOptions.java
+++ b/src/dr/app/beauti/components/continuous/ContinuousComponentOptions.java
@@ -74,7 +74,8 @@ public class ContinuousComponentOptions implements ComponentOptions {
                 modelOptions.createParameterBetaDistributionPrior(prefix + LAMBDA,
                         "phylogenetic signal parameter",
                         0.5, 2.0, 2.0, 0.0);
-                modelOptions.createOperator(prefix + LAMBDA, OperatorType.RANDOM_WALK_ABSORBING, 0.3, 10.0);
+                // don't autooptimize
+                modelOptions.createOperator(prefix + LAMBDA, OperatorType.RANDOM_WALK_ABSORBING, 0.3, 10.0, false);
             }
 
             if (!modelOptions.parameterExists(prefix + "swap." + RRW_CATEGORIES)) {
diff --git a/src/dr/app/beauti/components/discrete/DiscreteTraitsComponentGenerator.java b/src/dr/app/beauti/components/discrete/DiscreteTraitsComponentGenerator.java
index d93446c..89abbf4 100644
--- a/src/dr/app/beauti/components/discrete/DiscreteTraitsComponentGenerator.java
+++ b/src/dr/app/beauti/components/discrete/DiscreteTraitsComponentGenerator.java
@@ -28,20 +28,14 @@ package dr.app.beauti.components.discrete;
 import dr.app.beagle.evomodel.parsers.MarkovJumpsTreeLikelihoodParser;
 import dr.app.beauti.components.ancestralstates.AncestralStatesComponentOptions;
 import dr.app.beauti.generator.BaseComponentGenerator;
-import dr.app.beauti.generator.BeastGenerator;
-import dr.app.beauti.generator.BranchRatesModelGenerator;
+import dr.app.beauti.generator.ClockModelGenerator;
 import dr.app.beauti.generator.ComponentGenerator;
 import dr.app.beauti.options.*;
 import dr.app.beauti.util.XMLWriter;
 import dr.evolution.datatype.GeneralDataType;
-import dr.evomodel.branchratemodel.BranchRateModel;
 import dr.evomodel.sitemodel.SiteModel;
 import dr.evomodel.substmodel.AbstractSubstitutionModel;
 import dr.evomodel.tree.TreeModel;
-import dr.evomodelxml.branchratemodel.DiscretizedBranchRatesParser;
-import dr.evomodelxml.branchratemodel.RandomLocalClockModelParser;
-import dr.evomodelxml.branchratemodel.StrictClockBranchRatesParser;
-import dr.evomodelxml.clock.ACLikelihoodParser;
 import dr.evomodelxml.sitemodel.GammaSiteModelParser;
 import dr.evomodelxml.substmodel.ComplexSubstitutionModelParser;
 import dr.evomodelxml.substmodel.FrequencyModelParser;
@@ -329,14 +323,12 @@ public class DiscreteTraitsComponentGenerator extends BaseComponentGenerator {
             writer.writeOpenTag(GeneralSubstitutionModelParser.RATES, new Attribute[]{
                     new Attribute.Default<Integer>(GeneralSubstitutionModelParser.RELATIVE_TO, relativeTo)});
         }
-        options.getParameter(prefix + "rates").isFixed = true;
         writeParameter(options.getParameter(prefix + "rates"), dimension, writer);
 
         writer.writeCloseTag(GeneralSubstitutionModelParser.RATES);
 
         if (model.isActivateBSSVS()) { //If "BSSVS" is not activated, rateIndicator should not be there.
             writer.writeOpenTag(GeneralSubstitutionModelParser.INDICATOR);
-            options.getParameter(prefix + "indicators").isFixed = true;
             writeParameter(options.getParameter(prefix + "indicators"), dimension, writer);
             writer.writeCloseTag(GeneralSubstitutionModelParser.INDICATOR);
         }
@@ -404,7 +396,7 @@ public class DiscreteTraitsComponentGenerator extends BaseComponentGenerator {
         writer.writeIDref(SiteModel.SITE_MODEL, substModel.getName() + "." + SiteModel.SITE_MODEL);
         writer.writeIDref(GeneralSubstitutionModelParser.GENERAL_SUBSTITUTION_MODEL, substModel.getName() + "." + AbstractSubstitutionModel.MODEL);
 
-        BranchRatesModelGenerator.writeBranchRatesModelRef(clockModel, writer);
+        ClockModelGenerator.writeBranchRatesModelRef(clockModel, writer);
 
         if (substModel.getDiscreteSubstType() == DiscreteSubstModelType.ASYM_SUBST) {
             int stateCount = options.getStatesForDiscreteModel(substModel).size();
diff --git a/src/dr/app/beauti/components/dollo/DolloComponentGenerator.java b/src/dr/app/beauti/components/dollo/DolloComponentGenerator.java
index d67f9d0..11c3190 100644
--- a/src/dr/app/beauti/components/dollo/DolloComponentGenerator.java
+++ b/src/dr/app/beauti/components/dollo/DolloComponentGenerator.java
@@ -222,7 +222,7 @@ public class DolloComponentGenerator extends BaseComponentGenerator {
         writer.writeCloseTag(GammaSiteModelParser.SUBSTITUTION_MODEL);
 
         PartitionSubstitutionModel model = partition.getPartitionSubstitutionModel();
-        if (model.hasCodon()) {
+        if (model.hasCodonPartitions()) {
             writeParameter(GammaSiteModelParser.RELATIVE_RATE, "mu", model, writer);
         }
 
diff --git a/src/dr/app/beauti/components/hpm/HierarchicalModelComponentGenerator.java b/src/dr/app/beauti/components/hpm/HierarchicalModelComponentGenerator.java
index 45c2afb..073a0b2 100644
--- a/src/dr/app/beauti/components/hpm/HierarchicalModelComponentGenerator.java
+++ b/src/dr/app/beauti/components/hpm/HierarchicalModelComponentGenerator.java
@@ -248,10 +248,10 @@ public class HierarchicalModelComponentGenerator extends BaseComponentGenerator
         writer.writeOpenTag(getModelTagName(hpm), getModelAttributes(hpm));
         
         writeParameter(NormalDistributionModelParser.MEAN,
-                hpm.getConditionalParameterList().get(0).getName(), 1, hpm.getConditionalParameterList().get(0).initial,
+                hpm.getConditionalParameterList().get(0).getName(), 1, hpm.getConditionalParameterList().get(0).getInitial(),
                 Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, writer);
         writeParameter(NormalDistributionModelParser.PREC,
-                hpm.getConditionalParameterList().get(1).getName(), 1, hpm.getConditionalParameterList().get(1).initial, 
+                hpm.getConditionalParameterList().get(1).getName(), 1, hpm.getConditionalParameterList().get(1).getInitial(),
                 0.0, Double.POSITIVE_INFINITY, writer);
         writer.writeCloseTag(getModelTagName(hpm));
 
diff --git a/src/dr/app/beauti/components/marginalLikelihoodEstimation/MarginalLikelihoodEstimationGenerator.java b/src/dr/app/beauti/components/marginalLikelihoodEstimation/MarginalLikelihoodEstimationGenerator.java
index af786ee..fc1ffd0 100644
--- a/src/dr/app/beauti/components/marginalLikelihoodEstimation/MarginalLikelihoodEstimationGenerator.java
+++ b/src/dr/app/beauti/components/marginalLikelihoodEstimation/MarginalLikelihoodEstimationGenerator.java
@@ -27,8 +27,6 @@ package dr.app.beauti.components.marginalLikelihoodEstimation;
 
 import dr.app.beauti.BeautiFrame;
 import dr.app.beauti.generator.BaseComponentGenerator;
-import dr.app.beauti.generator.ComponentGenerator;
-import dr.app.beauti.generator.Generator;
 import dr.app.beauti.generator.TreePriorGenerator;
 import dr.app.beauti.options.*;
 import dr.app.beauti.types.*;
@@ -638,7 +636,7 @@ public class MarginalLikelihoodEstimationGenerator extends BaseComponentGenerato
                     case UNCORRELATED:
                         switch (model.getClockDistributionType()) {
                             case LOGNORMAL:
-                                if (model.getClockRateParam().isMeanInRealSpace()) {
+                                if (model.getClockRateParameter().isMeanInRealSpace()) {
                                     writer.writeOpenTag(WorkingPriorParsers.LOG_TRANSFORMED_NORMAL_REFERENCE_PRIOR,
                                             new Attribute[]{
                                                     new Attribute.Default<String>("fileName", beautiOptions.logFileName),
diff --git a/src/dr/app/beauti/datapanel/DataPanel.java b/src/dr/app/beauti/datapanel/DataPanel.java
index 53cf34a..0228c54 100644
--- a/src/dr/app/beauti/datapanel/DataPanel.java
+++ b/src/dr/app/beauti/datapanel/DataPanel.java
@@ -212,12 +212,6 @@ public class DataPanel extends BeautiPanel implements Exportable {
         PanelUtils.setupComponent(button);
         controlPanel1.add(button);
 
-//        controlPanel1.add(new JLabel(" or "));
-//
-//        button = new JButton(importTraitsAction);
-//        PanelUtils.setupComponent(button);
-//        controlPanel1.add(button);
-
         JPanel panel1 = new JPanel(new BorderLayout());
         panel1.setOpaque(false);
         panel1.add(useStarBEASTCheck, BorderLayout.NORTH);
@@ -227,14 +221,15 @@ public class DataPanel extends BeautiPanel implements Exportable {
         setBorder(new BorderUIResource.EmptyBorderUIResource(new Insets(12, 12, 12, 12)));
         setLayout(new BorderLayout(0, 0));
         add(panel1, BorderLayout.NORTH);
+//        add(toolBar1, BorderLayout.NORTH);
         add(scrollPane, BorderLayout.CENTER);
         add(controlPanel1, BorderLayout.SOUTH);
 
         useStarBEASTCheck.setEnabled(false);
         useStarBEASTCheck.setToolTipText(STARBEASTOptions.CITATION);
         useStarBEASTCheck.addActionListener(new java.awt.event.ActionListener() {
-            public void actionPerformed(ActionEvent e) {// wrong listener Issue 397: *BEAST in BEAUti is broken
-                if (frame.setupStarBEAST(useStarBEASTCheck.isSelected()) == false) {
+            public void actionPerformed(ActionEvent e) {
+                if (!frame.setupStarBEAST(useStarBEASTCheck.isSelected())) {
                     useStarBEASTCheck.setSelected(false); // go back to unchecked
                 }
 
@@ -534,7 +529,6 @@ public class DataPanel extends BeautiPanel implements Exportable {
             if (!model.getName().equals(partition.getName())) {
                 PartitionClockModel newModel = new PartitionClockModel(options, partition.getName(), model);
                 partition.setPartitionClockModel(newModel);
-                newModel.setClockModelGroup(model.getClockModelGroup()); // set clock model group
             }
         }
 
diff --git a/src/dr/app/beauti/generator/BeastGenerator.java b/src/dr/app/beauti/generator/BeastGenerator.java
index 5ef0066..42754ce 100644
--- a/src/dr/app/beauti/generator/BeastGenerator.java
+++ b/src/dr/app/beauti/generator/BeastGenerator.java
@@ -28,7 +28,6 @@ package dr.app.beauti.generator;
 import dr.app.beast.BeastVersion;
 import dr.app.beauti.BeautiFrame;
 import dr.app.beauti.components.ComponentFactory;
-import dr.app.beauti.components.marginalLikelihoodEstimation.MarginalLikelihoodEstimationOptions;
 import dr.app.beauti.options.*;
 import dr.app.beauti.types.*;
 import dr.app.beauti.util.XMLWriter;
@@ -49,7 +48,6 @@ import dr.evoxml.TaxaParser;
 import dr.evoxml.TaxonParser;
 import dr.inferencexml.distribution.MixedDistributionLikelihoodParser;
 import dr.inferencexml.model.CompoundLikelihoodParser;
-import dr.inferencexml.model.CompoundParameterParser;
 import dr.inferencexml.operators.SimpleOperatorScheduleParser;
 import dr.util.Attribute;
 import dr.util.Version;
@@ -87,7 +85,7 @@ public class BeastGenerator extends Generator {
     private final SubstitutionModelGenerator substitutionModelGenerator;
     private final InitialTreeGenerator initialTreeGenerator;
     private final TreeModelGenerator treeModelGenerator;
-    private final BranchRatesModelGenerator branchRatesModelGenerator;
+    private final ClockModelGenerator clockModelGenerator;
     private final OperatorsGenerator operatorsGenerator;
     private final ParameterPriorGenerator parameterPriorGenerator;
     private final LogGenerator logGenerator;
@@ -107,7 +105,7 @@ public class BeastGenerator extends Generator {
 
         initialTreeGenerator = new InitialTreeGenerator(options, components);
         treeModelGenerator = new TreeModelGenerator(options, components);
-        branchRatesModelGenerator = new BranchRatesModelGenerator(options, components);
+        clockModelGenerator = new ClockModelGenerator(options, components);
 
         operatorsGenerator = new OperatorsGenerator(options, components);
         parameterPriorGenerator = new ParameterPriorGenerator(options, components);
@@ -307,24 +305,24 @@ public class BeastGenerator extends Generator {
 
             //++++++++++++++++ Prior Bounds ++++++++++++++++++
             for (Parameter param : options.selectParameters()) {
-                if (param.initial != Double.NaN) {
-                    if (param.isTruncated && (param.initial < param.truncationLower || param.initial > param.truncationUpper)) {
+                if (param.getInitial() != Double.NaN) {
+                    if (param.isTruncated && (param.getInitial() < param.truncationLower || param.getInitial() > param.truncationUpper)) {
                         throw new GeneratorException("Parameter \"" + param.getName() + "\":" +
-                                "\ninitial value " + param.initial + " is NOT in the range [" + param.truncationLower + ", " + param.truncationUpper + "]," +
+                                "\ninitial value " + param.getInitial() + " is NOT in the range [" + param.truncationLower + ", " + param.truncationUpper + "]," +
                                 "\nor this range is wrong. Please check the Prior panel.", BeautiFrame.PRIORS);
-                    } else if (param.priorType == PriorType.UNIFORM_PRIOR && (param.initial < param.uniformLower || param.initial > param.uniformUpper)) {
+                    } else if (param.priorType == PriorType.UNIFORM_PRIOR && (param.getInitial() < param.uniformLower || param.getInitial() > param.uniformUpper)) {
                         throw new GeneratorException("Parameter \"" + param.getName() + "\":" +
-                                "\ninitial value " + param.initial + " is NOT in the range [" + param.uniformLower + ", " + param.uniformUpper + "]," +
+                                "\ninitial value " + param.getInitial() + " is NOT in the range [" + param.uniformLower + ", " + param.uniformUpper + "]," +
                                 "\nor this range is wrong. Please check the Prior panel.", BeautiFrame.PRIORS);
                     }
-                    if (param.isNonNegative && param.initial < 0.0) {
+                    if (param.isNonNegative && param.getInitial() < 0.0) {
                         throw new GeneratorException("Parameter \"" + param.getName() + "\":" +
-                                "\ninitial value " + param.initial + " should be non-negative. Please check the Prior panel.", BeautiFrame.PRIORS);
+                                "\ninitial value " + param.getInitial() + " should be non-negative. Please check the Prior panel.", BeautiFrame.PRIORS);
                     }
 
-                    if (param.isZeroOne && (param.initial < 0.0 || param.initial > 1.0)) {
+                    if (param.isZeroOne && (param.getInitial() < 0.0 || param.getInitial() > 1.0)) {
                         throw new GeneratorException("Parameter \"" + param.getName() + "\":" +
-                                "\ninitial value " + param.initial + " should lie in the interval [0, 1]. Please check the Prior panel.", BeautiFrame.PRIORS);
+                                "\ninitial value " + param.getInitial() + " should lie in the interval [0, 1]. Please check the Prior panel.", BeautiFrame.PRIORS);
                     }
                 }
             }
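
In Java, a comparison such as initial != Double.NaN is always true because NaN never compares equal to anything, so the guard above effectively admits every initial value. A minimal, self-contained sketch of an equivalent range check that tests with Double.isNaN instead; the class, method, and parameter names below are illustrative, not BEAUti API.

    // Stand-alone sketch of the same kind of bounds check; not BEAUti code.
    final class InitialValueCheck {

        static void validateInitial(String name, double initial, double lower, double upper) {
            if (Double.isNaN(initial)) {
                return; // no initial value set, nothing to validate
            }
            if (initial < lower || initial > upper) {
                throw new IllegalArgumentException("Parameter \"" + name + "\": initial value "
                        + initial + " is not in the range [" + lower + ", " + upper + "]");
            }
        }

        public static void main(String[] args) {
            validateInitial("clock.rate", 1.0, 0.0, Double.POSITIVE_INFINITY);   // passes
            try {
                validateInitial("kappa", -1.0, 0.0, Double.POSITIVE_INFINITY);   // out of range
            } catch (IllegalArgumentException expected) {
                System.out.println(expected.getMessage());
            }
        }
    }
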
@@ -543,22 +541,9 @@ public class BeastGenerator extends Generator {
         //++++++++++++++++ Branch Rates Model ++++++++++++++++++
         try {
             for (PartitionClockModel model : options.getPartitionClockModels()) {
-                branchRatesModelGenerator.writeBranchRatesModel(model, writer);
+                clockModelGenerator.writeBranchRatesModel(model, writer);
                 writer.writeText("");
             }
-
-            // write allClockRate for fix mean option in clock model panel
-            for (ClockModelGroup clockModelGroup : options.clockModelOptions.getClockModelGroups()) {
-                if (clockModelGroup.getRateTypeOption() == FixRateType.FIX_MEAN) {
-                    writer.writeOpenTag(CompoundParameterParser.COMPOUND_PARAMETER,
-                            new Attribute[]{new Attribute.Default<String>(XMLParser.ID, clockModelGroup.getName())});
-                    for (PartitionClockModel model : options.getPartitionClockModels(clockModelGroup)) {
-                        branchRatesModelGenerator.writeAllClockRateRefs(model, writer);
-                    }
-                    writer.writeCloseTag(CompoundParameterParser.COMPOUND_PARAMETER);
-                    writer.writeText("");
-                }
-            }
         } catch (Exception e) {
             e.printStackTrace();
             throw new GeneratorException("Branch rates model generation has failed:\n" + e.getMessage());
@@ -568,7 +553,6 @@ public class BeastGenerator extends Generator {
         try {
             for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
                 substitutionModelGenerator.writeSubstitutionSiteModel(model, writer);
-                substitutionModelGenerator.writeAllMus(model, writer); // allMus
                 writer.writeText("");
             }
 
@@ -578,6 +562,16 @@ public class BeastGenerator extends Generator {
             throw new GeneratorException("Substitution model or site model generation has failed:\n" + e.getMessage());
         }
 
+        //++++++++++++++++ AllMus parameter ++++++++++++++++++
+        try {
+            for (PartitionClockModel model : options.getPartitionClockModels()) {
+                clockModelGenerator.writeAllMus(model, writer);
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new GeneratorException("Clock model generation has failed:\n" + e.getMessage());
+        }
+
         //++++++++++++++++ Site Model ++++++++++++++++++
 //        for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
 //            substitutionModelGenerator.writeSiteModel(model, writer); // site model
@@ -925,7 +919,7 @@ public class BeastGenerator extends Generator {
             writer.writeOpenTag(CompoundLikelihoodParser.LIKELIHOOD, new Attribute.Default<String>(XMLParser.ID, "likelihood"));
 
             treeLikelihoodGenerator.writeTreeLikelihoodReferences(writer);
-            branchRatesModelGenerator.writeClockLikelihoodReferences(writer);
+            clockModelGenerator.writeClockLikelihoodReferences(writer);
 
             generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_MCMC_LIKELIHOOD, writer);
 
@@ -937,10 +931,10 @@ public class BeastGenerator extends Generator {
         writer.writeIDref(SimpleOperatorScheduleParser.OPERATOR_SCHEDULE, "operators");
 
         // write log to screen
-        logGenerator.writeLogToScreen(writer, branchRatesModelGenerator, substitutionModelGenerator);
+        logGenerator.writeLogToScreen(writer, clockModelGenerator, substitutionModelGenerator);
 
         // write log to file
-        logGenerator.writeLogToFile(writer, treePriorGenerator, branchRatesModelGenerator,
+        logGenerator.writeLogToFile(writer, treePriorGenerator, clockModelGenerator,
                 substitutionModelGenerator, treeLikelihoodGenerator);
 
         // write tree log to file
diff --git a/src/dr/app/beauti/generator/BranchRatesModelGenerator.java b/src/dr/app/beauti/generator/ClockModelGenerator.java
similarity index 95%
rename from src/dr/app/beauti/generator/BranchRatesModelGenerator.java
rename to src/dr/app/beauti/generator/ClockModelGenerator.java
index bb5881e..9da22f8 100644
--- a/src/dr/app/beauti/generator/BranchRatesModelGenerator.java
+++ b/src/dr/app/beauti/generator/ClockModelGenerator.java
@@ -30,6 +30,7 @@ import dr.app.beauti.options.*;
 import dr.app.beauti.types.ClockType;
 import dr.app.beauti.types.PriorType;
 import dr.app.beauti.util.XMLWriter;
+import dr.evolution.datatype.DataType;
 import dr.evolution.util.Taxa;
 import dr.evomodel.branchratemodel.BranchRateModel;
 import dr.evomodel.branchratemodel.ContinuousBranchRates;
@@ -57,9 +58,9 @@ import java.util.List;
  * @author Alexei Drummond
  * @author Andrew Rambaut
  */
-public class BranchRatesModelGenerator extends Generator {
+public class ClockModelGenerator extends Generator {
 
-    public BranchRatesModelGenerator(BeautiOptions options, ComponentFactory[] components) {
+    public ClockModelGenerator(BeautiOptions options, ComponentFactory[] components) {
         super(options, components);
     }
 
@@ -275,11 +276,11 @@ public class BranchRatesModelGenerator extends Generator {
                     writer.writeOpenTag(ACLikelihoodParser.AC_LIKELIHOOD, attributes);
                     writer.writeIDref(TreeModel.TREE_MODEL, treePrefix + TreeModel.TREE_MODEL);
 
-                    if (!model.isEstimatedRate()) { //TODO move to options or panel select method
-                        Parameter para = tree.getParameter(TreeModel.TREE_MODEL + "." + RateEvolutionLikelihood.ROOTRATE);//"treeModel.rootRate"
-                        para.isFixed = true;
-                        para.initial = model.getRate();
-                    }
+//                    if (!model.isEstimatedRate()) { //TODO move to options or panel select method
+//                        Parameter parameter = tree.getParameter(TreeModel.TREE_MODEL + "." + RateEvolutionLikelihood.ROOTRATE);//"treeModel.rootRate"
+//                        parameter.isFixed = true;
+//                        parameter.initial = model.getRate();
+//                    }
 
                     writer.writeOpenTag(RateEvolutionLikelihood.RATES,
                             new Attribute[]{
@@ -308,7 +309,7 @@ public class BranchRatesModelGenerator extends Generator {
 
                     writer.writeCloseTag(ACLikelihoodParser.AC_LIKELIHOOD);
 
-                    if (model.isEstimatedRate()) {//TODO
+//                    if (model.isEstimatedRate()) {//TODO
                         writer.writeText("");
                         writer.writeOpenTag(CompoundParameterParser.COMPOUND_PARAMETER,
                                 new Attribute[]{new Attribute.Default<String>(XMLParser.ID, options.noDuplicatedPrefix(modelPrefix, treePrefix) + TreeModel.TREE_MODEL
@@ -318,7 +319,7 @@ public class BranchRatesModelGenerator extends Generator {
                         writer.writeIDref(ParameterParser.PARAMETER, options.noDuplicatedPrefix(modelPrefix, treePrefix) + TreeModel.TREE_MODEL + "."
                                 + RateEvolutionLikelihood.ROOTRATE);
                         writer.writeCloseTag(CompoundParameterParser.COMPOUND_PARAMETER);
-                    }
+//                    }
 
                     writer.writeText("");
                     writer.writeOpenTag(
@@ -587,6 +588,31 @@ public class BranchRatesModelGenerator extends Generator {
         writer.writeIDref(tag, id);
     }
 
+    /**
+     * Write the allMus for each partition model.
+     *
+     * @param model  PartitionClockModel
+     * @param writer XMLWriter
+     */
+    public void writeAllMus(PartitionClockModel model, XMLWriter writer) {
+        Parameter allMus = model.getParameter("allMus");
+        if (allMus.getSubParameters().size() > 1) {
+            writer.writeComment("Collecting together relative rates for partitions");
+
+            // allMus is global for each gene
+            writer.writeOpenTag(CompoundParameterParser.COMPOUND_PARAMETER,
+                    new Attribute[]{new Attribute.Default<String>(XMLParser.ID, model.getPrefix() + "allMus")});
+
+            for (Parameter parameter : allMus.getSubParameters()) {
+                writer.writeIDref(ParameterParser.PARAMETER, parameter.getName());
+            }
+
+            writer.writeCloseTag(CompoundParameterParser.COMPOUND_PARAMETER);
+            writer.writeText("");
+        }
+    }
+
+
     public void writeAllClockRateRefs(PartitionClockModel model, XMLWriter writer) {
         writer.writeIDref(ParameterParser.PARAMETER, getClockRateString(model));
     }
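
For context, writeAllMus() above gathers the per-partition relative rates of a clock model into one compound parameter. A rough sketch of the kind of XML block this produces, built with a plain StringBuilder rather than the XMLWriter used above; the ids (allMus, CP1.mu, CP2.mu, CP3.mu) are made-up examples.

    // Hypothetical sketch only; ids and layout are illustrative.
    import java.util.List;

    final class AllMusSketch {

        static String compoundParameter(String id, List<String> muIds) {
            StringBuilder xml = new StringBuilder();
            xml.append("<compoundParameter id=\"").append(id).append("\">\n");
            for (String mu : muIds) {
                xml.append("    <parameter idref=\"").append(mu).append("\"/>\n");
            }
            xml.append("</compoundParameter>\n");
            return xml.toString();
        }

        public static void main(String[] args) {
            System.out.print(compoundParameter("allMus", List.of("CP1.mu", "CP2.mu", "CP3.mu")));
        }
    }
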
@@ -628,6 +654,11 @@ public class BranchRatesModelGenerator extends Generator {
     public void writeLog(PartitionClockModel model, XMLWriter writer) {
         setModelPrefix(model.getPrefix());
 
+        Parameter allMus = model.getParameter("allMus");
+        if (allMus.getSubParameters().size() > 1) {
+            writer.writeIDref(CompoundParameterParser.COMPOUND_PARAMETER, model.getPrefix() + "allMus");
+        }
+
         switch (model.getClockType()) {
             case STRICT_CLOCK:
             case RANDOM_LOCAL_CLOCK:
@@ -718,8 +749,7 @@ public class BranchRatesModelGenerator extends Generator {
     }
 
     public void writeClockLikelihoodReferences(XMLWriter writer) {
-        for (AbstractPartitionData partition : options.dataPartitions) { // Each PD has one TreeLikelihood
-            PartitionClockModel clockModel = partition.getPartitionClockModel();
+        for (PartitionClockModel clockModel : options.getPartitionClockModels()) { // Each PD has one TreeLikelihood
             writeBranchRatesModelRef(clockModel, writer);
         }
     }
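
writeClockLikelihoodReferences() now iterates over the clock models themselves rather than over data partitions, so a clock model shared by several partitions is referenced only once. A small stand-alone sketch of that de-duplication idea; Partition and the model names are illustrative, not BEAUti types.

    // Illustrative only: distinct clock models in insertion order.
    import java.util.LinkedHashSet;
    import java.util.List;
    import java.util.Set;

    final class DistinctModelsSketch {

        record Partition(String name, String clockModel) {}

        static Set<String> distinctClockModels(List<Partition> partitions) {
            Set<String> models = new LinkedHashSet<>();
            for (Partition p : partitions) {
                models.add(p.clockModel()); // duplicates dropped, order kept
            }
            return models;
        }

        public static void main(String[] args) {
            List<Partition> parts = List.of(
                    new Partition("gene1", "default.clock"),
                    new Partition("gene2", "default.clock"),
                    new Partition("gene3", "relaxed.clock"));
            System.out.println(distinctClockModels(parts)); // [default.clock, relaxed.clock]
        }
    }
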
diff --git a/src/dr/app/beauti/generator/Generator.java b/src/dr/app/beauti/generator/Generator.java
index 652a290..b997d91 100644
--- a/src/dr/app/beauti/generator/Generator.java
+++ b/src/dr/app/beauti/generator/Generator.java
@@ -200,8 +200,8 @@ public abstract class Generator {
         if (parameter == null) {
             throw new IllegalArgumentException("parameter (== null) is unknown");
         }
-        if (parameter.isFixed) { // with prefix
-            writeParameter(parameter.getName(), dimension, parameter.initial, Double.NaN, Double.NaN, writer);
+        if (parameter.isFixed()) { // with prefix
+            writeParameter(parameter.getName(), dimension, parameter.getInitial(), Double.NaN, Double.NaN, writer);
         } else {
             double lower = Double.NaN;
             double upper = Double.NaN;
@@ -212,7 +212,7 @@ public abstract class Generator {
                 lower = 0.0;
                 upper = 1.0;
             }
-            writeParameter(parameter.getName(), dimension, parameter.initial, lower, upper, writer);
+            writeParameter(parameter.getName(), dimension, parameter.getInitial(), lower, upper, writer);
         }
     }
 
@@ -242,8 +242,8 @@ public abstract class Generator {
     }
 
     public void writeParameter(String id, Parameter parameter, XMLWriter writer) {
-        if (parameter.isFixed) {
-            writeParameter(id, 1, parameter.initial, Double.NaN, Double.NaN, writer);
+        if (parameter.isFixed()) {
+            writeParameter(id, 1, parameter.getInitial(), Double.NaN, Double.NaN, writer);
         } else {
             double lower = Double.NaN;
             double upper = Double.NaN;
@@ -254,7 +254,7 @@ public abstract class Generator {
                 lower = 0.0;
                 upper = 1.0;
             }
-            writeParameter(id, 1, parameter.initial, lower, upper, writer);
+            writeParameter(id, 1, parameter.getInitial(), lower, upper, writer);
         }
     }
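
The writeParameter() variants above choose bounds from the parameter's flags: a fixed parameter is written with its initial value and no bounds, a zero-one parameter gets [0, 1], and a non-negative one gets a lower bound of 0. A compact sketch of that selection logic under those assumptions; Bounds is an illustrative stand-in, not a BEAUti type.

    // Sketch of the bound-selection logic; names are illustrative.
    final class BoundsSketch {

        record Bounds(double lower, double upper) {}

        static Bounds chooseBounds(boolean isFixed, boolean isNonNegative, boolean isZeroOne) {
            if (isFixed) {
                return new Bounds(Double.NaN, Double.NaN); // value only, no bounds written
            }
            if (isZeroOne) {
                return new Bounds(0.0, 1.0);
            }
            if (isNonNegative) {
                return new Bounds(0.0, Double.NaN);
            }
            return new Bounds(Double.NaN, Double.NaN);
        }

        public static void main(String[] args) {
            System.out.println(chooseBounds(false, true, false)); // Bounds[lower=0.0, upper=NaN]
        }
    }
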
 
diff --git a/src/dr/app/beauti/generator/InitialTreeGenerator.java b/src/dr/app/beauti/generator/InitialTreeGenerator.java
index d105ff9..c2d11fa 100644
--- a/src/dr/app/beauti/generator/InitialTreeGenerator.java
+++ b/src/dr/app/beauti/generator/InitialTreeGenerator.java
@@ -76,7 +76,7 @@ public class InitialTreeGenerator extends Generator {
                 Attribute[] attributes = (rootHeight.priorType != PriorType.NONE_TREE_PRIOR ?
                         new Attribute[] {
                                 new Attribute.Default<String>(XMLParser.ID, modelPrefix + STARTING_TREE),
-                                new Attribute.Default<String>(RescaledTreeParser.HEIGHT, "" + rootHeight.initial)
+                                new Attribute.Default<String>(RescaledTreeParser.HEIGHT, "" + rootHeight.getInitial())
                         } :
                         new Attribute[] {
                                 new Attribute.Default<String>(XMLParser.ID, modelPrefix + STARTING_TREE)
diff --git a/src/dr/app/beauti/generator/LogGenerator.java b/src/dr/app/beauti/generator/LogGenerator.java
index 8675793..f80d7c1 100644
--- a/src/dr/app/beauti/generator/LogGenerator.java
+++ b/src/dr/app/beauti/generator/LogGenerator.java
@@ -27,8 +27,6 @@ package dr.app.beauti.generator;
 
 import dr.app.beauti.components.ComponentFactory;
 import dr.app.beauti.options.*;
-import dr.app.beauti.types.ClockType;
-import dr.app.beauti.types.FixRateType;
 import dr.app.beauti.types.TreePriorType;
 import dr.app.beauti.util.XMLWriter;
 import dr.evolution.datatype.DataType;
@@ -48,7 +46,6 @@ import dr.inferencexml.distribution.MixedDistributionLikelihoodParser;
 import dr.inferencexml.loggers.ColumnsParser;
 import dr.inferencexml.loggers.LoggerParser;
 import dr.inferencexml.model.CompoundLikelihoodParser;
-import dr.inferencexml.model.CompoundParameterParser;
 import dr.util.Attribute;
 import dr.xml.XMLParser;
 
@@ -73,9 +70,9 @@ public class LogGenerator extends Generator {
      * write log to screen
      *
      * @param writer                    XMLWriter
-     * @param branchRatesModelGenerator BranchRatesModelGenerator
+     * @param clockModelGenerator ClockModelGenerator
      */
-    public void writeLogToScreen(XMLWriter writer, BranchRatesModelGenerator branchRatesModelGenerator,
+    public void writeLogToScreen(XMLWriter writer, ClockModelGenerator clockModelGenerator,
                                  SubstitutionModelGenerator substitutionModelGenerator) {
         writer.writeComment("write log to screen");
 
@@ -146,27 +143,19 @@ public class LogGenerator extends Generator {
         }
 
         for (PartitionClockModel model : options.getPartitionClockModels()) {
-            writer.writeOpenTag(ColumnsParser.COLUMN,
-                    new Attribute[]{
-                            new Attribute.Default<String>(ColumnsParser.LABEL, branchRatesModelGenerator.getClockRateString(model)),
-                            new Attribute.Default<String>(ColumnsParser.SIGNIFICANT_FIGURES, "6"),
-                            new Attribute.Default<String>(ColumnsParser.WIDTH, "12")
-                    }
-            );
+            if (!model.getClockRateParameter().isFixed()) {
+                writer.writeOpenTag(ColumnsParser.COLUMN,
+                        new Attribute[]{
+                                new Attribute.Default<String>(ColumnsParser.LABEL, clockModelGenerator.getClockRateString(model)),
+                                new Attribute.Default<String>(ColumnsParser.SIGNIFICANT_FIGURES, "6"),
+                                new Attribute.Default<String>(ColumnsParser.WIDTH, "12")
+                        }
+                );
 
-            branchRatesModelGenerator.writeAllClockRateRefs(model, writer);
-//        if (options.clockModelOptions.getRateOptionClockModel() == FixRateType.FIX_MEAN) {
-//            writer.writeIDref(ParameterParser.PARAMETER, "allClockRates");
-//            for (PartitionClockModel model : options.getPartitionClockModels()) {
-//                if (model.getClockType() == ClockType.UNCORRELATED_LOGNORMAL)
-//                    writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + ClockType.UCLD_STDEV);
-//            }
-//        } else {
-//            for (PartitionClockModel model : options.getPartitionClockModels()) {
-//                branchRatesModelGenerator.writeAllClockRateRefs(model, writer);
-//            }
-//        }
-            writer.writeCloseTag(ColumnsParser.COLUMN);
+                clockModelGenerator.writeAllClockRateRefs(model, writer);
+
+                writer.writeCloseTag(ColumnsParser.COLUMN);
+            }
         }
 
         for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
@@ -186,13 +175,13 @@ public class LogGenerator extends Generator {
      *
      * @param writer                     XMLWriter
      * @param treePriorGenerator         TreePriorGenerator
-     * @param branchRatesModelGenerator  BranchRatesModelGenerator
+     * @param clockModelGenerator  ClockModelGenerator
      * @param substitutionModelGenerator SubstitutionModelGenerator
      * @param treeLikelihoodGenerator    TreeLikelihoodGenerator
      */
     public void writeLogToFile(XMLWriter writer,
                                TreePriorGenerator treePriorGenerator,
-                               BranchRatesModelGenerator branchRatesModelGenerator,
+                               ClockModelGenerator clockModelGenerator,
                                SubstitutionModelGenerator substitutionModelGenerator,
                                TreeLikelihoodGenerator treeLikelihoodGenerator) {
         writer.writeComment("write log to file");
@@ -262,59 +251,26 @@ public class LogGenerator extends Generator {
             }
         }
 
-//        if ( options.shareSameTreePrior ) { // Share Same Tree Prior
-//	        treePriorGenerator.setModelPrefix("");
-//        	treePriorGenerator.writeParameterLog(options.activedSameTreePrior, writer);
-//        } else { // no species
         for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
-//	        	treePriorGenerator.setModelPrefix(prior.getPrefix()); // priorName.treeModel
             treePriorGenerator.writeParameterLog(prior, writer);
         }
-//	    }
 
         for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
             substitutionModelGenerator.writeLog(model, writer);
-            if (model.hasCodon()) {
-                writer.writeIDref(CompoundParameterParser.COMPOUND_PARAMETER, model.getPrefix() + "allMus");
-            }
         }
 
-        for (ClockModelGroup clockModelGroup : options.clockModelOptions.getClockModelGroups()) {
-            if (clockModelGroup.getRateTypeOption() == FixRateType.FIX_MEAN) {
-                writer.writeIDref(ParameterParser.PARAMETER, clockModelGroup.getName());
-                for (PartitionClockModel model : options.getPartitionClockModels(clockModelGroup)) {
-                    if (model.getClockType() == ClockType.UNCORRELATED) {
-                        switch (model.getClockDistributionType()) {
-                            case LOGNORMAL:
-                                writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + ClockType.UCLD_STDEV);
-                                break;
-                            case GAMMA:
-                                throw new UnsupportedOperationException("Uncorrelated gamma model not implemented yet");
-//                            break;
-                            case CAUCHY:
-                                throw new UnsupportedOperationException("Uncorrelated Cauchy model not implemented yet");
-//                            break;
-                            case EXPONENTIAL:
-                                // nothing required
-                                break;
-                        }
-                    }
-                }
-            } else {
-                for (PartitionClockModel model : options.getPartitionClockModels(clockModelGroup)) {
-                    branchRatesModelGenerator.writeLog(model, writer);
-                }
-            }
+        for (PartitionClockModel model : options.getPartitionClockModels()) {
+            clockModelGenerator.writeLog(model, writer);
         }
 
         for (PartitionClockModel model : options.getPartitionClockModels()) {
-            branchRatesModelGenerator.writeLogStatistic(model, writer);
+            clockModelGenerator.writeLogStatistic(model, writer);
         }
 
         generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_FILE_LOG_PARAMETERS, writer);
 
         treeLikelihoodGenerator.writeTreeLikelihoodReferences(writer);
-        branchRatesModelGenerator.writeClockLikelihoodReferences(writer);
+        clockModelGenerator.writeClockLikelihoodReferences(writer);
 
         generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_FILE_LOG_LIKELIHOODS, writer);
 
@@ -340,10 +296,10 @@ public class LogGenerator extends Generator {
     }
 
     public void writeDemographicLogToFile(XMLWriter writer,
-                               TreePriorGenerator treePriorGenerator,
-                               BranchRatesModelGenerator branchRatesModelGenerator,
-                               SubstitutionModelGenerator substitutionModelGenerator,
-                               TreeLikelihoodGenerator treeLikelihoodGenerator) {
+                                          TreePriorGenerator treePriorGenerator,
+                                          ClockModelGenerator clockModelGenerator,
+                                          SubstitutionModelGenerator substitutionModelGenerator,
+                                          TreeLikelihoodGenerator treeLikelihoodGenerator) {
         writer.writeComment("demographic log file");
 
         if (options.demographicLogFileName == null) {
@@ -394,47 +350,39 @@ public class LogGenerator extends Generator {
 
         for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
             substitutionModelGenerator.writeLog(model, writer);
-            if (model.hasCodon()) {
-                writer.writeIDref(CompoundParameterParser.COMPOUND_PARAMETER, model.getPrefix() + "allMus");
-            }
         }
 
-        for (ClockModelGroup clockModelGroup : options.clockModelOptions.getClockModelGroups()) {
-            if (clockModelGroup.getRateTypeOption() == FixRateType.FIX_MEAN) {
-                writer.writeIDref(ParameterParser.PARAMETER, clockModelGroup.getName());
-                for (PartitionClockModel model : options.getPartitionClockModels(clockModelGroup)) {
-                    if (model.getClockType() == ClockType.UNCORRELATED) {
-                        switch (model.getClockDistributionType()) {
-                            case LOGNORMAL:
-                                writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + ClockType.UCLD_STDEV);
-                                break;
-                            case GAMMA:
-                                throw new UnsupportedOperationException("Uncorrelated gamma model not implemented yet");
+        for (PartitionClockModel model : options.getPartitionClockModels()) {
+//            if (model.getRateTypeOption() == FixRateType.FIXED_MEAN) {
+//                writer.writeIDref(ParameterParser.PARAMETER, model.getName());
+//                if (model.getClockType() == ClockType.UNCORRELATED) {
+//                    switch (model.getClockDistributionType()) {
+//                        case LOGNORMAL:
+//                            writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + ClockType.UCLD_STDEV);
 //                            break;
-                            case CAUCHY:
-                                throw new UnsupportedOperationException("Uncorrelated Cauchy model not implemented yet");
+//                        case GAMMA:
+//                            throw new UnsupportedOperationException("Uncorrelated gamma model not implemented yet");
+////                            break;
+//                        case CAUCHY:
+//                            throw new UnsupportedOperationException("Uncorrelated Cauchy model not implemented yet");
+////                            break;
+//                        case EXPONENTIAL:
+//                            // nothing required
 //                            break;
-                            case EXPONENTIAL:
-                                // nothing required
-                                break;
-                        }
-                    }
-                }
-            } else {
-                for (PartitionClockModel model : options.getPartitionClockModels(clockModelGroup)) {
-                    branchRatesModelGenerator.writeLog(model, writer);
-                }
-            }
+//                    }
+//                }
+//            }
+            clockModelGenerator.writeLog(model, writer);
         }
 
         for (PartitionClockModel model : options.getPartitionClockModels()) {
-            branchRatesModelGenerator.writeLogStatistic(model, writer);
+            clockModelGenerator.writeLogStatistic(model, writer);
         }
 
         generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_FILE_LOG_PARAMETERS, writer);
 
         treeLikelihoodGenerator.writeTreeLikelihoodReferences(writer);
-        branchRatesModelGenerator.writeClockLikelihoodReferences(writer);
+        clockModelGenerator.writeClockLikelihoodReferences(writer);
 
         generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_FILE_LOG_LIKELIHOODS, writer);
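
Earlier in this file, writeLogToScreen() was changed to emit a rate column only when the clock rate parameter is not fixed, and the FIX_MEAN/ClockModelGroup branching in the file loggers was collapsed to a plain writeLog() call per clock model. A tiny illustrative sketch of the screen-column pattern in plain Java; ClockModel and the labels are hypothetical.

    // Hypothetical sketch: skip log columns for fixed rates.
    import java.util.ArrayList;
    import java.util.List;

    final class ScreenLogSketch {

        record ClockModel(String rateLabel, boolean rateFixed) {}

        static List<String> screenColumns(List<ClockModel> models) {
            List<String> columns = new ArrayList<>();
            for (ClockModel m : models) {
                if (!m.rateFixed()) {   // a fixed rate is a constant, so logging it adds nothing
                    columns.add(m.rateLabel());
                }
            }
            return columns;
        }

        public static void main(String[] args) {
            System.out.println(screenColumns(List.of(
                    new ClockModel("clock.rate", true),
                    new ClockModel("ucld.mean", false)))); // [ucld.mean]
        }
    }
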
 
diff --git a/src/dr/app/beauti/generator/OperatorsGenerator.java b/src/dr/app/beauti/generator/OperatorsGenerator.java
index 99fd690..3ae41c8 100644
--- a/src/dr/app/beauti/generator/OperatorsGenerator.java
+++ b/src/dr/app/beauti/generator/OperatorsGenerator.java
@@ -28,8 +28,6 @@ package dr.app.beauti.generator;
 import dr.app.beauti.components.ComponentFactory;
 import dr.app.beauti.options.*;
 import dr.app.beauti.types.ClockType;
-import dr.app.beauti.types.FixRateType;
-import dr.app.beauti.types.RelativeRatesType;
 import dr.app.beauti.types.TreePriorType;
 import dr.app.beauti.util.XMLWriter;
 import dr.evolution.datatype.DataType;
@@ -108,7 +106,7 @@ public class OperatorsGenerator extends Generator {
         );
 
         for (Operator operator : operators) {
-            if (operator.weight > 0. && operator.inUse) {
+            if (operator.getWeight() > 0. && operator.isUsed()) {
                 setModelPrefix(operator.getPrefix());
 
                 writeOperator(operator, writer);
@@ -122,7 +120,7 @@ public class OperatorsGenerator extends Generator {
 
     private void writeOperator(Operator operator, XMLWriter writer) {
 
-        switch (operator.operatorType) {
+        switch (operator.getOperatorType()) {
 
             case SCALE:
                 writeScaleOperator(operator, writer);
@@ -223,11 +221,11 @@ public class OperatorsGenerator extends Generator {
     }
 
     private void writeParameter1Ref(XMLWriter writer, Operator operator) {
-        writer.writeIDref(ParameterParser.PARAMETER, operator.parameter1.getName());
+        writer.writeIDref(ParameterParser.PARAMETER, operator.getParameter1().getName());
     }
 
     private void writeParameter2Ref(XMLWriter writer, Operator operator) {
-        writer.writeIDref(ParameterParser.PARAMETER, operator.parameter2.getName());
+        writer.writeIDref(ParameterParser.PARAMETER, operator.getParameter2().getName());
     }
 
     private void writeOperatorRef(XMLWriter writer, Operator operator) {
@@ -238,8 +236,8 @@ public class OperatorsGenerator extends Generator {
         writer.writeOpenTag(
                 ScaleOperatorParser.SCALE_OPERATOR,
                 new Attribute[]{
-                        new Attribute.Default<Double>(ScaleOperatorParser.SCALE_FACTOR, operator.tuning),
-                        getWeightAttribute(operator.weight)
+                        new Attribute.Default<Double>(ScaleOperatorParser.SCALE_FACTOR, operator.getTuning()),
+                        getWeightAttribute(operator.getWeight())
                 });
         writeParameter1Ref(writer, operator);
 //        writeOperatorRef(writer, operator);
@@ -250,8 +248,8 @@ public class OperatorsGenerator extends Generator {
         writer.writeOpenTag(
                 ScaleOperatorParser.SCALE_OPERATOR,
                 new Attribute[]{
-                        new Attribute.Default<Double>(ScaleOperatorParser.SCALE_FACTOR, operator.tuning),
-                        getWeightAttribute(operator.weight),
+                        new Attribute.Default<Double>(ScaleOperatorParser.SCALE_FACTOR, operator.getTuning()),
+                        getWeightAttribute(operator.getWeight()),
                         new Attribute.Default<String>(ScaleOperatorParser.SCALE_ALL_IND, indepedently ? "true" : "false")
                 });
         writeParameter1Ref(writer, operator);
@@ -264,8 +262,8 @@ public class OperatorsGenerator extends Generator {
         writer.writeOpenTag(
                 name,
                 new Attribute[]{
-                        new Attribute.Default<Double>("windowSize", operator.tuning),
-                        getWeightAttribute(operator.weight)
+                        new Attribute.Default<Double>("windowSize", operator.getTuning()),
+                        getWeightAttribute(operator.getWeight())
                 });
         writeParameter1Ref(writer, operator);
 //        writeOperatorRef(writer, operator);
@@ -277,10 +275,13 @@ public class OperatorsGenerator extends Generator {
         writer.writeOpenTag(
                 name,
                 new Attribute[]{
-                        new Attribute.Default<Double>("windowSize", operator.tuning),
-                        getWeightAttribute(operator.weight),
+                        new Attribute.Default<Double>("windowSize", operator.getTuning()),
+                        getWeightAttribute(operator.getWeight()),
                         new Attribute.Default<String>("boundaryCondition",
-                                (reflecting ? "reflecting" : "absorbing"))
+                                (reflecting ? "reflecting" : "absorbing")),
+                        (operator.isAutoOptimize() == false ?
+                                new Attribute.Default<Boolean>("autoOptimize", false) :
+                                null)
                 });
         writeParameter1Ref(writer, operator);
 //        writeOperatorRef(writer, operator);
@@ -292,8 +293,8 @@ public class OperatorsGenerator extends Generator {
         writer.writeOpenTag(
                 name,
                 new Attribute[]{
-                        new Attribute.Default<Double>("windowSize", operator.tuning),
-                        getWeightAttribute(operator.weight)
+                        new Attribute.Default<Double>("windowSize", operator.getTuning()),
+                        getWeightAttribute(operator.getWeight())
                 });
         writeParameter1Ref(writer, operator);
         writer.writeCloseTag(name);
@@ -301,14 +302,14 @@ public class OperatorsGenerator extends Generator {
 
     private void writeIntegerRandomWalkOperator(Operator operator, XMLWriter writer) {
 
-        int windowSize = (int) Math.round(operator.tuning);
+        int windowSize = (int) Math.round(operator.getTuning());
         if (windowSize < 1) windowSize = 1;
         final String name = RandomWalkIntegerOperatorParser.RANDOM_WALK_INTEGER_OPERATOR;
         writer.writeOpenTag(
                 name,
                 new Attribute[]{
                         new Attribute.Default<Integer>("windowSize", windowSize),
-                        getWeightAttribute(operator.weight)
+                        getWeightAttribute(operator.getWeight())
                 });
         writeParameter1Ref(writer, operator);
 //        writeOperatorRef(writer, operator);
@@ -319,17 +320,17 @@ public class OperatorsGenerator extends Generator {
         writer.writeOpenTag(
                 ScaleOperatorParser.SCALE_OPERATOR,
                 new Attribute[]{
-                        new Attribute.Default<Double>(ScaleOperatorParser.SCALE_FACTOR, operator.tuning),
+                        new Attribute.Default<Double>(ScaleOperatorParser.SCALE_FACTOR, operator.getTuning()),
                         new Attribute.Default<String>(ScaleOperatorParser.SCALE_ALL, "true"),
-                        getWeightAttribute(operator.weight)
+                        getWeightAttribute(operator.getWeight())
                 });
 
-        if (operator.parameter2 == null) {
+        if (operator.getParameter2() == null) {
             writeParameter1Ref(writer, operator);
         } else {
             writer.writeOpenTag(CompoundParameterParser.COMPOUND_PARAMETER);
             writeParameter1Ref(writer, operator);
-//            writer.writeIDref(ParameterParser.PARAMETER, operator.parameter2.getName());
+//            writer.writeIDref(ParameterParser.PARAMETER, operator.getParameter2().getName());
             writeParameter2Ref(writer, operator);
             writer.writeCloseTag(CompoundParameterParser.COMPOUND_PARAMETER);
         }
@@ -340,8 +341,8 @@ public class OperatorsGenerator extends Generator {
     private void writeCenteredOperator(Operator operator, XMLWriter writer) {
         writer.writeOpenTag(CenteredScaleOperatorParser.CENTERED_SCALE,
                 new Attribute[]{
-                        new Attribute.Default<Double>(CenteredScaleOperatorParser.SCALE_FACTOR, operator.tuning),
-                        getWeightAttribute(operator.weight)
+                        new Attribute.Default<Double>(CenteredScaleOperatorParser.SCALE_FACTOR, operator.getTuning()),
+                        getWeightAttribute(operator.getWeight())
                 }
         );
         writeParameter1Ref(writer, operator);
@@ -351,51 +352,28 @@ public class OperatorsGenerator extends Generator {
 
     private void writeDeltaOperator(Operator operator, XMLWriter writer) {
 
-        if (operator.getBaseName().startsWith(RelativeRatesType.MU_RELATIVE_RATES.toString())) {
+        int[] parameterWeights = operator.getParameter1().getParameterDimensionWeights();
+        Attribute[] attributes;
 
-            int[] parameterWeights = operator.parameter1.getParameterDimensionWeights();
-
-            if (parameterWeights != null && parameterWeights.length > 1) {
-                String pw = "" + parameterWeights[0];
-                for (int i = 1; i < parameterWeights.length; i++) {
-                    pw += " " + parameterWeights[i];
-                }
-                writer.writeOpenTag(DeltaExchangeOperatorParser.DELTA_EXCHANGE,
-                        new Attribute[]{
-                                new Attribute.Default<Double>(DeltaExchangeOperatorParser.DELTA, operator.tuning),
-                                new Attribute.Default<String>(DeltaExchangeOperatorParser.PARAMETER_WEIGHTS, pw),
-                                getWeightAttribute(operator.weight)
-                        }
-                );
-            }
-
-        } else if (operator.getBaseName().startsWith(RelativeRatesType.CLOCK_RELATIVE_RATES.toString())) {
-
-            int[] parameterWeights = options.clockModelOptions.getPartitionClockWeights(operator.getClockModelGroup());
-
-            if (parameterWeights != null && parameterWeights.length > 1) {
-                String pw = "" + parameterWeights[0];
-                for (int i = 1; i < parameterWeights.length; i++) {
-                    pw += " " + parameterWeights[i];
-                }
-                writer.writeOpenTag(DeltaExchangeOperatorParser.DELTA_EXCHANGE,
-                        new Attribute[]{
-                                new Attribute.Default<Double>(DeltaExchangeOperatorParser.DELTA, operator.tuning),
-                                new Attribute.Default<String>(DeltaExchangeOperatorParser.PARAMETER_WEIGHTS, pw),
-                                getWeightAttribute(operator.weight)
-                        }
-                );
+        if (parameterWeights != null && parameterWeights.length > 1) {
+            String pw = "" + parameterWeights[0];
+            for (int i = 1; i < parameterWeights.length; i++) {
+                pw += " " + parameterWeights[i];
             }
+            attributes = new Attribute[]{
+                    new Attribute.Default<Double>(DeltaExchangeOperatorParser.DELTA, operator.getTuning()),
+                    new Attribute.Default<String>(DeltaExchangeOperatorParser.PARAMETER_WEIGHTS, pw),
+                    getWeightAttribute(operator.getWeight())
+            };
 
         } else {
-            writer.writeOpenTag(DeltaExchangeOperatorParser.DELTA_EXCHANGE,
-                    new Attribute[]{
-                            new Attribute.Default<Double>(DeltaExchangeOperatorParser.DELTA, operator.tuning),
-                            getWeightAttribute(operator.weight)
-                    }
-            );
+            attributes = new Attribute[]{
+                    new Attribute.Default<Double>(DeltaExchangeOperatorParser.DELTA, operator.getTuning()),
+                    getWeightAttribute(operator.getWeight())
+            };
         }
 
+        writer.writeOpenTag(DeltaExchangeOperatorParser.DELTA_EXCHANGE, attributes);
         writeParameter1Ref(writer, operator);
         writer.writeCloseTag(DeltaExchangeOperatorParser.DELTA_EXCHANGE);
     }
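
The rewritten writeDeltaOperator() always takes the per-dimension weights from the operator's first parameter and, when there is more than one dimension, joins them into a space-separated parameterWeights attribute. A stand-alone sketch of building that attribute value, independent of the XMLWriter/Attribute classes used above.

    // Sketch only: space-separated weights string, or null when the attribute is omitted.
    import java.util.StringJoiner;

    final class ParameterWeightsSketch {

        static String weightsAttribute(int[] parameterWeights) {
            if (parameterWeights == null || parameterWeights.length <= 1) {
                return null; // matches the else-branch above: no parameterWeights attribute
            }
            StringJoiner joiner = new StringJoiner(" ");
            for (int w : parameterWeights) {
                joiner.add(Integer.toString(w));
            }
            return joiner.toString();
        }

        public static void main(String[] args) {
            System.out.println(weightsAttribute(new int[]{312, 312, 311})); // 312 312 311
        }
    }
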
@@ -403,9 +381,9 @@ public class OperatorsGenerator extends Generator {
     private void writeIntegerDeltaOperator(Operator operator, XMLWriter writer) {
         writer.writeOpenTag(DeltaExchangeOperatorParser.DELTA_EXCHANGE,
                 new Attribute[]{
-                        new Attribute.Default<String>(DeltaExchangeOperatorParser.DELTA, Integer.toString((int) operator.tuning)),
+                        new Attribute.Default<String>(DeltaExchangeOperatorParser.DELTA, Integer.toString((int) operator.getTuning())),
                         new Attribute.Default<String>("integer", "true"),
-                        getWeightAttribute(operator.weight),
+                        getWeightAttribute(operator.getWeight()),
                         new Attribute.Default<String>("autoOptimize", "false")
                 }
         );
@@ -417,8 +395,8 @@ public class OperatorsGenerator extends Generator {
     private void writeSwapOperator(Operator operator, XMLWriter writer) {
         writer.writeOpenTag(SwapOperatorParser.SWAP_OPERATOR,
                 new Attribute[]{
-                        new Attribute.Default<String>("size", Integer.toString((int) operator.tuning)),
-                        getWeightAttribute(operator.weight),
+                        new Attribute.Default<String>("size", Integer.toString((int) operator.getTuning())),
+                        getWeightAttribute(operator.getWeight()),
                         new Attribute.Default<String>("autoOptimize", "false")
                 }
         );
@@ -429,7 +407,7 @@ public class OperatorsGenerator extends Generator {
 
     private void writeBitFlipOperator(Operator operator, XMLWriter writer) {
         writer.writeOpenTag(BitFlipOperatorParser.BIT_FLIP_OPERATOR,
-                getWeightAttribute(operator.weight));
+                getWeightAttribute(operator.getWeight()));
         writeParameter1Ref(writer, operator);
 //        writeOperatorRef(writer, operator);
         writer.writeCloseTag(BitFlipOperatorParser.BIT_FLIP_OPERATOR);
@@ -437,8 +415,8 @@ public class OperatorsGenerator extends Generator {
 
     private void writeBitFlipInSubstOperator(Operator operator, XMLWriter writer) {
         writer.writeOpenTag(BitFlipInSubstitutionModelOperator.BIT_FLIP_OPERATOR, new Attribute[]{
-                new Attribute.Default<Double>(ScaleOperatorParser.SCALE_FACTOR, operator.tuning),
-                getWeightAttribute(operator.weight)});
+                new Attribute.Default<Double>(ScaleOperatorParser.SCALE_FACTOR, operator.getTuning()),
+                getWeightAttribute(operator.getWeight())});
         writeParameter1Ref(writer, operator);
 //        writeOperatorRef(writer, operator);
         PartitionSubstitutionModel model = (PartitionSubstitutionModel) operator.getOptions();
@@ -449,7 +427,7 @@ public class OperatorsGenerator extends Generator {
 
     private void writeRateBitExchangeOperator(Operator operator, XMLWriter writer) {
         writer.writeOpenTag(RateBitExchangeOperator.OPERATOR_NAME,
-                getWeightAttribute(operator.weight));
+                getWeightAttribute(operator.getWeight()));
 
         writer.writeOpenTag(RateBitExchangeOperator.BITS);
         writeParameter1Ref(writer, operator);
@@ -464,14 +442,14 @@ public class OperatorsGenerator extends Generator {
 
     private void writeTreeBitMoveOperator(Operator operator, XMLWriter writer) {
         writer.writeOpenTag(TreeBitMoveOperatorParser.BIT_MOVE_OPERATOR,
-                getWeightAttribute(operator.weight));
+                getWeightAttribute(operator.getWeight()));
         writer.writeIDref(TreeModel.TREE_MODEL, modelPrefix + TreeModel.TREE_MODEL);
         writer.writeCloseTag(TreeBitMoveOperatorParser.BIT_MOVE_OPERATOR);
     }
 
     private void writeUniformOperator(Operator operator, XMLWriter writer) {
         writer.writeOpenTag("uniformOperator",
-                getWeightAttribute(operator.weight));
+                getWeightAttribute(operator.getWeight()));
         writeParameter1Ref(writer, operator);
 //        writeOperatorRef(writer, operator);
         writer.writeCloseTag("uniformOperator");
@@ -479,7 +457,7 @@ public class OperatorsGenerator extends Generator {
 
     private void writeIntegerUniformOperator(Operator operator, XMLWriter writer) {
         writer.writeOpenTag(UniformIntegerOperatorParser.UNIFORM_INTEGER_OPERATOR,
-                getWeightAttribute(operator.weight));
+                getWeightAttribute(operator.getWeight()));
         writeParameter1Ref(writer, operator);
 //        writeOperatorRef(writer, operator);
         writer.writeCloseTag(UniformIntegerOperatorParser.UNIFORM_INTEGER_OPERATOR);
@@ -487,21 +465,21 @@ public class OperatorsGenerator extends Generator {
 
     private void writeNarrowExchangeOperator(Operator operator, XMLWriter writer) {
         writer.writeOpenTag(ExchangeOperatorParser.NARROW_EXCHANGE,
-                getWeightAttribute(operator.weight));
+                getWeightAttribute(operator.getWeight()));
         writer.writeIDref(TreeModel.TREE_MODEL, modelPrefix + TreeModel.TREE_MODEL);
         writer.writeCloseTag(ExchangeOperatorParser.NARROW_EXCHANGE);
     }
 
     private void writeWideExchangeOperator(Operator operator, XMLWriter writer) {
         writer.writeOpenTag(ExchangeOperatorParser.WIDE_EXCHANGE,
-                getWeightAttribute(operator.weight));
+                getWeightAttribute(operator.getWeight()));
         writer.writeIDref(TreeModel.TREE_MODEL, modelPrefix + TreeModel.TREE_MODEL);
         writer.writeCloseTag(ExchangeOperatorParser.WIDE_EXCHANGE);
     }
 
     private void writeWilsonBaldingOperator(Operator operator, XMLWriter writer) {
         writer.writeOpenTag(WilsonBaldingParser.WILSON_BALDING,
-                getWeightAttribute(operator.weight));
+                getWeightAttribute(operator.getWeight()));
         writer.writeIDref(TreeModel.TREE_MODEL, modelPrefix + TreeModel.TREE_MODEL);
         // not supported anymore. probably never worked. (todo) get it out of GUI too
 //        if (options.nodeHeightPrior == TreePriorType.CONSTANT) {
@@ -512,7 +490,7 @@ public class OperatorsGenerator extends Generator {
 
     private void writeSampleNonActiveOperator(Operator operator, XMLWriter writer) {
         writer.writeOpenTag(SampleNonActiveGibbsOperatorParser.SAMPLE_NONACTIVE_GIBBS_OPERATOR,
-                getWeightAttribute(operator.weight));
+                getWeightAttribute(operator.getWeight()));
 
         writer.writeOpenTag(SampleNonActiveGibbsOperatorParser.DISTRIBUTION);
         writeOperatorRef(writer, operator);
@@ -533,8 +511,8 @@ public class OperatorsGenerator extends Generator {
         writer.writeOpenTag(
                 GMRFSkyrideBlockUpdateOperatorParser.GRID_BLOCK_UPDATE_OPERATOR,
                 new Attribute[]{
-                        new Attribute.Default<Double>(GMRFSkyrideBlockUpdateOperatorParser.SCALE_FACTOR, operator.tuning),
-                        getWeightAttribute(operator.weight)
+                        new Attribute.Default<Double>(GMRFSkyrideBlockUpdateOperatorParser.SCALE_FACTOR, operator.getTuning()),
+                        getWeightAttribute(operator.getWeight())
                 }
         );
         writer.writeIDref(GMRFSkyrideLikelihoodParser.SKYLINE_LIKELIHOOD, modelPrefix + "skygrid");
@@ -545,8 +523,8 @@ public class OperatorsGenerator extends Generator {
         writer.writeOpenTag(
                 GMRFSkyrideBlockUpdateOperatorParser.BLOCK_UPDATE_OPERATOR,
                 new Attribute[]{
-                        new Attribute.Default<Double>(GMRFSkyrideBlockUpdateOperatorParser.SCALE_FACTOR, operator.tuning),
-                        getWeightAttribute(operator.weight)
+                        new Attribute.Default<Double>(GMRFSkyrideBlockUpdateOperatorParser.SCALE_FACTOR, operator.getTuning()),
+                        getWeightAttribute(operator.getWeight())
                 }
         );
         writer.writeIDref(GMRFSkyrideLikelihoodParser.SKYLINE_LIKELIHOOD, modelPrefix + "skyride");
@@ -557,8 +535,8 @@ public class OperatorsGenerator extends Generator {
         writer.writeOpenTag(
                 ScaleOperatorParser.SCALE_OPERATOR,
                 new Attribute[]{
-                        new Attribute.Default<Double>(ScaleOperatorParser.SCALE_FACTOR, operator.tuning),
-                        getWeightAttribute(operator.weight)
+                        new Attribute.Default<Double>(ScaleOperatorParser.SCALE_FACTOR, operator.getTuning()),
+                        getWeightAttribute(operator.getWeight())
                 });
         writeParameter1Ref(writer, operator);
         writer.writeOpenTag(ScaleOperatorParser.INDICATORS, new Attribute.Default<String>(ScaleOperatorParser.PICKONEPROB, "1.0"));
@@ -570,8 +548,8 @@ public class OperatorsGenerator extends Generator {
     private void writeSubtreeLeapOperator(Operator operator, XMLWriter writer) {
         writer.writeOpenTag(SubtreeLeapOperatorParser.SUBTREE_LEAP,
                 new Attribute[]{
-                        new Attribute.Default<Double>("size", operator.tuning),
-                        getWeightAttribute(operator.weight)
+                        new Attribute.Default<Double>("size", operator.getTuning()),
+                        getWeightAttribute(operator.getWeight())
                 }
         );
         writer.writeIDref(TreeModel.TREE_MODEL, modelPrefix + TreeModel.TREE_MODEL);
@@ -581,9 +559,9 @@ public class OperatorsGenerator extends Generator {
     private void writeSubtreeSlideOperator(Operator operator, XMLWriter writer) {
         writer.writeOpenTag(SubtreeSlideOperatorParser.SUBTREE_SLIDE,
                 new Attribute[]{
-                        new Attribute.Default<Double>("size", operator.tuning),
+                        new Attribute.Default<Double>("size", operator.getTuning()),
                         new Attribute.Default<String>("gaussian", "true"),
-                        getWeightAttribute(operator.weight)
+                        getWeightAttribute(operator.getWeight())
                 }
         );
         writer.writeIDref(TreeModel.TREE_MODEL, modelPrefix + TreeModel.TREE_MODEL);
@@ -592,7 +570,7 @@ public class OperatorsGenerator extends Generator {
 
     private void writeSpeciesTreeOperator(Operator operator, XMLWriter writer) {
         writer.writeOpenTag(TreeNodeSlideParser.TREE_NODE_REHEIGHT,
-                new Attribute[]{getWeightAttribute(operator.weight)}
+                new Attribute[]{getWeightAttribute(operator.getWeight())}
         );
         writer.writeIDref(TraitData.TRAIT_SPECIES, TraitData.TRAIT_SPECIES);
         writer.writeIDref(SpeciesTreeModelParser.SPECIES_TREE, Generator.SP_TREE);
@@ -602,24 +580,25 @@ public class OperatorsGenerator extends Generator {
     private void writeUpDownOperator(String opTag, Operator operator, XMLWriter writer) {
         writer.writeOpenTag(opTag,
                 new Attribute[]{
-                        new Attribute.Default<Double>(ScaleOperatorParser.SCALE_FACTOR, operator.tuning),
-                        getWeightAttribute(operator.weight)
+                        new Attribute.Default<Double>(ScaleOperatorParser.SCALE_FACTOR, operator.getTuning()),
+                        getWeightAttribute(operator.getWeight())
                 }
         );
 
         writer.writeOpenTag(UpDownOperatorParser.UP);
-        // for isEstimatedRate() = false, write nothing on up part of upDownOp
-        if (!operator.parameter1.isFixed && operator.getClockModelGroup().getRateTypeOption() != FixRateType.FIX_MEAN) {
+
+        if (!operator.getParameter1().isFixed() /* && operator.getClockModelGroup().getRateTypeOption() != FixRateType.FIXED_MEAN */) {
             writeParameter1Ref(writer, operator);
+        } else {
+            writer.writeComment("Rate is fixed - scale node heights only");
         }
         writer.writeCloseTag(UpDownOperatorParser.UP);
 
         writer.writeOpenTag(UpDownOperatorParser.DOWN);
-        if (operator.tag == null) {
-//	        writer.writeIDref(ParameterParser.PARAMETER,  operator.parameter2.getName());
+        if (operator.getTag() == null) {
             writeParameter2Ref(writer, operator);
         } else {
-            writer.writeIDref(operator.tag, operator.idref);
+            writer.writeIDref(operator.getTag(), operator.getIdref());
         }
         writer.writeCloseTag(UpDownOperatorParser.DOWN);
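
With this change, writeUpDownOperator() leaves the up element empty, writing only a comment, when the rate parameter is fixed, so the operator effectively scales node heights alone. A hypothetical sketch of the two resulting XML shapes; element names follow the parsers referenced above, while the ids and tuning values are illustrative.

    // Hypothetical sketch; ids and numbers are examples only.
    final class UpDownSketch {

        static String upDownOperator(String rateId, boolean rateFixed, String heightsId) {
            StringBuilder xml = new StringBuilder();
            xml.append("<upDownOperator scaleFactor=\"0.75\" weight=\"3\">\n");
            xml.append("    <up>\n");
            if (!rateFixed) {
                xml.append("        <parameter idref=\"").append(rateId).append("\"/>\n");
            } else {
                xml.append("        <!-- Rate is fixed - scale node heights only -->\n");
            }
            xml.append("    </up>\n");
            xml.append("    <down>\n");
            xml.append("        <parameter idref=\"").append(heightsId).append("\"/>\n");
            xml.append("    </down>\n");
            xml.append("</upDownOperator>\n");
            return xml.toString();
        }

        public static void main(String[] args) {
            System.out.print(upDownOperator("clock.rate", true, "treeModel.allInternalNodeHeights"));
        }
    }
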
 
@@ -629,45 +608,44 @@ public class OperatorsGenerator extends Generator {
     private void writeUpDownOperatorAllRatesTrees(Operator operator, XMLWriter writer) {
         writer.writeOpenTag(UpDownOperatorParser.UP_DOWN_OPERATOR,
                 new Attribute[]{
-                        new Attribute.Default<Double>(ScaleOperatorParser.SCALE_FACTOR, operator.tuning),
-                        getWeightAttribute(operator.weight)
+                        new Attribute.Default<Double>(ScaleOperatorParser.SCALE_FACTOR, operator.getTuning()),
+                        getWeightAttribute(operator.getWeight())
                 }
         );
 
         writer.writeOpenTag(UpDownOperatorParser.UP);
 
         for (PartitionClockModel model : options.getPartitionClockModels()) {
-            if (model.isEstimatedRate()) {
-                switch (model.getClockType()) {
-                    case STRICT_CLOCK:
-                    case RANDOM_LOCAL_CLOCK:
-                        writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + "clock.rate");
-                        break;
-
-                    case UNCORRELATED:
-                        switch (model.getClockDistributionType()) {
-                            case LOGNORMAL:
-                                writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + ClockType.UCLD_MEAN);
-                                break;
-                            case GAMMA:
-                                throw new UnsupportedOperationException("Uncorrelated gamma relaxed clock model not implemented yet");
+//            if (model.isEstimatedRate()) {
+            switch (model.getClockType()) {
+                case STRICT_CLOCK:
+                case RANDOM_LOCAL_CLOCK:
+                    writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + "clock.rate");
+                    break;
+
+                case UNCORRELATED:
+                    switch (model.getClockDistributionType()) {
+                        case LOGNORMAL:
+                            writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + ClockType.UCLD_MEAN);
+                            break;
+                        case GAMMA:
+                            throw new UnsupportedOperationException("Uncorrelated gamma relaxed clock model not implemented yet");
 //                            break;
-                            case CAUCHY:
-                                throw new UnsupportedOperationException("Uncorrelated Cauchy relaxed clock model not implemented yet");
+                        case CAUCHY:
+                            throw new UnsupportedOperationException("Uncorrelated Cauchy relaxed clock model not implemented yet");
 //                            break;
-                            case EXPONENTIAL:
-                                writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + ClockType.UCED_MEAN);
-                                break;
-                        }
-                        break;
-
-                    case AUTOCORRELATED:
-                        throw new UnsupportedOperationException("Autocorrelated relaxed clock model not implemented yet");
+                        case EXPONENTIAL:
+                            writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + ClockType.UCED_MEAN);
+                            break;
+                    }
+                    break;
+
+                case AUTOCORRELATED:
+                    throw new UnsupportedOperationException("Autocorrelated relaxed clock model not implemented yet");
 //	                break;
 
-                    default:
-                        throw new IllegalArgumentException("Unknown clock model");
-                }
+                default:
+                    throw new IllegalArgumentException("Unknown clock model");
             }
         }
         if (options.useStarBEAST) {
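
To illustrate the dispatch above — every clock model now contributes a rate parameter to the up/down operator, chosen by clock type — here is a minimal standalone sketch; the ClockType enum and the "ucld.mean"/"uced.mean" suffixes are placeholders, not BEAST's actual constants:

    // Hypothetical illustration of the clock-type dispatch; names are placeholders.
    public class ClockRateParameterChooser {

        enum ClockType { STRICT_CLOCK, RANDOM_LOCAL_CLOCK, UNCORRELATED_LOGNORMAL, UNCORRELATED_EXPONENTIAL }

        /** Returns the id of the rate parameter the up/down operator should scale. */
        static String rateParameterId(String prefix, ClockType type) {
            switch (type) {
                case STRICT_CLOCK:
                case RANDOM_LOCAL_CLOCK:
                    return prefix + "clock.rate";    // single rate parameter
                case UNCORRELATED_LOGNORMAL:
                    return prefix + "ucld.mean";     // mean of the lognormal rate distribution
                case UNCORRELATED_EXPONENTIAL:
                    return prefix + "uced.mean";     // mean of the exponential rate distribution
                default:
                    throw new IllegalArgumentException("Unknown clock model: " + type);
            }
        }

        public static void main(String[] args) {
            System.out.println(rateParameterId("partition1.", ClockType.STRICT_CLOCK));
        }
    }
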
diff --git a/src/dr/app/beauti/generator/ParameterPriorGenerator.java b/src/dr/app/beauti/generator/ParameterPriorGenerator.java
index add36ee..43eae0a 100644
--- a/src/dr/app/beauti/generator/ParameterPriorGenerator.java
+++ b/src/dr/app/beauti/generator/ParameterPriorGenerator.java
@@ -95,7 +95,9 @@ public class ParameterPriorGenerator extends Generator {
         } else {
 
             for (Parameter parameter : parameters) {
-                if (!(parameter.priorType == PriorType.NONE_TREE_PRIOR || parameter.priorType == PriorType.NONE_STATISTIC)) {
+                if (!(parameter.priorType == PriorType.NONE_TREE_PRIOR ||
+                        parameter.priorType == PriorType.NONE_FIXED ||
+                        parameter.priorType == PriorType.NONE_STATISTIC)) {
                     if (parameter.isCached) {
                         writeCachedParameterPrior(parameter, writer);
                     //if (parameter.priorType != PriorType.UNIFORM_PRIOR || parameter.isNodeHeight) {
@@ -124,7 +126,7 @@ public class ParameterPriorGenerator extends Generator {
      * @param writer    the writer
      */
     public void writeParameterPrior(Parameter parameter, XMLWriter writer) {
-        if (parameter.isTruncated) {
+        if (parameter.priorType != PriorType.NONE_FIXED && parameter.isTruncated) {
             // if there is a truncation then put it at the top so it short-circuits any other prior
             // calculations
 
@@ -140,6 +142,8 @@ public class ParameterPriorGenerator extends Generator {
         }
 
         switch (parameter.priorType) {
+            case NONE_FIXED:
+                break;
             case NONE_IMPROPER:
                 writer.writeComment("Improper uniform prior: " + parameter.getName());
                 break;
@@ -248,7 +252,7 @@ public class ParameterPriorGenerator extends Generator {
 
                 PartitionTreeModel treeModel = null;
                 for (PartitionClockModel pcm : options.getPartitionClockModels()) {
-                    if (pcm.getClockRateParam() == parameter) {
+                    if (pcm.getClockRateParameter() == parameter) {
                         for (AbstractPartitionData pd : options.getDataPartitions(pcm)) {
                             treeModel = pd.getPartitionTreeModel();
                             break;
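
The new NONE_FIXED prior type short-circuits both the truncation wrapper and the prior element itself. A hedged sketch of that rule with hypothetical stand-in types:

    // Standalone sketch (hypothetical types) of the "fixed parameters get no prior" rule.
    public class PriorGeneratorSketch {

        enum PriorType { NONE_FIXED, NONE_TREE_PRIOR, NONE_STATISTIC, NONE_IMPROPER, UNIFORM, NORMAL }

        static class Param {
            final String name;
            final PriorType priorType;
            final boolean truncated;
            Param(String name, PriorType priorType, boolean truncated) {
                this.name = name; this.priorType = priorType; this.truncated = truncated;
            }
        }

        /** True if any prior (including a truncation wrapper) should be written for this parameter. */
        static boolean needsPrior(Param p) {
            return p.priorType != PriorType.NONE_FIXED
                    && p.priorType != PriorType.NONE_TREE_PRIOR
                    && p.priorType != PriorType.NONE_STATISTIC;
        }

        public static void main(String[] args) {
            Param fixed = new Param("clock.rate", PriorType.NONE_FIXED, true);
            // A fixed parameter is skipped even if it is flagged as truncated.
            System.out.println(needsPrior(fixed)); // false
        }
    }
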
diff --git a/src/dr/app/beauti/generator/STARBEASTGenerator.java b/src/dr/app/beauti/generator/STARBEASTGenerator.java
index 2767e49..5063b04 100644
--- a/src/dr/app/beauti/generator/STARBEASTGenerator.java
+++ b/src/dr/app/beauti/generator/STARBEASTGenerator.java
@@ -145,7 +145,7 @@ public class STARBEASTGenerator extends Generator {
                 });
         writer.writeOpenTag(ConstantPopulationModelParser.POPULATION_SIZE);
 
-        double popSizeValue = options.getPartitionTreePriors().get(0).getParameter("constant.popSize").initial; // "initial" is "value"
+        double popSizeValue = options.getPartitionTreePriors().get(0).getParameter("constant.popSize").getInitial(); // "initial" is "value"
 
         writer.writeTag(ParameterParser.PARAMETER, new Attribute[]{
                 new Attribute.Default<String>(XMLParser.ID, "sp.popSize"),
@@ -246,7 +246,7 @@ public class STARBEASTGenerator extends Generator {
 
         // take sppSplitPopulations value from partionModel(?).constant.popSize
         // *BEAST always share same tree prior
-        double popSizeValue = options.getPartitionTreePriors().get(0).getParameter("constant.popSize").initial; // "initial" is "value"
+        double popSizeValue = options.getPartitionTreePriors().get(0).getParameter("constant.popSize").getInitial(); // "initial" is "value"
         writer.writeOpenTag(SpeciesTreeModelParser.SPP_SPLIT_POPULATIONS, new Attribute[]{
                 new Attribute.Default<Double>(ParameterParser.VALUE, popSizeValue)});
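
These hunks swap direct access to the public initial field for getInitial()/setInitial() accessors. A tiny hypothetical sketch of the encapsulated field (not BEAUti's Parameter class):

    // Hedged sketch of the field-to-accessor change.
    public class ParameterSketch {
        private double initial;   // the starting ("initial") value, used as the XML value attribute

        public double getInitial() {
            return initial;
        }

        public void setInitial(double initial) {
            this.initial = initial;
        }

        public static void main(String[] args) {
            ParameterSketch popSize = new ParameterSketch();
            popSize.setInitial(0.1);
            System.out.println(popSize.getInitial()); // 0.1
        }
    }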
 
diff --git a/src/dr/app/beauti/generator/SubstitutionModelGenerator.java b/src/dr/app/beauti/generator/SubstitutionModelGenerator.java
index 16f994d..84c8267 100644
--- a/src/dr/app/beauti/generator/SubstitutionModelGenerator.java
+++ b/src/dr/app/beauti/generator/SubstitutionModelGenerator.java
@@ -445,42 +445,6 @@ public class SubstitutionModelGenerator extends Generator {
         writer.writeCloseTag(FrequencyModelParser.FREQUENCIES);
     }
 
-    /**
-     * Write the allMus for each partition model.
-     *
-     * @param model  PartitionSubstitutionModel
-     * @param writer XMLWriter
-     */
-    public void writeAllMus(PartitionSubstitutionModel model, XMLWriter writer) {
-        if (model.hasCodon()) { // write allMus for codon model
-            // allMus is global for each gene
-            writer.writeOpenTag(CompoundParameterParser.COMPOUND_PARAMETER,
-                    new Attribute[]{new Attribute.Default<String>(XMLParser.ID, model.getPrefix() + "allMus")});
-
-            writeMuParameterRefs(model, writer);
-
-            writer.writeCloseTag(CompoundParameterParser.COMPOUND_PARAMETER);
-        }
-    }
-
-    /**
-     * Write the all the mu parameters for this partition model.
-     *
-     * @param writer the writer
-     * @param model  the partition model to write in BEAST XML
-     */
-    public void writeMuParameterRefs(PartitionSubstitutionModel model, XMLWriter writer) {
-
-        if (model.getDataType().getType() == DataType.NUCLEOTIDES && model.getCodonHeteroPattern() != null) {
-            for (int i = 1; i <= model.getCodonPartitionCount(); i++) {
-                writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix(i) + "mu");
-            }
-        } else {
-            writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + "mu");
-        }
-
-    }
-
     public void writeLog(PartitionSubstitutionModel model, XMLWriter writer) {
 
         int codonPartitionCount = model.getCodonPartitionCount();
@@ -686,8 +650,10 @@ public class SubstitutionModelGenerator extends Generator {
 
         writer.writeCloseTag(GammaSiteModelParser.SUBSTITUTION_MODEL);
 
-        if (model.hasCodon()) {
+        if (model.hasCodonPartitions()) {
             writeParameter(num, GammaSiteModelParser.RELATIVE_RATE, "mu", model, writer);
+        } else {
+            writeParameter(GammaSiteModelParser.RELATIVE_RATE, "mu", model, writer);
         }
 
         if (model.isGammaHetero()) {
@@ -726,6 +692,7 @@ public class SubstitutionModelGenerator extends Generator {
         }
 
         writer.writeCloseTag(GammaSiteModel.SITE_MODEL);
+        writer.writeText("");
     }
 
     /**
@@ -761,9 +728,7 @@ public class SubstitutionModelGenerator extends Generator {
 
         writer.writeCloseTag(GammaSiteModelParser.SUBSTITUTION_MODEL);
 
-        if (model.hasCodon()) {
-            writeParameter(GammaSiteModelParser.RELATIVE_RATE, "mu", model, writer);
-        }
+        writeParameter(GammaSiteModelParser.RELATIVE_RATE, "mu", model, writer);
 
         if (model.isGammaHetero()) {
             writer.writeOpenTag(GammaSiteModelParser.GAMMA_SHAPE,
@@ -798,9 +763,7 @@ public class SubstitutionModelGenerator extends Generator {
         writer.writeIDref(EmpiricalAminoAcidModelParser.EMPIRICAL_AMINO_ACID_MODEL, prefix + "aa");
         writer.writeCloseTag(GammaSiteModelParser.SUBSTITUTION_MODEL);
 
-        if (model.hasCodon()) {
-            writeParameter(GammaSiteModelParser.RELATIVE_RATE, "mu", model, writer);
-        }
+        writeParameter(GammaSiteModelParser.RELATIVE_RATE, "mu", model, writer);
 
 
         if (model.isGammaHetero()) {
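
With the allMus helpers removed, the site-model code now writes the relative-rate parameter unconditionally; when a nucleotide model is split by codon position there is one mu per position, otherwise a single mu. A standalone sketch of that id scheme, assuming a hypothetical "CPn." prefix convention:

    import java.util.ArrayList;
    import java.util.List;

    // Illustrative only: builds the "mu" parameter ids a codon-partitioned model would reference.
    public class RelativeRateIds {

        static List<String> muIds(String prefix, int codonPartitionCount) {
            List<String> ids = new ArrayList<String>();
            if (codonPartitionCount > 1) {
                // one relative rate per codon position, e.g. "gene1.CP1.mu", "gene1.CP2.mu", ...
                for (int i = 1; i <= codonPartitionCount; i++) {
                    ids.add(prefix + "CP" + i + ".mu");
                }
            } else {
                ids.add(prefix + "mu");   // unpartitioned: a single relative rate
            }
            return ids;
        }

        public static void main(String[] args) {
            System.out.println(muIds("gene1.", 3)); // [gene1.CP1.mu, gene1.CP2.mu, gene1.CP3.mu]
        }
    }
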
diff --git a/src/dr/app/beauti/generator/TreeLikelihoodGenerator.java b/src/dr/app/beauti/generator/TreeLikelihoodGenerator.java
index 7f26eb7..2b959fa 100644
--- a/src/dr/app/beauti/generator/TreeLikelihoodGenerator.java
+++ b/src/dr/app/beauti/generator/TreeLikelihoodGenerator.java
@@ -32,7 +32,6 @@ import dr.app.beauti.options.*;
 import dr.app.beauti.types.MicroSatModelType;
 import dr.app.beauti.util.XMLWriter;
 import dr.evolution.datatype.DataType;
-import dr.evolution.datatype.Nucleotides;
 import dr.evomodel.branchratemodel.BranchRateModel;
 import dr.evomodel.sitemodel.GammaSiteModel;
 import dr.evomodel.sitemodel.SiteModel;
@@ -40,9 +39,6 @@ import dr.evomodel.substmodel.AsymmetricQuadraticModel;
 import dr.evomodel.substmodel.LinearBiasModel;
 import dr.evomodel.substmodel.TwoPhaseModel;
 import dr.evomodel.tree.TreeModel;
-import dr.evomodelxml.branchratemodel.DiscretizedBranchRatesParser;
-import dr.evomodelxml.branchratemodel.LocalClockModelParser;
-import dr.evomodelxml.branchratemodel.RandomLocalClockModelParser;
 import dr.evomodelxml.branchratemodel.StrictClockBranchRatesParser;
 import dr.evomodelxml.tree.MicrosatelliteSamplerTreeModelParser;
 import dr.evomodelxml.treelikelihood.AncestralStateTreeLikelihoodParser;
@@ -177,7 +173,7 @@ public class TreeLikelihoodGenerator extends Generator {
             writer.writeIDref(GammaSiteModel.SITE_MODEL, substModel.getPrefix() + SiteModel.SITE_MODEL);
         }
 
-        BranchRatesModelGenerator.writeBranchRatesModelRef(clockModel, writer);
+        ClockModelGenerator.writeBranchRatesModelRef(clockModel, writer);
 
         generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_TREE_LIKELIHOOD, partition, prefix, writer);
 
diff --git a/src/dr/app/beauti/generator/TreePriorGenerator.java b/src/dr/app/beauti/generator/TreePriorGenerator.java
index ee39a96..eeba4f7 100644
--- a/src/dr/app/beauti/generator/TreePriorGenerator.java
+++ b/src/dr/app/beauti/generator/TreePriorGenerator.java
@@ -351,7 +351,7 @@ public class TreePriorGenerator extends Generator {
 
                 // initial value for pop mean is the same as what used to be the value for the population size
                 Parameter para = options.starBEASTOptions.getParameter(TraitData.TRAIT_SPECIES + "." + options.starBEASTOptions.POP_MEAN);
-                prior.getParameter("constant.popSize").initial = para.initial;
+                prior.getParameter("constant.popSize").setInitial(para.getInitial());
 
                 writer.writeOpenTag(ConstantPopulationModelParser.POPULATION_SIZE);
                 writeParameter("constant.popSize", prior, writer);
@@ -733,13 +733,13 @@ public class TreePriorGenerator extends Generator {
 
             writer.writeOpenTag(GMRFSkyrideLikelihoodParser.NUM_GRID_POINTS);
             Parameter numGridPoint = prior.getParameter("skygrid.numGridPoints");
-            numGridPoint.initial = skyGridIntervalCount - 1;
+            numGridPoint.setInitial(skyGridIntervalCount - 1);
             writeParameter(numGridPoint, 1, writer);
             writer.writeCloseTag(GMRFSkyrideLikelihoodParser.NUM_GRID_POINTS);
 
             writer.writeOpenTag(GMRFSkyrideLikelihoodParser.CUT_OFF);
             Parameter cutOff = prior.getParameter("skygrid.cutOff");
-            cutOff.initial = skyGridInterval;
+            cutOff.setInitial(skyGridInterval);
             writeParameter(cutOff, 1, writer);
             writer.writeCloseTag(GMRFSkyrideLikelihoodParser.CUT_OFF);
 
@@ -772,14 +772,14 @@ public class TreePriorGenerator extends Generator {
 
             Parameter popSize = prior.getParameter(VariableDemographicModelParser.demoElementName + ".popSize");
             Parameter populationMean = prior.getParameter(VariableDemographicModelParser.demoElementName + ".populationMean");
-            popSize.initial = populationMean.initial;
+            popSize.setInitial(populationMean.getInitial());
 
             writer.writeOpenTag(VariableDemographicModelParser.POPULATION_SIZES);
             writer.writeComment("popSize value = populationMean value");
             writer.writeTag(ParameterParser.PARAMETER,
                     new Attribute[]{
                             new Attribute.Default<String>(XMLParser.ID, modelPrefix + VariableDemographicModelParser.demoElementName + ".popSize"),
-                            new Attribute.Default<String>(ParameterParser.VALUE, Double.toString(popSize.initial))}, true);
+                            new Attribute.Default<String>(ParameterParser.VALUE, Double.toString(popSize.getInitial()))}, true);
 //	        writeParameter(popSize, -1, writer);
             writer.writeCloseTag(VariableDemographicModelParser.POPULATION_SIZES);
 
@@ -842,7 +842,7 @@ public class TreePriorGenerator extends Generator {
             writer.writeTag(ParameterParser.PARAMETER,
                     new Attribute[]{
                             new Attribute.Default<String>(XMLParser.ID, modelPrefix + VariableDemographicModelParser.demoElementName + ".populationMean"),
-                            new Attribute.Default<String>(ParameterParser.VALUE, Double.toString(populationMean.initial))}, true);
+                            new Attribute.Default<String>(ParameterParser.VALUE, Double.toString(populationMean.getInitial()))}, true);
 
             writer.writeCloseTag(DistributionModelParser.MEAN);
             writer.writeCloseTag(ExponentialDistributionModel.EXPONENTIAL_DISTRIBUTION_MODEL);
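
The Skygrid hunk ties the grid size to the interval count: numGridPoints is set to intervalCount - 1 and cutOff to the chosen interval bound. A minimal sketch of that bookkeeping with a hypothetical holder class:

    // Illustrative holder for the Skygrid settings written above; not BEAUti's options class.
    public class SkygridSettings {
        final double cutOff;        // time of the last grid point (skygrid.cutOff)
        final int numGridPoints;    // skygrid.numGridPoints = intervalCount - 1

        SkygridSettings(double cutOff, int intervalCount) {
            if (intervalCount < 2) {
                throw new IllegalArgumentException("Skygrid needs at least two intervals");
            }
            this.cutOff = cutOff;
            this.numGridPoints = intervalCount - 1;
        }

        public static void main(String[] args) {
            SkygridSettings s = new SkygridSettings(10.0, 50);
            System.out.println(s.numGridPoints + " grid points up to time " + s.cutOff); // 49 ... 10.0
        }
    }
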
diff --git a/src/dr/app/beauti/mcmcpanel/MCMCPanel.java b/src/dr/app/beauti/mcmcpanel/MCMCPanel.java
index efbba47..f42407f 100644
--- a/src/dr/app/beauti/mcmcpanel/MCMCPanel.java
+++ b/src/dr/app/beauti/mcmcpanel/MCMCPanel.java
@@ -284,10 +284,12 @@ public class MCMCPanel extends BeautiPanel {
             public void actionPerformed(ActionEvent e) {
                 if (performMLECombo.getSelectedIndex() == 1) {
                     mleOptions.performMLE = true;
+                    mleOptions.performMLEGSS = false;
                     options.logCoalescentEventsStatistic = false;
                     buttonMLE.setEnabled(true);
                     updateMLEFileNameStem();
                 } else if (performMLECombo.getSelectedIndex() == 2) {
+                    mleOptions.performMLE = false;
                     mleOptions.performMLEGSS = true;
                     //set to true because product of exponentials is the default option
                     options.logCoalescentEventsStatistic = true;
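
The listener now keeps performMLE and performMLEGSS mutually exclusive when the combo selection changes. A small sketch of that toggle, assuming index 1 selects standard MLE and index 2 selects GSS:

    // Sketch of the mutually exclusive MLE flags toggled by the combo box above.
    // Hypothetical options holder; index 0 = no MLE, 1 = path sampling/stepping stone, 2 = GSS.
    public class MleSelection {
        boolean performMLE;
        boolean performMLEGSS;

        void select(int comboIndex) {
            // at most one of the two flags may be set at a time
            performMLE = (comboIndex == 1);
            performMLEGSS = (comboIndex == 2);
        }

        public static void main(String[] args) {
            MleSelection m = new MleSelection();
            m.select(1);
            m.select(2);
            System.out.println(m.performMLE + " " + m.performMLEGSS); // false true
        }
    }
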
diff --git a/src/dr/app/beauti/operatorspanel/OperatorsPanel.java b/src/dr/app/beauti/operatorspanel/OperatorsPanel.java
index 36b755c..77df55e 100644
--- a/src/dr/app/beauti/operatorspanel/OperatorsPanel.java
+++ b/src/dr/app/beauti/operatorspanel/OperatorsPanel.java
@@ -203,19 +203,19 @@ public class OperatorsPanel extends BeautiPanel implements Exportable {
             Operator op = operators.get(row);
             switch (col) {
                 case 0:
-                    return op.inUse;
+                    return op.isUsed();
                 case 1:
                     return op.getName();
                 case 2:
-                    return op.operatorType;
+                    return op.getOperatorType();
                 case 3:
                     if (op.isTunable()) {
-                        return op.tuning;
+                        return op.getTuning();
                     } else {
                         return "n/a";
                     }
                 case 4:
-                    return op.weight;
+                    return op.getWeight();
                 case 5:
                     return op.getDescription();
             }
@@ -226,14 +226,13 @@ public class OperatorsPanel extends BeautiPanel implements Exportable {
             Operator op = operators.get(row);
             switch (col) {
                 case 0:
-                    op.inUse = (Boolean) aValue;
+                    op.setUsed((Boolean) aValue);
                     break;
                 case 3:
-                    op.tuning = (Double) aValue;
-                    op.tuningEdited = true;
+                    op.setTuning((Double) aValue);
                     break;
                 case 4:
-                    op.weight = (Double) aValue;
+                    op.setWeight((Double) aValue);
                     break;
             }
             operatorsChanged();
@@ -254,13 +253,14 @@ public class OperatorsPanel extends BeautiPanel implements Exportable {
 
             switch (col) {
                 case 0:// Check box
-                    editable = true;
+                    // if the parameter is fixed then 'in use' can't be turned on
+                    editable = !op.isParameterFixed();
                     break;
                 case 3:
-                    editable = op.inUse && op.isTunable();
+                    editable = op.isUsed() && op.isTunable();
                     break;
                 case 4:
-                    editable = op.inUse;
+                    editable = op.isUsed();
                     break;
                 default:
                     editable = false;
@@ -313,10 +313,17 @@ public class OperatorsPanel extends BeautiPanel implements Exportable {
                     aRow, aColumn);
 
             Operator op = operators.get(aRow);
-            if (!op.inUse && aColumn > 0)
+            if (!op.isUsed() && aColumn > 0) {
                 renderer.setForeground(Color.gray);
-            else
+            } else {
                 renderer.setForeground(Color.black);
+            }
+            if (op.isParameterFixed()) {
+               setToolTipText(
+                       "This parameter is set to a fixed value. To turn \r" +
+                       "this move on, select a prior in the Priors tab");
+            }
+
             return this;
         }
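
The table model now reads operator state through accessors and locks the 'in use' checkbox when the underlying parameter is fixed. A self-contained sketch of that editability rule using a hypothetical Op interface:

    import javax.swing.table.AbstractTableModel;
    import java.util.List;

    // Standalone sketch of the editability rule above; Op is a stand-in, not BEAUti's Operator.
    public class OperatorTableModelSketch extends AbstractTableModel {

        interface Op {
            boolean isUsed();
            boolean isTunable();
            boolean isParameterFixed();
        }

        private final List<Op> operators;

        OperatorTableModelSketch(List<Op> operators) { this.operators = operators; }

        @Override public int getRowCount() { return operators.size(); }
        @Override public int getColumnCount() { return 6; }
        @Override public Object getValueAt(int row, int col) { return null; /* omitted in this sketch */ }

        @Override
        public boolean isCellEditable(int row, int col) {
            Op op = operators.get(row);
            switch (col) {
                case 0:  return !op.isParameterFixed();          // 'in use' locked when the parameter is fixed
                case 3:  return op.isUsed() && op.isTunable();   // tuning
                case 4:  return op.isUsed();                     // weight
                default: return false;
            }
        }
    }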
 
diff --git a/src/dr/app/beauti/options/AbstractPartitionData.java b/src/dr/app/beauti/options/AbstractPartitionData.java
index 07f3c10..78a6fdc 100644
--- a/src/dr/app/beauti/options/AbstractPartitionData.java
+++ b/src/dr/app/beauti/options/AbstractPartitionData.java
@@ -66,7 +66,7 @@ public abstract class AbstractPartitionData implements Serializable {
             meanDistance = distances.getMeanDistance();
         } else {
             distances = null;
-            meanDistance = 0.0;
+            meanDistance = 1.0;
         }
     }
 
diff --git a/src/dr/app/beauti/options/BeautiOptions.java b/src/dr/app/beauti/options/BeautiOptions.java
index 0df86b2..0349288 100644
--- a/src/dr/app/beauti/options/BeautiOptions.java
+++ b/src/dr/app/beauti/options/BeautiOptions.java
@@ -34,7 +34,6 @@ import dr.app.beauti.components.discrete.DiscreteTraitsComponentOptions;
 import dr.app.beauti.mcmcpanel.MCMCPanel;
 import dr.app.beauti.types.OperatorSetType;
 import dr.app.beauti.types.TreePriorType;
-import dr.app.beauti.util.BeautiTemplate;
 import dr.evolution.alignment.Alignment;
 import dr.evolution.alignment.Patterns;
 import dr.evolution.datatype.DataType;
@@ -157,8 +156,6 @@ public class BeautiOptions extends ModelOptions {
 
         microsatelliteOptions = new MicrosatelliteOptions(this);
 
-        beautiTemplate = new BeautiTemplate(this);
-
         parameters.clear();
         operators.clear();
         statistics.clear();
@@ -250,9 +247,29 @@ public class BeautiOptions extends ModelOptions {
 //          parameters.addAll(model.getParameters(multiplePartitions));
             model.selectParameters(parameters);
         }
-//        substitutionModelOptions.selectParameters(parameters);
 
         for (PartitionClockModel model : getPartitionClockModels()) {
+            Set<PartitionSubstitutionModel> substitutionModels = new LinkedHashSet<PartitionSubstitutionModel>();
+            for (AbstractPartitionData partition : getDataPartitions()) {
+                if (partition.getPartitionClockModel() == model) {
+                    substitutionModels.add(partition.getPartitionSubstitutionModel());
+                }
+            }
+
+            // collect all the relative rate parameters (partition rates and codon position rates)
+            ArrayList<Parameter> relativeRateParameters = new ArrayList<Parameter>();
+            for (PartitionSubstitutionModel substitutionModel : substitutionModels) {
+                relativeRateParameters.addAll(substitutionModel.getRelativeRateParameters());
+            }
+            if (relativeRateParameters.size() > 1) {
+                Parameter allMus = model.getParameter("allMus");
+                allMus.clearSubParameters();
+                for (Parameter mu : relativeRateParameters) {
+                    allMus.addSubParameter(mu);
+                }
+                parameters.add(allMus);
+            }
+
             model.selectParameters(parameters);
         }
         clockModelOptions.selectParameters();
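
This hunk gathers the relative-rate parameters of every substitution model sharing a clock model and, when there is more than one, bundles them into a compound allMus parameter. A hedged sketch of the grouping with stand-in types:

    import java.util.ArrayList;
    import java.util.LinkedHashSet;
    import java.util.List;
    import java.util.Set;

    // All types here are hypothetical stand-ins for BEAUti's parameter classes.
    public class AllMusSketch {

        static class Param { final String name; Param(String name) { this.name = name; } }

        static class CompoundParam extends Param {
            final List<Param> subParameters = new ArrayList<Param>();
            CompoundParam(String name) { super(name); }
        }

        /** Returns the compound "allMus" parameter, or null if a single rate needs no bundling. */
        static CompoundParam bundleRelativeRates(List<List<Param>> ratesPerSubstModel) {
            Set<Param> rates = new LinkedHashSet<Param>();
            for (List<Param> modelRates : ratesPerSubstModel) {
                rates.addAll(modelRates);        // de-duplicates shared rate parameters
            }
            if (rates.size() <= 1) {
                return null;                     // nothing to bundle
            }
            CompoundParam allMus = new CompoundParam("allMus");
            allMus.subParameters.addAll(rates);  // operators can then act on the bundle as a unit
            return allMus;
        }
    }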
@@ -351,8 +368,8 @@ public class BeautiOptions extends ModelOptions {
         // no remove operators for parameters that are part of a joint prior...
         List<Operator> toRemove = new ArrayList<Operator>();
         for (Operator operator : ops) {
-            if ((operator.parameter1 != null && operator.parameter1.isLinked) ||
-                    (operator.parameter2 != null && operator.parameter2.isLinked)) {
+            if ((operator.getParameter1() != null && operator.getParameter1().isLinked) ||
+                    (operator.getParameter2() != null && operator.getParameter2().isLinked)) {
                 toRemove.add(operator);
             }
         }
@@ -364,7 +381,7 @@ public class BeautiOptions extends ModelOptions {
 
     public Operator getOperator(Parameter parameter) {
         for (Operator operator : selectOperators()) {
-            if (operator.parameter1 == parameter || operator.parameter2 == parameter) {
+            if (operator.getParameter1() == parameter || operator.getParameter2() == parameter) {
                 return operator;
             }
         }
@@ -516,24 +533,6 @@ public class BeautiOptions extends ModelOptions {
     }
 
     private final Map<PartitionClockModel, List<AbstractPartitionData>> pcmCache = new HashMap<PartitionClockModel, List<AbstractPartitionData>>();
-
-    public List<AbstractPartitionData> getDataPartitions(PartitionClockModel model) {
-        List<AbstractPartitionData> pdList = pcmCache.get(model);
-
-        if (pdList == null) {
-            pdList = new ArrayList<AbstractPartitionData>();
-
-            for (AbstractPartitionData pd : dataPartitions) {
-                if (pd.getPartitionClockModel() == model) {
-                    pdList.add(pd);
-                }
-            }
-
-            pcmCache.put(model, pdList);
-        }
-        return pdList;
-    }
-
     private final Map<PartitionTreeModel, List<AbstractPartitionData>> ptmCache = new HashMap<PartitionTreeModel, List<AbstractPartitionData>>();
 
     public List<AbstractPartitionData> getDataPartitions(PartitionTreeModel model) {
@@ -610,21 +609,19 @@ public class BeautiOptions extends ModelOptions {
         return pdList;
     }
 
-    private final Map<ClockModelGroup, List<AbstractPartitionData>> cmgCache = new HashMap<ClockModelGroup, List<AbstractPartitionData>>();
-
-    public List<AbstractPartitionData> getDataPartitions(ClockModelGroup clockModelGroup) {
-        List<AbstractPartitionData> pdList = pcmsmlCache.get(clockModelGroup);
+    public List<AbstractPartitionData> getDataPartitions(PartitionClockModel clockModel) {
+        List<AbstractPartitionData> pdList = pcmCache.get(clockModel);
 
         if (pdList == null) {
             pdList = new ArrayList<AbstractPartitionData>();
 
             for (AbstractPartitionData pd : dataPartitions) {
-                if (pd.getPartitionClockModel() != null && pd.getPartitionClockModel().getClockModelGroup() == clockModelGroup) {
+                if (pd.getPartitionClockModel() != null && pd.getPartitionClockModel() == clockModel) {
                     pdList.add(pd);
                 }
             }
 
-            cmgCache.put(clockModelGroup, pdList);
+            pcmCache.put(clockModel, pdList);
         }
         return pdList;
     }
@@ -636,7 +633,6 @@ public class BeautiOptions extends ModelOptions {
         ptpCache.clear();
         pcmtmlCache.clear();
         pcmsmlCache.clear();
-        cmgCache.clear();
         psmlCache.clear();
         ptmlCache.clear();
         pcmlCache.clear();
@@ -739,24 +735,6 @@ public class BeautiOptions extends ModelOptions {
         return models;
     }
 
-    public List<PartitionClockModel> getPartitionClockModels(ClockModelGroup group) {
-        List<PartitionClockModel> models = new ArrayList<PartitionClockModel>();
-        for (PartitionClockModel model : getPartitionClockModels()) {
-            if (model.getClockModelGroup() == group) {
-                models.add(model);
-            }
-        }
-        return models;
-    }
-
-//    public List<PartitionClockModel> getPartitionNonTraitsClockModels() {
-//        return getPartitionClockModels(getNonTraitsDataList());
-//    }
-//
-//    public List<PartitionClockModel> getPartitionTraitsClockModels() {
-//        return getPartitionClockModels(getTraitsList());
-//    }
-
     public List<PartitionClockModel> getPartitionClockModels() {
         return getPartitionClockModels(dataPartitions);
     }
@@ -1158,8 +1136,6 @@ public class BeautiOptions extends ModelOptions {
             // PartitionClockModel based on PartitionData
             PartitionClockModel pcm = new PartitionClockModel(this, partition);
             partition.setPartitionClockModel(pcm);
-
-            clockModelOptions.addClockModelGroup(pcm);
         }
 
         if (partition.getPartitionTreeModel() == null) {
@@ -1313,7 +1289,7 @@ public class BeautiOptions extends ModelOptions {
 //                message += ";    Phylogeographic Analysis";
 //            }
 
-            message += "; " + clockModelOptions.statusMessageClockModel();
+//            message += "; " + clockModelOptions.statusMessageClockModel();
 
         } else if (userTrees.size() > 0) { // TODO
             message += "Trees only : " + userTrees.size() +
@@ -1442,8 +1418,6 @@ public class BeautiOptions extends ModelOptions {
 
     public MicrosatelliteOptions microsatelliteOptions = new MicrosatelliteOptions(this);
 
-    public BeautiTemplate beautiTemplate = new BeautiTemplate(this);
-
     public boolean shareMicroSat = true;
 
     public boolean logCoalescentEventsStatistic = false;
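
getDataPartitions(PartitionClockModel) replaces the ClockModelGroup variant and memoizes its result in pcmCache until the caches are cleared. A generic sketch of that lazily cached lookup (hypothetical types, equals-based matching rather than the identity check used above):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Sketch of a lazily cached partition lookup; K is the clock-model key, P the partition type.
    public class PartitionCache<K, P> {

        interface KeyExtractor<K, P> { K keyOf(P partition); }

        private final Map<K, List<P>> cache = new HashMap<K, List<P>>();

        /** Returns (and memoizes) all partitions whose clock model matches the given key. */
        List<P> partitionsFor(K key, List<P> allPartitions, KeyExtractor<K, P> extractor) {
            List<P> cached = cache.get(key);
            if (cached != null) {
                return cached;
            }
            List<P> result = new ArrayList<P>();
            for (P partition : allPartitions) {
                K k = extractor.keyOf(partition);
                if (k != null && k.equals(key)) {
                    result.add(partition);
                }
            }
            cache.put(key, result);
            return result;
        }

        /** Must be called whenever partitions or their clock assignments change. */
        void clear() {
            cache.clear();
        }
    }
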
diff --git a/src/dr/app/beauti/options/ClockModelGroup.java b/src/dr/app/beauti/options/ClockModelGroup.java
deleted file mode 100644
index 611c427..0000000
--- a/src/dr/app/beauti/options/ClockModelGroup.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * ClockModelGroup.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.app.beauti.options;
-
-import dr.app.beauti.types.FixRateType;
-import dr.evolution.datatype.DataType;
-
-import java.io.Serializable;
-
-/**
- * @author Alexei Drummond
- * @author Walter Xie
- */
-public class ClockModelGroup implements Serializable {
-
-    private static final long serialVersionUID = -3034174176050520635L;
-    private String name;
-    private boolean fixMean = false;
-    private double fixMeanRate = 1.0;
-    private FixRateType rateTypeOption = FixRateType.RELATIVE_TO;
-
-//    public ClockModelGroup() { }
-
-    public ClockModelGroup(String name) {
-        this.name = name;
-    }
-
-    public String getName() {
-        return name;
-    }
-
-    public void setName(String name) {
-        this.name = name;
-    }
-
-    public boolean isFixMean() {
-        return fixMean;
-    }
-
-    public void setFixMean(boolean fixMean) {
-        this.fixMean = fixMean;
-    }
-
-    public FixRateType getRateTypeOption() {
-        return rateTypeOption;
-    }
-
-    public void setRateTypeOption(FixRateType rateTypeOption) {
-        this.rateTypeOption = rateTypeOption;
-        setFixMean(rateTypeOption == FixRateType.FIX_MEAN);
-    }
-
-    public double getFixMeanRate() {
-        return fixMeanRate;
-    }
-
-    public void setFixMeanRate(double fixMeanRate, BeautiOptions options) {
-        this.fixMeanRate = fixMeanRate;
-        for (PartitionClockModel model : options.getPartitionClockModels(this)) {
-            model.setRate(fixMeanRate, false);
-        }
-    }
-
-    public boolean contain(DataType dataType, BeautiOptions options) {
-        for (AbstractPartitionData pd : options.getDataPartitions(this)) {
-           if (pd.getDataType().getType() == dataType.getType()) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-}
diff --git a/src/dr/app/beauti/options/ClockModelOptions.java b/src/dr/app/beauti/options/ClockModelOptions.java
index dc985d5..9e7575a 100644
--- a/src/dr/app/beauti/options/ClockModelOptions.java
+++ b/src/dr/app/beauti/options/ClockModelOptions.java
@@ -1,7 +1,7 @@
 /*
  * ClockModelOptions.java
  *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ * Copyright (c) 2002-2016 Alexei Drummond, Andrew Rambaut and Marc Suchard
  *
  * This file is part of BEAST.
  * See the NOTICE file distributed with this work for additional
@@ -25,22 +25,14 @@
 
 package dr.app.beauti.options;
 
-import dr.app.beauti.types.FixRateType;
-import dr.app.beauti.types.OperatorType;
-import dr.app.beauti.types.RelativeRatesType;
-import dr.evolution.datatype.DataType;
-import dr.evolution.datatype.Microsatellite;
 import dr.evolution.tree.NodeRef;
 import dr.evolution.tree.Tree;
 import dr.evolution.tree.UPGMATree;
 import dr.evolution.util.Taxa;
-import dr.math.MathUtils;
 import dr.stats.DiscreteStatistics;
 
-import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;
-import java.util.Vector;
 
 
 /**
@@ -55,42 +47,14 @@ public class ClockModelOptions extends ModelOptions {
     // Instance variables
     private final BeautiOptions options;
 
-//    private FixRateType rateOptionClockModel = FixRateType.RELATIVE_TO;
-//    private double meanRelativeRate = 1.0;
-
-//    public List<ClockModelGroup> clockModelGroupList = new ArrayList<ClockModelGroup>();
-
     public ClockModelOptions(BeautiOptions options) {
         this.options = options;
-
-        initGlobalClockModelParaAndOpers();
-
-//        fixRateOfFirstClockPartition();
-    }
-
-    private void initGlobalClockModelParaAndOpers() {
-
-//        createParameter("allClockRates", "All the relative rates regarding clock models");
-//
-//        createOperator("deltaAllClockRates", RelativeRatesType.CLOCK_RELATIVE_RATES.toString(),
-//                "Delta exchange operator for all the relative rates regarding clock models", "allClockRates",
-//                OperatorType.DELTA_EXCHANGE, 0.75, rateWeights);
-//
-//        // only available for *BEAST and EBSP
-//        createUpDownAllOperator("upDownAllRatesHeights", "Up down all rates and heights",
-//                "Scales all rates inversely to node heights of the tree",
-//                demoTuning, branchWeights);
-
     }
 
     /**
      * return a list of parameters that are required
      */
     public void selectParameters() {
-        for (ClockModelGroup clockModelGroup : getClockModelGroups()) {
-            createParameter(clockModelGroup.getName(), // used in BeastGenerator (Branch Rates Model) part
-                    "All relative rates regarding clock models in group " + clockModelGroup.getName());
-        }
     }
 
     /**
@@ -99,593 +63,11 @@ public class ClockModelOptions extends ModelOptions {
      * @param ops the operator list
      */
     public void selectOperators(List<Operator> ops) {
-        for (ClockModelGroup clockModelGroup : getClockModelGroups()) {
-            if (clockModelGroup.getRateTypeOption() == FixRateType.FIX_MEAN) {
-                createOperator("delta_" + clockModelGroup.getName(),
-                        RelativeRatesType.CLOCK_RELATIVE_RATES.toString() + " in " + clockModelGroup.getName(),
-                        "Delta exchange operator for all relative rates regarding clock models",
-                        clockModelGroup.getName(), OperatorType.DELTA_EXCHANGE, 0.75, rateWeights);
-
-                Operator deltaOperator = getOperator("delta_" + clockModelGroup.getName());
-
-                // update delta clock operator weight
-                deltaOperator.weight = options.getPartitionClockModels(clockModelGroup).size();
-
-                ops.add(deltaOperator);
-            }
-
-            //up down all rates and trees operator only available for *BEAST and EBSP
-            if (clockModelGroup.getRateTypeOption() == FixRateType.RELATIVE_TO && //TODO what about Calibration?
-                    (options.useStarBEAST || options.isEBSPSharingSamePrior())) {
-                // only available for *BEAST and EBSP
-                createUpDownAllOperator("upDownAllRatesHeights_" + clockModelGroup.getName(),
-                        "Up down all rates and heights in " + clockModelGroup.getName(),
-                        "Scales all rates inversely to node heights of the tree",
-                        demoTuning, branchWeights);
-                Operator op = getOperator("upDownAllRatesHeights_" + clockModelGroup.getName());
-                op.setClockModelGroup(clockModelGroup);
-
-                ops.add(op);
-            }
-        }
-    }
-
-    //+++++++++++++++++++++++ Clock Model Group ++++++++++++++++++++++++++++++++
-    public void initClockModelGroup() { // only used in BeautiImporter
-        for (PartitionClockModel model : options.getPartitionClockModels()) {
-            addClockModelGroup(model);
-        }
-
-        for (ClockModelGroup clockModelGroup : getClockModelGroups()) {
-            if (clockModelGroup.contain(Microsatellite.INSTANCE, options)) {
-                if (options.getPartitionClockModels(clockModelGroup).size() == 1) {
-                    fixRateOfFirstClockPartition(clockModelGroup);
-                    options.getPartitionClockModels(clockModelGroup).get(0).setEstimatedRate(true);
-                } else {
-                    fixMeanRate(clockModelGroup);
-                }
-            } else if (!(clockModelGroup.getRateTypeOption() == FixRateType.TIP_CALIBRATED
-                    || clockModelGroup.getRateTypeOption() == FixRateType.NODE_CALIBRATED
-                    || clockModelGroup.getRateTypeOption() == FixRateType.RATE_CALIBRATED)) {
-                //TODO correct?
-                fixRateOfFirstClockPartition(clockModelGroup);
-            }
-        }
-    }
-
-    public void addClockModelGroup(PartitionClockModel model) {
-        if (model.getClockModelGroup() == null) {
-            String groupName = model.getDataType().getDescription().toLowerCase() + "_group";
-            List<ClockModelGroup> groupsList = getClockModelGroups();
-            ClockModelGroup clockModelGroup;
-            if (containsGroup(groupName, groupsList)) {
-                clockModelGroup = getGroup(groupName, groupsList);
-            } else {
-                clockModelGroup = new ClockModelGroup(groupName);
-            }
-            model.setClockModelGroup(clockModelGroup);
-        }
-    }
-
-    public List<ClockModelGroup> getClockModelGroups(DataType dataType) {
-        List<ClockModelGroup> activeClockModelGroups = new ArrayList<ClockModelGroup>();
-        for (PartitionClockModel model : options.getPartitionClockModels(dataType)) {
-            ClockModelGroup group = model.getClockModelGroup();
-            if (group != null && (!activeClockModelGroups.contains(group))) {
-                activeClockModelGroups.add(group);
-            }
-        }
-        return activeClockModelGroups;
-    }
-
-    public List<ClockModelGroup> getClockModelGroups(List<? extends AbstractPartitionData> givenDataPartitions) {
-        List<ClockModelGroup> activeClockModelGroups = new ArrayList<ClockModelGroup>();
-        for (PartitionClockModel model : options.getPartitionClockModels(givenDataPartitions)) {
-            ClockModelGroup group = model.getClockModelGroup();
-            if (group != null && (!activeClockModelGroups.contains(group))) {
-                activeClockModelGroups.add(group);
-            }
-        }
-        return activeClockModelGroups;
-    }
-
-    public List<ClockModelGroup> getClockModelGroups() {
-        return getClockModelGroups(options.dataPartitions);
-    }
-
-    public Vector<String> getClockModelGroupNames(List<ClockModelGroup> group) {
-        Vector<String> activeClockModelGroups = new Vector<String>();
-        for (ClockModelGroup clockModelGroup : group) {
-            String name = clockModelGroup.getName();
-            if (name != null && (!activeClockModelGroups.contains(name))) {
-                activeClockModelGroups.add(name);
-            }
-        }
-        return activeClockModelGroups;
-    }
-
-    public boolean containsGroup(String groupName, List<ClockModelGroup> groupsList) {
-        for (ClockModelGroup clockModelGroup : groupsList) {
-            if (clockModelGroup.getName().equalsIgnoreCase(groupName)) return true;
-        }
-        return false;
-    }
-
-    public ClockModelGroup getGroup(String groupName, List<ClockModelGroup> groupsList) {
-        for (ClockModelGroup clockModelGroup : groupsList) {
-            if (clockModelGroup.getName().equalsIgnoreCase(groupName))
-                return clockModelGroup;
-        }
-        return null;
-    }
-
-    public void fixRateOfFirstClockPartition(ClockModelGroup group) {
-        group.setRateTypeOption(FixRateType.RELATIVE_TO);
-        // fix rate of 1st partition
-        int i = 0;
-        for (PartitionClockModel model : options.getPartitionClockModels(group)) {
-            if (i < 1) {
-                model.setEstimatedRate(false);
-            } else {
-                model.setEstimatedRate(true);
-            }
-            i = i + 1;
-        }
-    }
-
-    public void fixMeanRate(ClockModelGroup group) {
-        group.setRateTypeOption(FixRateType.FIX_MEAN);
-
-        for (PartitionClockModel model : options.getPartitionClockModels(group)) {
-            model.setEstimatedRate(true); // all set to NOT fixed, because detla exchange
-            model.setRate(group.getFixMeanRate(), false);
-        }
-    }
-
-    public void tipTimeCalibration(ClockModelGroup group) {
-        group.setRateTypeOption(FixRateType.TIP_CALIBRATED);
-
-        for (PartitionClockModel model : options.getPartitionClockModels(group)) {
-            model.setEstimatedRate(true);
-        }
-    }
-
-
-    public void nodeCalibration(ClockModelGroup group) {
-        group.setRateTypeOption(FixRateType.NODE_CALIBRATED);
-
-        for (PartitionClockModel model : options.getPartitionClockModels(group)) {
-            model.setEstimatedRate(true);
-        }
-    }
-
-
-    public void rateCalibration(ClockModelGroup group) {
-        group.setRateTypeOption(FixRateType.RATE_CALIBRATED);
-
-        for (PartitionClockModel model : options.getPartitionClockModels(group)) {
-            model.setEstimatedRate(true);
-        }
-    }
-
-    public String statusMessageClockModel(ClockModelGroup group) {
-        String t;
-        if (group.getRateTypeOption() == FixRateType.RELATIVE_TO) {
-            if (options.getPartitionClockModels(group).size() == 1) { // single partition clock
-                if (options.getPartitionClockModels(group).get(0).isEstimatedRate()) {
-                    t = "Estimate clock rate";
-                } else {
-                    t = "Fix clock rate to " + options.getPartitionClockModels(group).get(0).getRate();
-                }
-
-            } else {
-                // todo is the following code excuted?
-                t = group.getRateTypeOption().toString() + " ";
-                int c = 0;
-                for (PartitionClockModel model : options.getPartitionClockModels(group)) {
-                    if (!model.isEstimatedRate()) {
-                        if (c > 0) t = t + ", ";
-                        c = c + 1;
-                        t = t + model.getName();
-                    }
-                }
-
-                if (c == 0) t = "Estimate all clock rates";
-                if (c == options.getPartitionClockModels(group).size()) t = "Fix all clock rates";
-            }
-
-        } else {
-            t = group.getRateTypeOption().toString();
-        }
-
-        return t + " in " + group.getName();
-    }
-
-    public String statusMessageClockModel() {
-        String t = "";
-        for (ClockModelGroup clockModelGroup : getClockModelGroups()) {
-            t += statusMessageClockModel(clockModelGroup) + "; ";
-        }
-        return t;
-    }
-
-
-    /////////////////////////////////////////////////////////////
-//    public FixRateType getRateOptionClockModel() {
-//        return rateOptionClockModel;
-//    }
-
-//	public void setRateOptionClockModel(FixRateType rateOptionClockModel) {
-//		this.rateOptionClockModel = rateOptionClockModel;
-//	}
-
-//    public void setMeanRelativeRate(double meanRelativeRate) {
-//        this.meanRelativeRate = meanRelativeRate;
-//    }
-
-//    public double calculateAvgBranchLength(List<AbstractPartitionData> partitions) { // todo
-//        double avgBranchLength = 1;
-//
-//        for (PartitionTreeModel tree : options.getPartitionTreeModels(partitions)) {
-//        }
-//        return MathUtils.round(avgBranchLength, 2);
-//    }
-
-
-    public double[] calculateInitialRootHeightAndRate(List<AbstractPartitionData> partitions) {
-        double avgInitialRootHeight = 1;
-        double avgInitialRate = 1;
-        double avgMeanDistance = 1;
-
-//        List<AbstractPartitionData> partitions = options.getDataPartitions(clockModelGroup);
-
-        if (partitions.size() > 0) {
-            avgMeanDistance = options.getAveWeightedMeanDistance(partitions);
-        }
-
-        if (options.getPartitionClockModels(partitions).size() > 0) {
-            avgInitialRate = options.clockModelOptions.getSelectedRate(partitions); // all clock models
-            //todo multi-group?
-            ClockModelGroup clockModelGroup = options.getPartitionClockModels(partitions).get(0).getClockModelGroup();
-
-            switch (clockModelGroup.getRateTypeOption()) {
-                case FIX_MEAN:
-                case RELATIVE_TO:
-                    if (partitions.size() > 0) {
-                        avgInitialRootHeight = avgMeanDistance / avgInitialRate;
-                    }
-                    break;
-
-                case TIP_CALIBRATED:
-                    avgInitialRootHeight = options.maximumTipHeight * 10.0;//TODO
-                    avgInitialRate = avgMeanDistance / avgInitialRootHeight;//TODO
-                    break;
-
-                case NODE_CALIBRATED:
-                    avgInitialRootHeight = getCalibrationEstimateOfRootTime(partitions);
-                    if (avgInitialRootHeight < 0) avgInitialRootHeight = 1; // no leaf nodes
-                    avgInitialRate = avgMeanDistance / avgInitialRootHeight;//TODO
-                    break;
-
-                case RATE_CALIBRATED:
-
-                    break;
-
-                default:
-                    throw new IllegalArgumentException("Unknown fix rate type");
-            }
-        }
-        avgInitialRootHeight = MathUtils.round(avgInitialRootHeight, 2);
-        avgInitialRate = MathUtils.round(avgInitialRate, 2);
-
-        return new double[]{avgInitialRootHeight, avgInitialRate};
-    }
-
-    public double getSelectedRate(List<AbstractPartitionData> partitions) {
-        double selectedRate = 1;
-        double avgInitialRootHeight;
-        double avgMeanDistance = 1;
-        // calibration: all isEstimatedRate = true
-
-//        List<AbstractPartitionData> partitions = options.getDataPartitions(clockModelGroup);
-
-        if (partitions.size() > 0 && options.getPartitionClockModels(partitions).size() > 0) {
-            //todo multi-group?
-            ClockModelGroup clockModelGroup = options.getPartitionClockModels(partitions).get(0).getClockModelGroup();
-            switch (clockModelGroup.getRateTypeOption()) {
-                case FIX_MEAN:
-                    selectedRate = clockModelGroup.getFixMeanRate();
-                    break;
-
-                case RELATIVE_TO:
-                    List<PartitionClockModel> models = options.getPartitionClockModels(partitions);
-                    // fix ?th partition
-                    if (models.size() == 1) {
-                        selectedRate = models.get(0).getRate();
-                    } else {
-                        selectedRate = getAverageRate(models);
-                    }
-                    break;
-
-                case TIP_CALIBRATED:
-                    if (partitions.size() > 0) {
-                        avgMeanDistance = options.getAveWeightedMeanDistance(partitions);
-                    }
-                    avgInitialRootHeight = options.maximumTipHeight * 10.0;//TODO
-                    selectedRate = avgMeanDistance / avgInitialRootHeight;//TODO
-                    break;
-
-                case NODE_CALIBRATED:
-                    if (partitions.size() > 0) {
-                        avgMeanDistance = options.getAveWeightedMeanDistance(partitions);
-                    }
-                    avgInitialRootHeight = getCalibrationEstimateOfRootTime(partitions);
-                    if (avgInitialRootHeight < 0) avgInitialRootHeight = 1; // no leaf nodes
-                    selectedRate = avgMeanDistance / avgInitialRootHeight;//TODO
-                    break;
-
-                case RATE_CALIBRATED:
-                    //TODO
-                    break;
-
-                default:
-                    throw new IllegalArgumentException("Unknown fix rate type");
-            }
-        }
-        return selectedRate;
-    }
-
-//    private List<AbstractPartitionData> getAllPartitionDataGivenClockModels(List<PartitionClockModel> models) {
-//
-//        List<AbstractPartitionData> allData = new ArrayList<AbstractPartitionData>();
-//
-//        for (PartitionClockModel model : models) {
-//            for (AbstractPartitionData partition : model.getDataPartitions()) {
-//                if (partition != null && (!allData.contains(partition))) {
-//                    allData.add(partition);
-//                }
-//            }
-//        }
-//
-//        return allData;
-//    }
-
-    private double getCalibrationEstimateOfRootTime(List<AbstractPartitionData> partitions) {
-
-        // TODO - shouldn't this method be in the PartitionTreeModel??
-
-        List<Taxa> taxonSets = options.taxonSets;
-        if (taxonSets != null && taxonSets.size() > 0) { // tmrca statistic
-
-            // estimated root times based on each of the taxon sets
-            double[] rootTimes = new double[taxonSets.size()];
-
-            for (int i = 0; i < taxonSets.size(); i++) {
-
-                Taxa taxa = taxonSets.get(i);
-
-                Parameter tmrcaStatistic = options.getStatistic(taxa);
-
-                double taxonSetCalibrationTime = tmrcaStatistic.getPriorExpectationMean();
-
-                // the calibration distance is the patristic genetic distance back to the common ancestor of
-                // the set of taxa.
-                double calibrationDistance = 0;
-
-                // the root distance is the patristic genetic distance back to the root of the tree.
-                double rootDistance = 0;
-
-                int siteCount = 0;
-
-                for (AbstractPartitionData partition : partitions) {
-                    if (partition.getDistances() != null) {   // ignore partitions that don't have distances
-                        Tree tree = new UPGMATree(partition.getDistances());
-
-                        Set<String> leafNodes = Taxa.Utils.getTaxonListIdSet(taxa);
-
-                        if (leafNodes.size() < 1) {
-                            return -1;
-                        }
-
-                        NodeRef node = Tree.Utils.getCommonAncestorNode(tree, leafNodes);
-
-                        if (node == null) {
-                            throw new IllegalArgumentException("Can't find MRCA node for taxon set, " + taxa.getId() + ", in partition: " + partition.getName());
-                        }
-
-                        calibrationDistance += tree.getNodeHeight(node);
-                        rootDistance += tree.getNodeHeight(tree.getRoot());
-
-                        siteCount += partition.getSiteCount();
-                    }
-                }
-
-                rootDistance /= partitions.size();
-                calibrationDistance /= partitions.size();
-
-                if (calibrationDistance == 0.0) {
-                    calibrationDistance = 0.25 / siteCount;
-                }
-
-                if (rootDistance == 0) {
-                    rootDistance = 0.25 / siteCount;
-                }
-
-                rootTimes[i] += (rootDistance / calibrationDistance) * taxonSetCalibrationTime;
-            }
-
-            // return the mean estimate of the root time for this set of partitions
-            return DiscreteStatistics.mean(rootTimes);
-
-        } else { // prior on treeModel.rootHight
-            double avgInitialRootHeight = 0;
-            double count = 0;
-            for (PartitionTreeModel tree : options.getPartitionTreeModels(partitions)) {
-                avgInitialRootHeight = avgInitialRootHeight + tree.getInitialRootHeight();
-                count = count + 1;
-            }
-            if (count != 0) avgInitialRootHeight = avgInitialRootHeight / count;
-            return avgInitialRootHeight;
-        }
-
-    }
-
-    // FixRateType.FIX_MEAN
-//    public double getMeanRelativeRate() {
-//        return meanRelativeRate;
-//    }
-
-    // FixRateType.ESTIMATE
-
-    public double getAverageRate(List<PartitionClockModel> models) { //TODO average per tree, but how to control the estimate clock => tree?
-        double averageRate = 0;
-        double count = 0;
-
-        for (PartitionClockModel model : models) {
-            if (!model.isEstimatedRate()) {
-                averageRate = averageRate + model.getRate();
-                count = count + 1;
-            }
-        }
-
-        if (count > 0) {
-            averageRate = averageRate / count;
-        } else {
-            averageRate = 1; //TODO how to calculate rate when estimate all
-        }
-
-        return averageRate;
-    }
-
-    // Calibration Series Data
-    public double getAverageRateForCalibrationSeriesData() {
-        //TODO
-        return (double) 0;
-    }
-
-    // Calibration TMRCA
-    public double getAverageRateForCalibrationTMRCA() {
-        //TODO
-        return (double) 0;
     }
 
+    
     public boolean isTipCalibrated() {
         return options.maximumTipHeight > 0;
     }
 
-    public boolean isRateCalibrated() {
-        return false;//TODO
-    }
-
-    public int[] getPartitionClockWeights(ClockModelGroup group) {
-        int[] weights = new int[options.getPartitionClockModels().size()]; // use List?
-
-        int k = 0;
-        for (PartitionClockModel model : options.getPartitionClockModels()) {
-            for (AbstractPartitionData partition : options.getDataPartitions(model)) {
-                int n = partition.getSiteCount();
-                weights[k] += n;
-            }
-            k += 1;
-        }
-
-        assert (k == weights.length);
-
-        return weights;
-    }
-
-//    public void fixRateOfFirstClockPartition() {
-//        this.rateOptionClockModel = FixRateType.RELATIVE_TO;
-//        // fix rate of 1st partition
-//        int i = 0;
-//        for (PartitionClockModel model : options.getPartitionClockModels()) {
-//            if (i < 1) {
-//                model.setEstimatedRate(false);
-//            } else {
-//                model.setEstimatedRate(true);
-//            }
-//            i = i + 1;
-//        }
-//    }
-//
-//    public void fixMeanRate() {
-//        this.rateOptionClockModel = FixRateType.FIX_MEAN;
-//
-//        for (PartitionClockModel model : options.getPartitionClockModels()) {
-//            model.setEstimatedRate(true); // all set to NOT fixed, because detla exchange
-//        }
-//    }
-//
-//    public void tipTimeCalibration() {
-//        this.rateOptionClockModel = FixRateType.TIP_CALIBRATED;
-//
-//        for (PartitionClockModel model : options.getPartitionClockModels()) {
-//            model.setEstimatedRate(true);
-//        }
-//    }
-//
-//
-//    public void nodeCalibration() {
-//        this.rateOptionClockModel = FixRateType.NODE_CALIBRATED;
-//
-//        for (PartitionClockModel model : options.getPartitionClockModels()) {
-//            model.setEstimatedRate(true);
-//        }
-//    }
-//
-//
-//    public void rateCalibration() {
-//        this.rateOptionClockModel = FixRateType.RATE_CALIBRATED;
-//
-//        for (PartitionClockModel model : options.getPartitionClockModels()) {
-//            model.setEstimatedRate(true);
-//        }
-//    }
-
-//    public String statusMessageClockModel() {
-//        if (rateOptionClockModel == FixRateType.RELATIVE_TO) {
-//            if (options.getPartitionClockModels().size() == 1) { // single partition clock
-//                if (options.getPartitionClockModels().get(0).isEstimatedRate()) {
-//                    return "Estimate clock rate";
-//                } else {
-//                    return "Fix clock rate to " + options.getPartitionClockModels().get(0).getRate();
-//                }
-//
-//            } else {
-//                String t = rateOptionClockModel.toString() + " ";
-//                int c = 0;
-//                for (PartitionClockModel model : options.getPartitionClockModels()) {
-//                    if (!model.isEstimatedRate()) {
-//                        if (c > 0) t = t + ", ";
-//                        c = c + 1;
-//                        t = t + model.getName();
-//                    }
-//                }
-//
-//                if (c == 0) t = "Estimate all clock rates";
-//                if (c == options.getPartitionClockModels().size()) t = "Fix all clock rates";
-//
-//                return t;
-//            }
-//
-//        } else {
-//            return rateOptionClockModel.toString();
-//        }
-//    }
-
-    //+++++++++++++++++++++++ Validation ++++++++++++++++++++++++++++++++
-    // true => valid, false => warning message
-//    public boolean validateFixMeanRate(boolean fixedMeanRateCheck) {
-//        return !(fixedMeanRateCheck && options.getPartitionClockModels().size() < 2);
-//    }
-
-//    public boolean validateRelativeTo() {
-//        for (PartitionClockModel model : options.getPartitionClockModels()) {
-//            if (!model.isEstimatedRate()) { // fixed
-//                return true;
-//            }
-//        }
-//        return false;
-//    }
-
 }
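
Most of this file's logic (clock model groups, fix-mean bookkeeping, calibration heuristics) is deleted; only isTipCalibrated() remains. For reference, the removed calculateInitialRootHeightAndRate heuristic reduced to root height ≈ mean genetic distance / clock rate, as in this illustrative sketch:

    // Illustrative only: the deleted heuristic's core arithmetic, not a reimplementation of BEAUti.
    public class InitialRootHeightSketch {

        /** Root height ~ mean pairwise distance (substitutions/site) divided by the clock rate. */
        static double initialRootHeight(double meanDistance, double clockRate) {
            if (clockRate <= 0.0) {
                throw new IllegalArgumentException("clock rate must be positive");
            }
            return meanDistance / clockRate;
        }

        public static void main(String[] args) {
            // e.g. 5% mean divergence at 1e-3 substitutions/site/year suggests a root ~50 years old
            System.out.println(initialRootHeight(0.05, 1.0e-3)); // 50.0
        }
    }
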
diff --git a/src/dr/app/beauti/options/ModelOptions.java b/src/dr/app/beauti/options/ModelOptions.java
index 2f30e88..89ab1d8 100644
--- a/src/dr/app/beauti/options/ModelOptions.java
+++ b/src/dr/app/beauti/options/ModelOptions.java
@@ -50,11 +50,11 @@ public class ModelOptions implements Serializable {
     protected final Map<TaxonList, Parameter> statistics = new HashMap<TaxonList, Parameter>();
 
     public static final double demoTuning = 0.75;
-    public static final double demoWeights = 30.0;
+    public static final double demoWeights = 3.0;
 
-    protected static final double branchWeights = 300.0;
-    protected static final double treeWeights = 150.0;
-    protected static final double rateWeights = 30.0;
+	protected static final double branchWeights = 30.0;
+	protected static final double treeWeights = 15.0;
+	protected static final double rateWeights = 3.0;
 
     private final List<ComponentOptions> components = new ArrayList<ComponentOptions>();
 
@@ -170,7 +170,15 @@ public class ModelOptions implements Serializable {
     //+++++++++++++++++++ Create Operator ++++++++++++++++++++++++++++++++
     public Operator createOperator(String parameterName, OperatorType type, double tuning, double weight) {
         Parameter parameter = getParameter(parameterName);
-        return new Operator.Builder(parameterName, parameterName, parameter, type, tuning, weight).build(operators);
+        return new Operator.Builder(parameterName, parameterName, parameter, type, tuning, weight)
+                .build(operators);
+    }
+
+    public Operator createOperator(String parameterName, OperatorType type, double tuning, double weight, boolean autoOptimize) {
+        Parameter parameter = getParameter(parameterName);
+        return new Operator.Builder(parameterName, parameterName, parameter, type, tuning, weight)
+                .autoOptimize(autoOptimize)
+                .build(operators);
     }
 
     public Operator createScaleOperator(String parameterName, double tuning, double weight) {
@@ -232,7 +240,7 @@ public class ModelOptions implements Serializable {
     }//TODO a switch like createUpDownOperator?
 
     public Operator createDuplicate(String name, String description, Parameter parameter, Operator source) {
-        return new Operator.Builder(name, description, parameter, source.operatorType, source.tuning, source.weight).build(operators);
+        return new Operator.Builder(name, description, parameter, source.getOperatorType(), source.getTuning(), source.getWeight()).build(operators);
     }
 
 
diff --git a/src/dr/app/beauti/options/Operator.java b/src/dr/app/beauti/options/Operator.java
index 7fb3f0f..169a9a5 100644
--- a/src/dr/app/beauti/options/Operator.java
+++ b/src/dr/app/beauti/options/Operator.java
@@ -43,20 +43,21 @@ public class Operator implements Serializable {
     // final
     private String baseName;
     private final String description;
-    public final OperatorType operatorType;
-    public final Parameter parameter1;
-    public final Parameter parameter2;
+    private final OperatorType operatorType;
+    private final Parameter parameter1;
+    private final Parameter parameter2;
     private final PartitionOptions options;
-    public final String tag;
+    private final String tag;
 
     // editable
-    public double tuning;
-    public double weight;
-    public boolean tuningEdited;
-    public boolean inUse;
-    public String idref;
+    private double tuning;
+    private double weight;
+    private boolean tuningEdited = false;
+    private boolean isUsed = true;
 
-    private ClockModelGroup clockModelGroup = null;
+    private boolean autoOptimize = true;
+
+    private String idref;
 
     public static class Builder {
         // Required para
@@ -74,8 +75,7 @@ public class Operator implements Serializable {
         private String tag = null;
         private String idref = null;
 
-        private boolean inUse = true;
-        private boolean tuningEdited = false;
+        private boolean autoOptimize = true;
 
         public Builder(String name, String description, Parameter parameter, OperatorType type, double tuning, double weight) {
             this.baseName = name;
@@ -86,21 +86,12 @@ public class Operator implements Serializable {
             this.weight = weight;
         }
 
+
         public Builder parameter2(Parameter parameter2) {
             this.parameter2 = parameter2;
             return this;
         }
 
-        public Builder isInUse(boolean inUse) {
-            this.inUse = inUse;
-            return this;
-        }
-
-        public Builder tuningEdited(boolean tuningEdited) {
-            this.tuningEdited = tuningEdited;
-            return this;
-        }
-
         public Builder partitionOptions(PartitionOptions options) {
             this.options = options;
             return this;
@@ -110,11 +101,17 @@ public class Operator implements Serializable {
             this.tag = tag;
             return this;
         }
+
         public Builder idref(String idref) {
             this.idref = idref;
             return this;
         }
 
+        public Builder autoOptimize(boolean autoOptimize) {
+            this.autoOptimize = autoOptimize;
+            return this;
+        }
+
         public Operator build() {
             return new Operator(this);
         }
@@ -137,8 +134,9 @@ public class Operator implements Serializable {
         options = builder.options;
         tag = builder.tag;
         idref = builder.idref;
-        inUse = builder.inUse;
-        tuningEdited = builder.tuningEdited;
+        isUsed = true;
+        tuningEdited = false;
+        autoOptimize = builder.autoOptimize;
     }
 
     //+++++++++++++++++++++++++++++++++++++++++++++++++++++
@@ -160,6 +158,63 @@ public class Operator implements Serializable {
         return tuning > 0;
     }
 
+    public String getTag() {
+        return tag;
+    }
+
+    public String getIdref() {
+        return idref;
+    }
+
+    public double getWeight() {
+        return weight;
+    }
+
+    public void setWeight(double weight) {
+        this.weight = weight;
+    }
+
+    public double getTuning() {
+        return tuning;
+    }
+
+    public void setTuning(double tuning) {
+        this.tuning = tuning;
+        tuningEdited = true;
+    }
+
+    public boolean isTuningEdited() {
+        return tuningEdited;
+    }
+
+    public OperatorType getOperatorType() {
+        return operatorType;
+    }
+
+    public boolean isUsed() {
+        return isParameterFixed() ? false : isUsed;
+    }
+
+    public void setUsed(boolean used) {
+        this.isUsed = used;
+    }
+
+    public boolean isAutoOptimize() {
+        return autoOptimize;
+    }
+
+    public boolean isParameterFixed() {
+        return parameter1.isFixed();
+    }
+
+    public Parameter getParameter1() {
+        return parameter1;
+    }
+
+    public Parameter getParameter2() {
+        return parameter2;
+    }
+
     public void setPrefix(String prefix) {
         this.prefix = prefix;
     }
@@ -188,12 +243,4 @@ public class Operator implements Serializable {
         return baseName;
     }
 
-    public ClockModelGroup getClockModelGroup() {
-        return clockModelGroup;
-    }
-
-    public void setClockModelGroup(ClockModelGroup clockModelGroup) {
-        this.clockModelGroup = clockModelGroup;
-    }
-
 }
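
    The reworked Operator class above is now driven through its Builder and accessors
    instead of public fields. A hedged usage fragment (not part of the patch; it assumes
    the body of a ModelOptions subclass, where getParameter("allMus") returns the
    relative-rates Parameter introduced in the hunks below; everything else is illustrative):

        Parameter allMus = getParameter("allMus");
        Operator deltaMus = new Operator.Builder(
                "deltaMus", "Scale partition rates relative to each other maintaining mean",
                allMus, OperatorType.DELTA_EXCHANGE, 0.75, 3.0)
                .autoOptimize(false)    // new Builder option; defaults to true
                .build();
        double weight = deltaMus.getWeight();   // accessor replaces the old public 'weight' field
        deltaMus.setTuning(0.9);                // also flips isTuningEdited() to true
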
diff --git a/src/dr/app/beauti/options/Parameter.java b/src/dr/app/beauti/options/Parameter.java
index 1c87d70..d94279a 100644
--- a/src/dr/app/beauti/options/Parameter.java
+++ b/src/dr/app/beauti/options/Parameter.java
@@ -31,6 +31,8 @@ import dr.app.beauti.types.PriorType;
 import dr.math.distributions.Distribution;
 
 import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.Map;
 
 /**
@@ -51,6 +53,10 @@ public class Parameter implements Serializable {
     private String baseName;
     private final String description;
 
+    private int dimensionWeight = 1;
+
+    private final List<Parameter> subParameters = new ArrayList<Parameter>();
+
     // final Builder para
     public String taxaId; // needs to change TMRCA stat name. Issue 520
     public final boolean isNodeHeight;
@@ -72,9 +78,17 @@ public class Parameter implements Serializable {
     public final boolean isPriorFixed;
     public PriorType priorType;
 
+    public double getInitial() {
+        return initial;
+    }
+
+    public void setInitial(double initial) {
+        this.initial = initial;
+    }
+
     // Editable fields
-    public boolean isFixed;
-    public double initial;
+    private boolean isFixed;
+    private double initial;
     public boolean isTruncated;
     public double truncationUpper;
     public double truncationLower;
@@ -472,7 +486,9 @@ public class Parameter implements Serializable {
         double upper = Double.POSITIVE_INFINITY;
 
         if (isZeroOne) {
-            if (upper > 1) upper = 1.0;
+            if (upper > 1) {
+                upper = 1.0;
+            }
         }
 
         if (priorType == PriorType.UNIFORM_PRIOR) {
@@ -486,6 +502,16 @@ public class Parameter implements Serializable {
         return upper;
     }
 
+    public boolean isFixed() {
+        return priorType == PriorType.NONE_FIXED;
+    }
+
+    public void setFixed(boolean isFixed) {
+        if (isFixed) {
+            priorType = PriorType.NONE_FIXED;
+        }
+    }
+
     public boolean isMeanInRealSpace() {
         return meanInRealSpace;
     }
@@ -495,12 +521,38 @@ public class Parameter implements Serializable {
     }
 
     public int[] getParameterDimensionWeights() {
-        if (getOptions() != null && getOptions() instanceof PartitionSubstitutionModel) {
-            return ((PartitionSubstitutionModel)getOptions()).getPartitionCodonWeights();
+//        if (getOptions() != null && getOptions() instanceof PartitionSubstitutionModel) {
+//            return ((PartitionSubstitutionModel)getOptions()).getPartitionCodonWeights();
+//        }
+        if (getSubParameters().size() > 0) {
+            int[] weights = new int[getSubParameters().size()];
+            for (int i = 0; i < weights.length; i++) {
+                weights[i] = getSubParameters().get(i).getDimensionWeight();
+            }
+            return weights;
         }
-        return new int[] { 1 };
+        return new int[] { dimensionWeight };
+    }
+
+    public int getDimensionWeight() {
+        return dimensionWeight;
+    }
+
+    public void setDimensionWeight(int dimensionWeight) {
+        this.dimensionWeight = dimensionWeight;
     }
 
+    public void addSubParameter(Parameter parameter) {
+        subParameters.add(parameter);
+    }
+
+    public void clearSubParameters() {
+        subParameters.clear();
+    }
+
+    public List<Parameter> getSubParameters() {
+        return subParameters;
+    }
 
     @Override
     public String toString() {
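
    With the sub-parameter machinery added above, a compound parameter such as "allMus" reports
    one dimension weight per sub-parameter, otherwise just its own weight. A self-contained
    sketch of that logic (hypothetical class names, mirroring getParameterDimensionWeights):

        import java.util.ArrayList;
        import java.util.List;

        class DimensionWeightSketch {
            static class Param {
                int dimensionWeight = 1;
                final List<Param> subParameters = new ArrayList<Param>();

                int[] getParameterDimensionWeights() {
                    if (!subParameters.isEmpty()) {
                        int[] weights = new int[subParameters.size()];
                        for (int i = 0; i < weights.length; i++) {
                            weights[i] = subParameters.get(i).dimensionWeight;
                        }
                        return weights;
                    }
                    return new int[] { dimensionWeight };
                }
            }

            public static void main(String[] args) {
                Param allMus = new Param();
                Param cp12 = new Param(); cp12.dimensionWeight = 2;  // e.g. codon positions 1+2
                Param cp3  = new Param(); cp3.dimensionWeight = 1;   // codon position 3
                allMus.subParameters.add(cp12);
                allMus.subParameters.add(cp3);
                // prints [2, 1]
                System.out.println(java.util.Arrays.toString(allMus.getParameterDimensionWeights()));
            }
        }
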
diff --git a/src/dr/app/beauti/options/PartitionClockModel.java b/src/dr/app/beauti/options/PartitionClockModel.java
index a3b1b58..a400e9f 100644
--- a/src/dr/app/beauti/options/PartitionClockModel.java
+++ b/src/dr/app/beauti/options/PartitionClockModel.java
@@ -40,15 +40,13 @@ public class PartitionClockModel extends PartitionOptions {
     private static final long serialVersionUID = -6904595851602060488L;
 
     private static final boolean DEFAULT_CMTC_RATE_REFERENCE_PRIOR = true;
+    private static final boolean USE_DIRICHLET_PRIOR_FOR_MUS = false;
 
     private ClockType clockType = ClockType.STRICT_CLOCK;
     private ClockDistributionType clockDistributionType = ClockDistributionType.LOGNORMAL;
     private boolean continuousQuantile = false;
 
-    private double rate; // move to initModelParametersAndOpererators() to initial
-
-    private ClockModelGroup clockModelGroup = null;
-
+    private final AbstractPartitionData partition;
     private final int dataLength;
 
     public PartitionClockModel(final BeautiOptions options, AbstractPartitionData partition) {
@@ -57,6 +55,7 @@ public class PartitionClockModel extends PartitionOptions {
         this.partitionName = partition.getName();
         dataLength = partition.getSiteCount();
 
+        this.partition = partition;
         initModelParametersAndOpererators();
     }
 
@@ -74,11 +73,10 @@ public class PartitionClockModel extends PartitionOptions {
 
         this.clockType = source.clockType;
         clockDistributionType = source.clockDistributionType;
-        rate = source.rate;
 
         dataLength = source.dataLength;
 
-        clockModelGroup = source.clockModelGroup;
+        this.partition = source.partition;
 
         initModelParametersAndOpererators();
     }
@@ -89,7 +87,7 @@ public class PartitionClockModel extends PartitionOptions {
 //    }
 
     protected void initModelParametersAndOpererators() {
-        rate = 1.0;
+        double rate = 1.0;
 
         if (DEFAULT_CMTC_RATE_REFERENCE_PRIOR || dataLength <= 10) { // TODO Discuss threshold
             new Parameter.Builder("clock.rate", "substitution rate")
@@ -148,6 +146,21 @@ public class PartitionClockModel extends PartitionOptions {
         createScaleOperator(ClockType.LOCAL_CLOCK + ".relativeRates", demoTuning, treeWeights);
         createOperator(ClockType.LOCAL_CLOCK + ".changes", OperatorType.BITFLIP, 1, treeWeights);
         createDiscreteStatistic("rateChanges", "number of random local clocks"); // POISSON_PRIOR
+
+        // A vector of relative rates across all partitions...
+
+        if (USE_DIRICHLET_PRIOR_FOR_MUS) {
+            createNonNegativeParameterDirichletPrior("allMus", "relative rates amongst partitions parameter", this, PriorScaleType.SUBSTITUTION_PARAMETER_SCALE, 1.0);
+            createOperator("scaleMus", "allMus",
+                    "Scale partition rates relative to each other", "allMus",
+                    OperatorType.SCALE_INDEPENDENTLY, 0.75, 3.0);
+        } else {
+            createNonNegativeParameterInfinitePrior("allMus", "relative rates amongst partitions parameter", this, PriorScaleType.SUBSTITUTION_PARAMETER_SCALE, 1.0);
+            createOperator("deltaMus", "allMus",
+                    "Scale partition rates relative to each other maintaining mean", "allMus",
+                    OperatorType.DELTA_EXCHANGE, 0.75, 3.0);
+        }
+
     }
 
     /**
@@ -156,7 +169,8 @@ public class PartitionClockModel extends PartitionOptions {
      * @param params the parameter list
      */
     public void selectParameters(List<Parameter> params) {
-        setAvgRootAndRate();
+//        setAvgRootAndRate();
+        double rate = 1.0;
 
         if (options.hasData()) {
             switch (clockType) {
@@ -212,37 +226,16 @@ public class PartitionClockModel extends PartitionOptions {
                     throw new IllegalArgumentException("Unknown clock model");
             }
 
-            Parameter rateParam = getClockRateParam();
-
-//            if (this.getDataPartitions().get(0) instanceof TraitData) {
-//                rateParam.priorType = PriorType.ONE_OVER_X_PRIOR; // 1/location.clock.rate
-//            }
-            // if not fixed then do mutation rate move and up/down move
-
-//            rateParam.isFixed = !isEstimatedRate;
-            if (rate != rateParam.initial) {
-                rate = rateParam.initial;
-//                rateParam.setPriorEdited(true);
-            }
-//            if (options.clockModelOptions.getRateOptionClockModel() == FixRateType.FIX_MEAN
-//                     || options.clockModelOptions.getRateOptionClockModel() == FixRateType.RELATIVE_TO) {
-//
-//                rateParam.priorEdited = true; // important
-//            }
-//
-//            if (!rateParam.priorEdited) {
-//                rateParam.initial = selectedRate;
-//            }
-
-            if (!rateParam.isFixed) params.add(rateParam);
+            Parameter rateParam = getClockRateParameter();
+            params.add(rateParam);
         }
     }
 
-    public Parameter getClockRateParam() {
-        return getClockRateParam(clockType, clockDistributionType);
+    public Parameter getClockRateParameter() {
+        return getClockRateParameter(clockType, clockDistributionType);
     }
 
-    private Parameter getClockRateParam(ClockType clockType, ClockDistributionType clockDistributionType) {
+    private Parameter getClockRateParameter(ClockType clockType, ClockDistributionType clockDistributionType) {
         Parameter rateParam = null;
         switch (clockType) {
             case STRICT_CLOCK:
@@ -277,6 +270,15 @@ public class PartitionClockModel extends PartitionOptions {
                 throw new IllegalArgumentException("Unknown clock model");
         }
 
+        if (!rateParam.isPriorEdited()) {
+            if (options.treeModelOptions.isNodeCalibrated(partition.treeModel) < 0
+                    && !options.clockModelOptions.isTipCalibrated()) {
+                rateParam.setFixed(true);
+            } else {
+                rateParam.priorType = PriorType.CTMC_RATE_REFERENCE_PRIOR;
+            }
+        }
+
         return rateParam;
     }
 
@@ -288,95 +290,63 @@ public class PartitionClockModel extends PartitionOptions {
     public void selectOperators(List<Operator> ops) {
         if (options.hasData()) {
 
-            if (clockModelGroup.getRateTypeOption() != FixRateType.FIX_MEAN
-                    && isEstimatedRate()) {
-                switch (clockType) {
-                    case STRICT_CLOCK:
-                        ops.add(getOperator("clock.rate"));
-                        break;
+            switch (clockType) {
+                case STRICT_CLOCK:
+                    ops.add(getOperator("clock.rate"));
+                    break;
 
-                    case RANDOM_LOCAL_CLOCK:
-                        ops.add(getOperator("clock.rate"));
-                        addRandomLocalClockOperators(ops);
-                        break;
+                case RANDOM_LOCAL_CLOCK:
+                    ops.add(getOperator("clock.rate"));
+                    addRandomLocalClockOperators(ops);
+                    break;
 
-                    case FIXED_LOCAL_CLOCK:
-                        ops.add(getOperator("clock.rate"));
-                        for (Taxa taxonSet : options.taxonSets) {
-                            if (options.taxonSetsMono.get(taxonSet)) {
-                                ops.add(getOperator(taxonSet.getId() + ".rate"));
-                            }
+                case FIXED_LOCAL_CLOCK:
+                    ops.add(getOperator("clock.rate"));
+                    for (Taxa taxonSet : options.taxonSets) {
+                        if (options.taxonSetsMono.get(taxonSet)) {
+                            ops.add(getOperator(taxonSet.getId() + ".rate"));
                         }
-                        break;
+                    }
+                    break;
 
-                    case UNCORRELATED:
-                        switch (clockDistributionType) {
-                            case LOGNORMAL:
-                                ops.add(getOperator(ClockType.UCLD_MEAN));
-                                ops.add(getOperator(ClockType.UCLD_STDEV));
-                                break;
-                            case GAMMA:
-                                ops.add(getOperator(ClockType.UCGD_MEAN));
-                                ops.add(getOperator(ClockType.UCGD_SHAPE));
-                                break;
-                            case CAUCHY:
+                case UNCORRELATED:
+                    switch (clockDistributionType) {
+                        case LOGNORMAL:
+                            ops.add(getOperator(ClockType.UCLD_MEAN));
+                            ops.add(getOperator(ClockType.UCLD_STDEV));
+                            break;
+                        case GAMMA:
+                            ops.add(getOperator(ClockType.UCGD_MEAN));
+                            ops.add(getOperator(ClockType.UCGD_SHAPE));
+                            break;
+                        case CAUCHY:
 //                                throw new UnsupportedOperationException("Uncorrelated Couchy clock not implemented yet");
-                                break;
-                            case EXPONENTIAL:
-                                ops.add(getOperator(ClockType.UCED_MEAN));
-                                break;
-                        }
-                        break;
+                            break;
+                        case EXPONENTIAL:
+                            ops.add(getOperator(ClockType.UCED_MEAN));
+                            break;
+                    }
+                    break;
 
-                    case AUTOCORRELATED:
-                        throw new UnsupportedOperationException("Autocorrelated clock not implemented yet");
+                case AUTOCORRELATED:
+                    throw new UnsupportedOperationException("Autocorrelated clock not implemented yet");
 //                        break;
 
-                    default:
-                        throw new IllegalArgumentException("Unknown clock model");
-                }
-            } else {
-                switch (clockType) {
-                    case STRICT_CLOCK:
-                        // no parameter to operator on
-                        break;
-
-                    case UNCORRELATED:
-                        switch (clockDistributionType) {
-                            case LOGNORMAL:
-                                ops.add(getOperator(ClockType.UCLD_STDEV));
-                                break;
-                            case GAMMA:
-                                ops.add(getOperator(ClockType.UCGD_SHAPE));
-                                break;
-                            case CAUCHY:
-//                                throw new UnsupportedOperationException("Uncorrelated Cauchy clock not implemented yet");
-                                break;
-                            case EXPONENTIAL:
-                                break;
-                        }
-                        break;
-
-                    case AUTOCORRELATED:
-                        // no parameter to operator on
-                        break;
+                default:
+                    throw new IllegalArgumentException("Unknown clock model");
+            }
+        }
 
-                    case RANDOM_LOCAL_CLOCK:
-                        addRandomLocalClockOperators(ops);
-                        break;
+        Parameter allMus = getParameter("allMus");
+        if (allMus.getSubParameters().size() > 1) {
+            Operator muOperator;
 
-                    case FIXED_LOCAL_CLOCK:
-                        for (Taxa taxonSet : options.taxonSets) {
-                            if (options.taxonSetsMono.get(taxonSet)) {
-                                ops.add(getOperator(taxonSet.getId() + ".rate"));
-                            }
-                        }
-                        break;
-
-                    default:
-                        throw new IllegalArgumentException("Unknown clock model");
-                }
+            if (USE_DIRICHLET_PRIOR_FOR_MUS) {
+                muOperator = getOperator("scaleMus");
+            } else {
+                muOperator = getOperator("deltaMus");
             }
+            ops.add(muOperator);
         }
     }
 
@@ -411,55 +381,6 @@ public class PartitionClockModel extends PartitionOptions {
         this.continuousQuantile = continuousQuantile;
     }
 
-    // important to set all clock rate rateParam.isFixed same, which keeps isEstimatedRate() correct when change clock type
-    public void setEstimatedRate(boolean isEstimatedRate) {
-//        for (ClockType clockType : new ClockType[]{ClockType.STRICT_CLOCK, ClockType.UNCORRELATED, ClockType.RANDOM_LOCAL_CLOCK}) {
-//            Parameter rateParam = getClockRateParam(clockType, );
-//            rateParam.isFixed = !isEstimatedRate;
-//        }
-        //TODO a trouble to deal with clockDistributionType, when try to set all rate parameters
-        Parameter rateParam = getParameter("clock.rate");
-        rateParam.isFixed = !isEstimatedRate;
-        rateParam = getParameter(ClockType.UCLD_MEAN);
-        rateParam.isFixed = !isEstimatedRate;
-        rateParam = getParameter(ClockType.UCED_MEAN);
-        rateParam.isFixed = !isEstimatedRate;
-    }
-
-    public boolean isEstimatedRate() {
-        Parameter rateParam = getClockRateParam();
-        return !rateParam.isFixed;
-    }
-
-    public void setUseReferencePrior(boolean useReferencePrior) {
-        Parameter rateParam = getClockRateParam();
-        if (useReferencePrior) {
-            rateParam.priorType = PriorType.CTMC_RATE_REFERENCE_PRIOR;
-        } else {
-            rateParam.priorType = PriorType.UNDEFINED;
-        }
-    }
-
-    public double getRate() {
-        return rate;
-    }
-
-    public void setRate(double rate, boolean isUpdatedByUser) {
-        this.rate = rate;
-        Parameter rateParam = getClockRateParam();
-        rateParam.initial = rate;
-        if (isUpdatedByUser) rateParam.setPriorEdited(true);
-    }
-
-    public ClockModelGroup getClockModelGroup() {
-        return clockModelGroup;
-    }
-
-    public void setClockModelGroup(ClockModelGroup clockModelGroup) {
-        options.clearDataPartitionCaches();
-        this.clockModelGroup = clockModelGroup;
-    }
-
     public String getPrefix() {
         String prefix = "";
         if (options.getPartitionClockModels().size() > 1) { //|| options.isSpeciesAnalysis()
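
    The "deltaMus" operator wired up above relies on a delta-exchange move: one relative rate is
    nudged up and another down by the same amount, so their sum (and hence mean) is unchanged.
    A self-contained sketch of the unweighted case (illustrative only; the actual BEAST operator
    is more general, e.g. it can weight the exchange by parameter dimensions and rejects moves
    that push a rate out of bounds):

        import java.util.Random;

        class DeltaExchangeSketch {
            public static void main(String[] args) {
                Random rng = new Random(7);
                double[] mus = {1.2, 0.4, 1.4};   // relative rates, e.g. CP1.mu, CP2.mu, CP3.mu
                double delta = 0.05 * (2 * rng.nextDouble() - 1);   // tuning window (assumed form)

                int i = rng.nextInt(mus.length);
                int j = rng.nextInt(mus.length - 1);
                if (j >= i) j++;                   // pick a distinct second index

                mus[i] += delta;                   // a real operator would reject the move
                mus[j] -= delta;                   // if any rate became negative

                double sum = 0;
                for (double mu : mus) sum += mu;
                // mean stays 1.0 for this starting vector
                System.out.printf("mean after move = %.4f%n", sum / mus.length);
            }
        }
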
diff --git a/src/dr/app/beauti/options/PartitionClockModelTreeModelLink.java b/src/dr/app/beauti/options/PartitionClockModelTreeModelLink.java
index 9b615c9..ee7e106 100644
--- a/src/dr/app/beauti/options/PartitionClockModelTreeModelLink.java
+++ b/src/dr/app/beauti/options/PartitionClockModelTreeModelLink.java
@@ -114,14 +114,15 @@ public class PartitionClockModelTreeModelLink extends PartitionOptions {
                 "Scales UCGD mean inversely to node heights of the tree", model.getParameter(ClockType.UCGD_MEAN),
                 tree.getParameter("treeModel.allInternalNodeHeights"), OperatorType.UP_DOWN, true, demoTuning, rateWeights);
 
+        // These statistics should not have priors on them, as there will already be priors on the clock model parameters.
 
         // These are statistics which could have priors on...
         // #meanRate = #Relaxed Clock Model * #Tree Model
-        createNonNegativeStatistic("meanRate", "The mean rate of evolution over the whole tree");
+//        createNonNegativeStatistic("meanRate", "The mean rate of evolution over the whole tree");
         // #covariance = #Relaxed Clock Model * #Tree Model
-        createStatistic("covariance", "The covariance in rates of evolution on each lineage with their ancestral lineages");
+//        createStatistic("covariance", "The covariance in rates of evolution on each lineage with their ancestral lineages");
         // #COEFFICIENT_OF_VARIATION = #Uncorrelated Clock Model
-        createNonNegativeStatistic(RateStatisticParser.COEFFICIENT_OF_VARIATION, "The variation in rate of evolution over the whole tree");
+//        createNonNegativeStatistic(RateStatisticParser.COEFFICIENT_OF_VARIATION, "The variation in rate of evolution over the whole tree");
 
         createUpDownOperator("microsatUpDownRateHeights", "Substitution rate and heights",
                 "Scales substitution rates inversely to node heights of the tree", model.getParameter("clock.rate"),
@@ -134,28 +135,28 @@ public class PartitionClockModelTreeModelLink extends PartitionOptions {
      * @param params the parameter list
      */
     public void selectParameters(List<Parameter> params) {
-        setAvgRootAndRate();
-        getParameter("branchRates.categories");
-        getParameter("treeModel.rootRate");
-        getParameter("treeModel.nodeRates");
-        getParameter("treeModel.allRates");
-
-        if (options.hasData()) {
-            // if not fixed then do mutation rate move and up/down move
-            boolean fixed = !model.isEstimatedRate();
-
-            Parameter rateParam;
-
-            switch (model.getClockType()) {
-                case AUTOCORRELATED:
-                    rateParam = getParameter("treeModel.rootRate");
-                    rateParam.isFixed = fixed;
-                    if (!fixed) params.add(rateParam);
-
-                    params.add(getParameter("branchRates.var"));
-                    break;
-            }
-        }
+//        setAvgRootAndRate();
+//        getParameter("branchRates.categories");
+//        getParameter("treeModel.rootRate");
+//        getParameter("treeModel.nodeRates");
+//        getParameter("treeModel.allRates");
+//
+//        if (options.hasData()) {
+//            // if not fixed then do mutation rate move and up/down move
+//            boolean fixed = !model.isEstimatedRate();
+//
+//            Parameter rateParam;
+//
+//            switch (model.getClockType()) {
+//                case AUTOCORRELATED:
+//                    rateParam = getParameter("treeModel.rootRate");
+//                    rateParam.isFixed = fixed;
+//                    if (!fixed) params.add(rateParam);
+//
+//                    params.add(getParameter("branchRates.var"));
+//                    break;
+//            }
+//        }
     }
 
     /**
@@ -170,7 +171,6 @@ public class PartitionClockModelTreeModelLink extends PartitionOptions {
             if (model.getDataType().getType() == DataType.MICRO_SAT) {
                 if (model.getClockType() == ClockType.STRICT_CLOCK) {
                     op = getOperator("microsatUpDownRateHeights");
-                    op.setClockModelGroup(model.getClockModelGroup());
                     ops.add(op);
                 } else {
                     throw new UnsupportedOperationException("Microsatellite only supports strict clock model");
@@ -181,7 +181,6 @@ public class PartitionClockModelTreeModelLink extends PartitionOptions {
                 switch (model.getClockType()) {
                     case STRICT_CLOCK:
                         op = getOperator("upDownRateHeights");
-                        op.setClockModelGroup(model.getClockModelGroup());
                         ops.add(op);
                         break;
 
@@ -190,13 +189,11 @@ public class PartitionClockModelTreeModelLink extends PartitionOptions {
 
                             case LOGNORMAL:
                                 op = getOperator("upDownUCLDMeanHeights");
-                                op.setClockModelGroup(model.getClockModelGroup());
                                 ops.add(op);
                                 break;
                             case GAMMA:
 //                                throw new UnsupportedOperationException("Uncorrelated gamma model not implemented yet");
                                 op = getOperator("upDownUCGDMeanHeights");
-                                op.setClockModelGroup(model.getClockModelGroup());
                                 ops.add(op);
                             break;
                             case CAUCHY:
@@ -204,7 +201,6 @@ public class PartitionClockModelTreeModelLink extends PartitionOptions {
 //                            break;
                             case EXPONENTIAL:
                                 op = getOperator("upDownUCEDMeanHeights");
-                                op.setClockModelGroup(model.getClockModelGroup());
                                 ops.add(op);
                                 break;
                         }
@@ -237,7 +233,6 @@ public class PartitionClockModelTreeModelLink extends PartitionOptions {
                     case RANDOM_LOCAL_CLOCK:
                     case FIXED_LOCAL_CLOCK:
                         op = getOperator("upDownRateHeights");
-                        op.setClockModelGroup(model.getClockModelGroup());
                         ops.add(op);
 
                         break;
@@ -255,27 +250,6 @@ public class PartitionClockModelTreeModelLink extends PartitionOptions {
      * @param params the parameter list
      */
     public void selectStatistics(List<Parameter> params) {
-
-//        if (options.taxonSets != null) {
-//            for (Taxa taxonSet : options.taxonSets) {
-//                Parameter statistic = statistics.get(taxonSet);
-//                if (statistic == null) {
-//                    statistic = new Parameter(taxonSet, "tMRCA for taxon set ");
-//                    statistics.put(taxonSet, statistic);
-//                }
-//                params.add(statistic);
-//            }
-//        } else {
-//            System.err.println("TaxonSets are null");
-//        }
-
-        // Statistics
-        if (model.getClockType() != ClockType.STRICT_CLOCK) {
-            params.add(getParameter("meanRate"));
-            params.add(getParameter("covariance"));
-            params.add(getParameter(RateStatisticParser.COEFFICIENT_OF_VARIATION));
-        }
-
     }
 
     /////////////////////////////////////////////////////////////
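
    The up/down operators referenced above ("upDownRateHeights", "upDownUCLDMeanHeights", ...)
    all follow the same pattern: a single scale factor is applied to the rate and its inverse to
    the node heights, so the expected number of substitutions along a branch is preserved while
    both quantities mix. A self-contained illustration (hypothetical values, not repository code):

        import java.util.Random;

        class UpDownSketch {
            public static void main(String[] args) {
                Random rng = new Random(42);
                double rate = 1.0e-3;
                double[] nodeHeights = {10.0, 25.0, 40.0};

                double scale = 0.9 + 0.2 * rng.nextDouble();   // tuned window around 1.0 (assumed form)
                rate *= scale;                                  // "up"
                for (int i = 0; i < nodeHeights.length; i++) {
                    nodeHeights[i] /= scale;                    // "down"
                }
                // rate * rootHeight is unchanged (0.04 for these starting values)
                System.out.printf("rate=%.6f rootHeight=%.3f rate*rootHeight=%.4f%n",
                        rate, nodeHeights[2], rate * nodeHeights[2]);
            }
        }
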
diff --git a/src/dr/app/beauti/options/PartitionData.java b/src/dr/app/beauti/options/PartitionData.java
index fe11948..582e3dc 100644
--- a/src/dr/app/beauti/options/PartitionData.java
+++ b/src/dr/app/beauti/options/PartitionData.java
@@ -66,7 +66,10 @@ public class PartitionData extends AbstractPartitionData {
         if (alignment != null) {
             patterns = new Patterns(alignment);
         }
-        calculateMeanDistance(patterns);
+
+        // This is too slow to be done at data loading.
+        // calculateMeanDistance(patterns);
+        calculateMeanDistance(null);
     }
 
     public PartitionData(BeautiOptions options, String name, String fileName, List<TraitData> traits) {
diff --git a/src/dr/app/beauti/options/PartitionOptions.java b/src/dr/app/beauti/options/PartitionOptions.java
index 10f83b7..f27a891 100644
--- a/src/dr/app/beauti/options/PartitionOptions.java
+++ b/src/dr/app/beauti/options/PartitionOptions.java
@@ -43,7 +43,7 @@ public abstract class PartitionOptions extends ModelOptions {
     protected String partitionName;
     protected final BeautiOptions options;
 
-    protected double[] avgRootAndRate = new double[]{1.0, 1.0};
+//    protected double[] avgRootAndRate = new double[]{1.0, 1.0};
 
     public PartitionOptions(BeautiOptions options) {
         this.options = options;
@@ -64,45 +64,9 @@ public abstract class PartitionOptions extends ModelOptions {
 
     public abstract String getPrefix();
 
-//    protected void createParameterClockRateUndefinedPrior(PartitionOptions options, String name, String description, PriorScaleType scaleType,
-//                                                          double initial, double truncationLower, double truncationUpper) { // it will change to Uniform
-//        new Parameter.Builder(name, description).scaleType(scaleType).prior(PriorType.UNDEFINED).initial(initial)
-//                .isCMTCRate(true).isNonNegative(true)
-//                .truncationLower(truncationLower).truncationUpper(truncationUpper).partitionOptions(options).build(parameters);
-//    }
-//
-//    protected void createParameterClockRateReferencePrior(PartitionOptions options, String name, String description, PriorScaleType scaleType,
-//                                                          double initial) { // it will change to Uniform
-//        new Parameter.Builder(name, description).scaleType(scaleType).prior(PriorType.CTMC_RATE_REFERENCE_PRIOR).initial(initial)
-//                .isCMTCRate(true).isNonNegative(true)
-//                .truncationLower(truncationLower).truncationUpper(truncationUpper).partitionOptions(options).build(parameters);
-//    }
-//
-//    protected void createParameterClockRateUniform(PartitionOptions options, String name, String description, PriorScaleType scaleType,
-//                                                   double initial, double truncationLower, double truncationUpper) {
-//        new Parameter.Builder(name, description).scaleType(scaleType).prior(PriorType.UNIFORM_PRIOR).initial(initial)
-//                .isCMTCRate(true).isNonNegative(true)
-//                .truncationLower(truncationLower).truncationUpper(truncationUpper).partitionOptions(options).build(parameters);
-//    }
-//
-//    protected void createParameterClockRateGamma(PartitionOptions options, String name, String description, PriorScaleType scaleType,
-//                                                 double initial, double shape, double scale) {
-//        new Parameter.Builder(name, description).scaleType(scaleType).prior(PriorType.GAMMA_PRIOR).initial(initial)
-//                .isCMTCRate(true).isNonNegative(true)
-//                .shape(shape).scale(scale).partitionOptions(options).build(parameters);
-//    }
-//
-//    public void createParameterClockRateExponential(PartitionOptions options, String name, String description, PriorScaleType scaleType,
-//                                                    double initial, double mean, double offset) {
-//        new Parameter.Builder(name, description).scaleType(scaleType).prior(PriorType.EXPONENTIAL_PRIOR)
-//                .isCMTCRate(true).isNonNegative(true)
-//                .initial(initial).mean(mean).offset(offset).partitionOptions(options).build(parameters);
-//    }
-//
-//
-    protected void createParameterTree(PartitionOptions options, String name, String description, boolean isNodeHeight, double value) {
+    protected void createParameterTree(PartitionOptions options, String name, String description, boolean isNodeHeight) {
         new Parameter.Builder(name, description).isNodeHeight(isNodeHeight).scaleType(PriorScaleType.TIME_SCALE)
-                .isNonNegative(true).initial(value).partitionOptions(options).build(parameters);
+                .isNonNegative(true).initial(Double.NaN).partitionOptions(options).build(parameters);
     }
 
     public Parameter getParameter(String name) {
@@ -115,7 +79,7 @@ public abstract class PartitionOptions extends ModelOptions {
 
         parameter.setPrefix(getPrefix());
 
-        autoScale(parameter); // not include clock rate, and treeModel.rootHeight
+//        autoScale(parameter); // not include clock rate, and treeModel.rootHeight
 
         return parameter;
     }
@@ -155,141 +119,143 @@ public abstract class PartitionOptions extends ModelOptions {
         return options.getDataPartitions(this).get(0).getDataType();
     }
 
-    public double[] getAvgRootAndRate() {
-        return avgRootAndRate;
-    }
-
-    public void setAvgRootAndRate() {
-        this.avgRootAndRate = options.clockModelOptions.calculateInitialRootHeightAndRate(options.getDataPartitions(this));
-    }
+//    public double[] getAvgRootAndRate() {
+//        return avgRootAndRate;
+//    }
+//
+//    public void setAvgRootAndRate() {
+//        this.avgRootAndRate = options.clockModelOptions.calculateInitialRootHeightAndRate(options.getDataPartitions(this));
+//    }
 
     protected void autoScale(Parameter param) {
-        double avgInitialRootHeight = avgRootAndRate[0];
-        double avgInitialRate = avgRootAndRate[1];
-
-//        double growthRateMaximum = 1E6;
-        double birthRateMaximum = 1E6;
-//        double substitutionRateMaximum = 100;
-//        double logStdevMaximum = 10;
-//        double substitutionParameterMaximum = 100;
-
-//        if (options.clockModelOptions.getRateOptionClockModel() == FixRateType.FIX_MEAN
-//                || options.clockModelOptions.getRateOptionClockModel() == FixRateType.RELATIVE_TO) {
+////        double avgInitialRootHeight = avgRootAndRate[0];
+////        double avgInitialRate = avgRootAndRate[1];
+//        double avgInitialRootHeight = 1.0;
+//        double avgInitialRate = 0.1;
 //
-//            growthRateMaximum = 1E6 * avgInitialRate;
-        birthRateMaximum = 1E6 * avgInitialRate;
-//        }
-
-//        if (options.clockModelOptions.getRateOptionClockModel() == FixRateType.FIX_MEAN) {
-//            double rate = options.clockModelOptions.getMeanRelativeRate();
+////        double growthRateMaximum = 1E6;
+//        double birthRateMaximum = 1E6;
+////        double substitutionRateMaximum = 100;
+////        double logStdevMaximum = 10;
+////        double substitutionParameterMaximum = 100;
 //
-//            growthRateMaximum = 1E6 * rate;
-//            birthRateMaximum = 1E6 * rate;
+////        if (options.clockModelOptions.getRateOptionClockModel() == FixRateType.FIX_MEAN
+////                || options.clockModelOptions.getRateOptionClockModel() == FixRateType.RELATIVE_TO) {
+////
+////            growthRateMaximum = 1E6 * avgInitialRate;
+//        birthRateMaximum = 1E6 * avgInitialRate;
+////        }
 //
-//            if (options.hasData()) {
-//                initialRootHeight = meanDistance / rate;
+////        if (options.clockModelOptions.getRateOptionClockModel() == FixRateType.FIX_MEAN) {
+////            double rate = options.clockModelOptions.getMeanRelativeRate();
+////
+////            growthRateMaximum = 1E6 * rate;
+////            birthRateMaximum = 1E6 * rate;
+////
+////            if (options.hasData()) {
+////                initialRootHeight = meanDistance / rate;
+////
+////                initialRootHeight = round(initialRootHeight, 2);
+////            }
+////
+////        } else {
+////            if (options.maximumTipHeight > 0) {
+////                initialRootHeight = options.maximumTipHeight * 10.0;
+////            }
+////
+////            initialRate = round((meanDistance * 0.2) / initialRootHeight, 2);
+////        }
 //
-//                initialRootHeight = round(initialRootHeight, 2);
-//            }
+////        double timeScaleMaximum = MathUtils.round(avgInitialRootHeight * 1000.0, 2);
 //
-//        } else {
-//            if (options.maximumTipHeight > 0) {
-//                initialRootHeight = options.maximumTipHeight * 10.0;
-//            }
 //
-//            initialRate = round((meanDistance * 0.2) / initialRootHeight, 2);
-//        }
-
-//        double timeScaleMaximum = MathUtils.round(avgInitialRootHeight * 1000.0, 2);
-
-
-//        if (!options.hasData()) param.setPriorEdited(false);
-
-        if (!param.isPriorEdited()) {
-            switch (param.scaleType) {
-                case TIME_SCALE:
-//                        param.lower = Math.max(0.0, param.lower);
-//                        param.upper = Math.min(timeScaleMaximum, param.upper);
-//                    if (param.isNodeHeight) { //TODO only affecting "treeModel.rootHeight", need to review
-//                        param.lower = options.maximumTipHeight;
-////                    param.upper = timeScaleMaximum;
-////                    param.initial = avgInitialRootHeight;
-//                            if (param.getOptions() instanceof PartitionTreeModel) { // move to PartitionTreeModel
-//                                param.initial = ((PartitionTreeModel) param.getOptions()).getInitialRootHeight();
-//                            }
+////        if (!options.hasData()) param.setPriorEdited(false);
+//
+//        if (!param.isPriorEdited()) {
+//            switch (param.scaleType) {
+//                case TIME_SCALE:
+////                        param.lower = Math.max(0.0, param.lower);
+////                        param.upper = Math.min(timeScaleMaximum, param.upper);
+////                    if (param.isNodeHeight) { //TODO only affecting "treeModel.rootHeight", need to review
+////                        param.lower = options.maximumTipHeight;
+//////                    param.upper = timeScaleMaximum;
+//////                    param.initial = avgInitialRootHeight;
+////                            if (param.getOptions() instanceof PartitionTreeModel) { // move to PartitionTreeModel
+////                                param.initial = ((PartitionTreeModel) param.getOptions()).getInitialRootHeight();
+////                            }
+////                    } else {
+//                    param.initial = avgInitialRootHeight;
+////                    }
+//
+//                    break;
+//                case LOG_TIME_SCALE:
+//                    param.initial = Math.log(avgInitialRootHeight);
+//                    break;
+//
+//                case T50_SCALE:
+////                        param.lower = Math.max(0.0, param.lower);
+//                    //param.upper = Math.min(timeScaleMaximum, param.upper);
+//                    param.initial = avgInitialRootHeight / 5.0;
+//                    break;
+//
+//                case GROWTH_RATE_SCALE:
+//                    param.initial = avgInitialRootHeight / 1000;
+//                    // use Laplace
+//                    if (param.getBaseName().startsWith("logistic")) {
+//                        param.scale = Math.log(1000) / avgInitialRootHeight;
+////                            System.out.println("logistic");
 //                    } else {
-                    param.initial = avgInitialRootHeight;
+//                        param.scale = Math.log(10000) / avgInitialRootHeight;
+////                            System.out.println("not logistic");
 //                    }
-
-                    break;
-                case LOG_TIME_SCALE:
-                    param.initial = Math.log(avgInitialRootHeight);
-                    break;
-
-                case T50_SCALE:
-//                        param.lower = Math.max(0.0, param.lower);
-                    //param.upper = Math.min(timeScaleMaximum, param.upper);
-                    param.initial = avgInitialRootHeight / 5.0;
-                    break;
-
-                case GROWTH_RATE_SCALE:
-                    param.initial = avgInitialRootHeight / 1000;
-                    // use Laplace
-                    if (param.getBaseName().startsWith("logistic")) {
-                        param.scale = Math.log(1000) / avgInitialRootHeight;
-//                            System.out.println("logistic");
-                    } else {
-                        param.scale = Math.log(10000) / avgInitialRootHeight;
-//                            System.out.println("not logistic");
-                    }
-                    break;
-
-                case BIRTH_RATE_SCALE:
-//                    param.uniformLower = Math.max(0.0, param.lower);
-//                    param.uniformUpper = Math.min(birthRateMaximum, param.upper);
-                    param.initial = MathUtils.round(1 / options.treeModelOptions.getExpectedAvgBranchLength(avgInitialRootHeight), 2);
-                    break;
-                case ORIGIN_SCALE:
-                    param.initial = MathUtils.round(avgInitialRootHeight * 1.1, 2);
-                    break;
-
-                case SUBSTITUTION_RATE_SCALE:
-//                        param.lower = Math.max(0.0, param.lower);
-                    //param.upper = Math.min(substitutionRateMaximum, param.upper);
-                    param.initial = avgInitialRate;
-                    break;
-
-                case LOG_STDEV_SCALE:
-//                        param.lower = Math.max(0.0, param.lower);
-                    //param.upper = Math.min(logStdevMaximum, param.upper);
-                    break;
-
-                case SUBSTITUTION_PARAMETER_SCALE:
-//                        param.lower = Math.max(0.0, param.lower);
-                    //param.upper = Math.min(substitutionParameterMaximum, param.upper);
-                    break;
-
-                // Now have a field 'isZeroOne'
-//                case UNITY_SCALE:
-//                    param.lower = 0.0;
-//                    param.upper = 1.0;
 //                    break;
-
-                case ROOT_RATE_SCALE:
-                    param.initial = avgInitialRate;
-                    param.shape = 0.5;
-                    param.scale = param.initial / 0.5;
-                    break;
-
-                case LOG_VAR_SCALE:
-                    param.initial = avgInitialRate;
-                    param.shape = 2.0;
-                    param.scale = param.initial / 2.0;
-                    break;
-
-            }
-
-        }
+//
+//                case BIRTH_RATE_SCALE:
+////                    param.uniformLower = Math.max(0.0, param.lower);
+////                    param.uniformUpper = Math.min(birthRateMaximum, param.upper);
+//                    param.initial = MathUtils.round(1 / options.treeModelOptions.getExpectedAvgBranchLength(avgInitialRootHeight), 2);
+//                    break;
+//                case ORIGIN_SCALE:
+//                    param.initial = MathUtils.round(avgInitialRootHeight * 1.1, 2);
+//                    break;
+//
+//                case SUBSTITUTION_RATE_SCALE:
+////                        param.lower = Math.max(0.0, param.lower);
+//                    //param.upper = Math.min(substitutionRateMaximum, param.upper);
+//                    param.initial = avgInitialRate;
+//                    break;
+//
+//                case LOG_STDEV_SCALE:
+////                        param.lower = Math.max(0.0, param.lower);
+//                    //param.upper = Math.min(logStdevMaximum, param.upper);
+//                    break;
+//
+//                case SUBSTITUTION_PARAMETER_SCALE:
+////                        param.lower = Math.max(0.0, param.lower);
+//                    //param.upper = Math.min(substitutionParameterMaximum, param.upper);
+//                    break;
+//
+//                // Now have a field 'isZeroOne'
+////                case UNITY_SCALE:
+////                    param.lower = 0.0;
+////                    param.upper = 1.0;
+////                    break;
+//
+//                case ROOT_RATE_SCALE:
+//                    param.initial = avgInitialRate;
+//                    param.shape = 0.5;
+//                    param.scale = param.initial / 0.5;
+//                    break;
+//
+//                case LOG_VAR_SCALE:
+//                    param.initial = avgInitialRate;
+//                    param.shape = 2.0;
+//                    param.scale = param.initial / 2.0;
+//                    break;
+//
+//            }
+//
+//        }
     }
 
     public BeautiOptions getOptions() {
diff --git a/src/dr/app/beauti/options/PartitionSubstitutionModel.java b/src/dr/app/beauti/options/PartitionSubstitutionModel.java
index b796aef..dbb03bd 100644
--- a/src/dr/app/beauti/options/PartitionSubstitutionModel.java
+++ b/src/dr/app/beauti/options/PartitionSubstitutionModel.java
@@ -35,6 +35,7 @@ import dr.evolution.datatype.Nucleotides;
 import dr.evomodel.substmodel.AminoAcidModelType;
 import dr.evomodel.substmodel.NucModelType;
 
+import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
@@ -47,8 +48,6 @@ import java.util.Set;
 public class PartitionSubstitutionModel extends PartitionOptions {
     private static final long serialVersionUID = -2570346396317131108L;
 
-    private final static boolean USE_DIRICHLET_PRIOR_FOR_MUS = false;
-
     // Instance variables
 
     public static final String[] GTR_RATE_NAMES = {"ac", "ag", "at", "cg", "gt"};
@@ -254,20 +253,6 @@ public class PartitionSubstitutionModel extends PartitionOptions {
         createNonNegativeParameterInfinitePrior("CP3.mu", "relative rate parameter for codon position 3",
                 PriorScaleType.SUBSTITUTION_PARAMETER_SCALE, 1.0);
 
-        // A vector of relative rates across all partitions...
-
-        if (USE_DIRICHLET_PRIOR_FOR_MUS) {
-            createNonNegativeParameterDirichletPrior("allMus", "relative rates amongst partitions parameter", this, PriorScaleType.SUBSTITUTION_PARAMETER_SCALE, 1.0);
-            createOperator("scaleMus", RelativeRatesType.MU_RELATIVE_RATES.toString(),
-                    "Scale codon position rates relative to each other", "allMus",
-                    OperatorType.SCALE_INDEPENDENTLY, 0.75, 3.0);
-        } else {
-            createNonNegativeParameterInfinitePrior("allMus", "relative rates amongst partitions parameter", this, PriorScaleType.SUBSTITUTION_PARAMETER_SCALE, 1.0);
-            createOperator("deltaMus", RelativeRatesType.MU_RELATIVE_RATES.toString(),
-                    "Scale codon position rates relative to each other maintaining mean", "allMus",
-                    OperatorType.DELTA_EXCHANGE, 0.75, 3.0);
-        }
-
         createScaleOperator("kappa", demoTuning, substWeights);
         createScaleOperator("CP1.kappa", demoTuning, substWeights);
         createScaleOperator("CP2.kappa", demoTuning, substWeights);
@@ -341,9 +326,8 @@ public class PartitionSubstitutionModel extends PartitionOptions {
     ////////////////////////////////////////////////////////////////
 
     public void selectParameters(List<Parameter> params) {
-        setAvgRootAndRate();
+//        setAvgRootAndRate();
         boolean includeRelativeRates = getCodonPartitionCount() > 1;//TODO check
-
         switch (getDataType().getType()) {
             case DataType.NUCLEOTIDES:
                 if (includeRelativeRates && unlinkedSubstitutionModel) {
@@ -535,11 +519,41 @@ public class PartitionSubstitutionModel extends PartitionOptions {
                 params.add(getParameter("pInv"));
             }
         }
+    }
 
-        if (includeRelativeRates) {
-            params.add(getParameter("allMus"));
-//                    params.add(getParameter("mu"));
+    public List<Parameter> getRelativeRateParameters() {
+        List<Parameter> allMus = new ArrayList<Parameter>();
+        int[] weights = getPartitionCodonWeights();
+        if (getCodonPartitionCount() > 1) {
+            if (codonHeteroPattern.equals("123")) {
+                Parameter parameter = getParameter("CP1.mu");
+                parameter.setDimensionWeight(weights[0]);
+                allMus.add(parameter);
+
+                parameter = getParameter("CP2.mu");
+                parameter.setDimensionWeight(weights[1]);
+                allMus.add(parameter);
+
+                parameter = getParameter("CP3.mu");
+                parameter.setDimensionWeight(weights[2]);
+                allMus.add(parameter);
+            } else if (codonHeteroPattern.equals("112")) {
+                Parameter parameter = getParameter("CP1+2.mu");
+                parameter.setDimensionWeight(weights[0]);
+                allMus.add(parameter);
+
+                parameter = getParameter("CP3.mu");
+                parameter.setDimensionWeight(weights[1]);
+                allMus.add(parameter);
+            } else {
+                throw new IllegalArgumentException("codonHeteroPattern must be one of '111', '112' or '123'");
+            }
+        } else {
+            Parameter mu = getParameter("mu");
+            mu.setDimensionWeight(weights[0]);
+            allMus.add(mu);
         }
+        return allMus;
     }
 
     private void addFrequencyParams(List<Parameter> params, boolean includeRelativeRates) {
@@ -563,12 +577,11 @@ public class PartitionSubstitutionModel extends PartitionOptions {
     }
 
     public void selectOperators(List<Operator> ops) {
-        boolean includeRelativeRates = getCodonPartitionCount() > 1;//TODO check
 
         switch (getDataType().getType()) {
             case DataType.NUCLEOTIDES:
 
-                if (includeRelativeRates && unlinkedSubstitutionModel) {
+                if (hasCodonPartitions() && unlinkedSubstitutionModel) {
                     if (codonHeteroPattern.equals("123")) {
                         switch (nucSubstitutionModel) {
                             case JC:
@@ -658,7 +671,7 @@ public class PartitionSubstitutionModel extends PartitionOptions {
                 }
 
                 // only AMINO_ACIDS not addFrequency
-                addFrequencyOps(ops, includeRelativeRates);
+                addFrequencyOps(ops);
                 break;
 
             case DataType.AMINO_ACIDS:
@@ -682,7 +695,7 @@ public class PartitionSubstitutionModel extends PartitionOptions {
                 }
 
                 // only AMINO_ACIDS not addFrequency
-                addFrequencyOps(ops, includeRelativeRates);
+                addFrequencyOps(ops);
                 break;
 
             case DataType.GENERAL:
@@ -723,7 +736,7 @@ public class PartitionSubstitutionModel extends PartitionOptions {
 
         // if gamma do shape move
         if (gammaHetero) {
-            if (includeRelativeRates && unlinkedHeterogeneityModel) {
+            if (hasCodonPartitions() && unlinkedHeterogeneityModel) {
                 if (codonHeteroPattern.equals("123")) {
                     ops.add(getOperator("CP1.alpha"));
                     ops.add(getOperator("CP2.alpha"));
@@ -740,7 +753,7 @@ public class PartitionSubstitutionModel extends PartitionOptions {
         }
         // if pinv do pinv move
         if (invarHetero) {
-            if (includeRelativeRates && unlinkedHeterogeneityModel) {
+            if (hasCodonPartitions() && unlinkedHeterogeneityModel) {
                 if (codonHeteroPattern.equals("123")) {
                     ops.add(getOperator("CP1.pInv"));
                     ops.add(getOperator("CP2.pInv"));
@@ -756,21 +769,11 @@ public class PartitionSubstitutionModel extends PartitionOptions {
             }
         }
 
-        if (includeRelativeRates) {
-            Operator muOperator;
-
-            if (USE_DIRICHLET_PRIOR_FOR_MUS) {
-                muOperator = getOperator("scaleMus");
-            } else {
-                muOperator = getOperator("deltaMus");
-            }
-            ops.add(muOperator);
-        }
     }
 
-    private void addFrequencyOps(List<Operator> ops, boolean includeRelativeRates) {
+    private void addFrequencyOps(List<Operator> ops) {
         if (frequencyPolicy == FrequencyPolicyType.ESTIMATED) {
-            if (includeRelativeRates && unlinkedSubstitutionModel && unlinkedFrequencyModel) {
+            if (hasCodonPartitions() && unlinkedSubstitutionModel && unlinkedFrequencyModel) {
                 if (codonHeteroPattern.equals("123")) {
                     ops.add(getOperator("CP1.frequencies"));
                     ops.add(getOperator("CP2.frequencies"));
@@ -791,7 +794,7 @@ public class PartitionSubstitutionModel extends PartitionOptions {
      * @return true either if the options have more than one partition or any partition is
      *         broken into codon positions.
      */
-    public boolean hasCodon() {
+    public boolean hasCodonPartitions() {
         return getCodonPartitionCount() > 1;
     }
 
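The '111'/'112'/'123' codon heterogeneity patterns above determine how many codon
partitions a nucleotide partition model carries, which is what the renamed
hasCodonPartitions() now tests. A minimal sketch of that relationship, inferred from the
CP1/CP2/CP3 parameters in this hunk (getCodonPartitionCount() itself is not shown here,
so treat this as an illustration rather than the actual BEAUti implementation):

    // Illustrative only: map a codonHeteroPattern string to a codon partition count.
    // "111" = one model for all positions, "112" = CP1+2 vs CP3, "123" = three separate models.
    static int codonPartitionCount(String codonHeteroPattern) {
        if (codonHeteroPattern == null || codonHeteroPattern.equals("111")) {
            return 1;
        } else if (codonHeteroPattern.equals("112")) {
            return 2;
        } else if (codonHeteroPattern.equals("123")) {
            return 3;
        }
        throw new IllegalArgumentException("codonHeteroPattern must be one of '111', '112' or '123'");
    }

    // hasCodonPartitions() then reduces to asking whether more than one such partition exists.
    static boolean hasCodonPartitions(String codonHeteroPattern) {
        return codonPartitionCount(codonHeteroPattern) > 1;
    }
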
diff --git a/src/dr/app/beauti/options/PartitionTreeModel.java b/src/dr/app/beauti/options/PartitionTreeModel.java
index c176476..83c4478 100644
--- a/src/dr/app/beauti/options/PartitionTreeModel.java
+++ b/src/dr/app/beauti/options/PartitionTreeModel.java
@@ -46,13 +46,15 @@ public class PartitionTreeModel extends PartitionOptions {
     private Tree userStartingTree = null;
 
     private boolean isNewick = true;
-    private boolean fixedTree = false;
-//    private double initialRootHeight = 1.0;
 
     //TODO if use EBSP and *BEAST, validate Ploidy of every PD is same for each tree that the PD(s) belongs to
     // BeastGenerator.checkOptions()
     private PloidyType ploidyType = PloidyType.AUTOSOMAL_NUCLEAR;
 
+    private boolean hasTipCalibrations = false;
+    private boolean hasNodeCalibrations = false;
+
+
     public PartitionTreeModel(BeautiOptions options, AbstractPartitionData partition) {
         super(options, partition.getName());
     }
@@ -81,7 +83,7 @@ public class PartitionTreeModel extends PartitionOptions {
         createParameter("tree", "The tree");
         createParameter("treeModel.internalNodeHeights", "internal node heights of the tree (except the root)");
         createParameter("treeModel.allInternalNodeHeights", "internal node heights of the tree");
-        createParameterTree(this, "treeModel.rootHeight", "root height of the tree", true, 1.0);
+        createParameterTree(this, "treeModel.rootHeight", "root height of the tree", true);
 
         //TODO treeBitMove should move to PartitionClockModelTreeModelLink, after Alexei finish
         createOperator("treeBitMove", "Tree", "Swaps the rates and change locations of local clocks", "tree",
@@ -101,7 +103,7 @@ public class PartitionTreeModel extends PartitionOptions {
                 OperatorType.WILSON_BALDING, -1, demoWeights);
 
         createOperator("subtreeLeap", "Tree", "Performs the subtree-leap rearrangement of the tree", "tree",
-                OperatorType.SUBTREE_LEAP, 1.0, options.taxonList.getTaxonCount());
+                OperatorType.SUBTREE_LEAP, 1.0, options.taxonList.getTaxonCount() < treeWeights ? treeWeights : options.taxonList.getTaxonCount());
 
     }
 
@@ -111,7 +113,7 @@ public class PartitionTreeModel extends PartitionOptions {
      * @param parameters the parameter list
      */
     public void selectParameters(List<Parameter> parameters) {
-        setAvgRootAndRate();
+//        setAvgRootAndRate();
 
         getParameter("tree");
         getParameter("treeModel.internalNodeHeights");
@@ -119,7 +121,7 @@ public class PartitionTreeModel extends PartitionOptions {
 
         Parameter rootHeightParameter = getParameter("treeModel.rootHeight");
         if (rootHeightParameter.priorType == PriorType.NONE_TREE_PRIOR || !rootHeightParameter.isPriorEdited()) {
-            rootHeightParameter.initial = getInitialRootHeight();
+            rootHeightParameter.setInitial(getInitialRootHeight());
             rootHeightParameter.truncationLower = options.maximumTipHeight;
             rootHeightParameter.uniformLower = options.maximumTipHeight;
             rootHeightParameter.isTruncated = true;
@@ -139,11 +141,15 @@ public class PartitionTreeModel extends PartitionOptions {
      * @param operators the operator list
      */
     public void selectOperators(List<Operator> operators) {
-        setAvgRootAndRate();
+//        setAvgRootAndRate();
 
         Operator subtreeSlideOp = getOperator("subtreeSlide");
-        if (!subtreeSlideOp.tuningEdited) {
-            subtreeSlideOp.tuning = getInitialRootHeight() / 10.0;
+        if (!subtreeSlideOp.isTuningEdited()) {
+            double tuning = 1.0;
+            if (!Double.isNaN(getInitialRootHeight()) && !Double.isInfinite(getInitialRootHeight())) {
+                tuning = getInitialRootHeight() / 10.0;
+            }
+            subtreeSlideOp.setTuning(tuning);
         }
 
         operators.add(subtreeSlideOp);
@@ -177,15 +183,15 @@ public class PartitionTreeModel extends PartitionOptions {
             throw new IllegalArgumentException("Unknown operator set type");
         }
 
-        getOperator("subtreeSlide").inUse = defaultInUse;
-        getOperator("narrowExchange").inUse = defaultInUse;
-        getOperator("wideExchange").inUse = defaultInUse;
-        getOperator("wilsonBalding").inUse = defaultInUse;
+        getOperator("subtreeSlide").setUsed(defaultInUse);
+        getOperator("narrowExchange").setUsed(defaultInUse);
+        getOperator("wideExchange").setUsed(defaultInUse);
+        getOperator("wilsonBalding").setUsed(defaultInUse);
 
-        getOperator("treeModel.rootHeight").inUse = branchesInUse;
-        getOperator("uniformHeights").inUse = branchesInUse;
+        getOperator("treeModel.rootHeight").setUsed(branchesInUse);
+        getOperator("uniformHeights").setUsed(branchesInUse);
 
-        getOperator("subtreeLeap").inUse = newMixInUse;
+        getOperator("subtreeLeap").setUsed(newMixInUse);
     }
 
     /////////////////////////////////////////////////////////////
@@ -219,10 +225,19 @@ public class PartitionTreeModel extends PartitionOptions {
         return isNewick;
     }
 
-    public void setNewick(boolean newick) {
-        isNewick = newick;
+    public void setNewick(boolean isNewick) {
+        this.isNewick = isNewick;
+    }
+
+    public void setTipCalibrations(boolean hasTipCalibrations) {
+        this.hasTipCalibrations = hasTipCalibrations;
     }
 
+    public void setNodeCalibrations(boolean hasNodeCalibrations) {
+        this.hasNodeCalibrations = hasNodeCalibrations;
+    }
+
+
     public void setPloidyType(PloidyType ploidyType) {
         this.ploidyType = ploidyType;
     }
@@ -232,7 +247,8 @@ public class PartitionTreeModel extends PartitionOptions {
     }
 
     public double getInitialRootHeight() {
-        return getAvgRootAndRate()[0];
+        return Double.NaN;
+//        return getAvgRootAndRate()[0];
     }
 
 //    public void setInitialRootHeight(double initialRootHeight) {
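The subtreeSlide change above amounts to a guard: use getInitialRootHeight() / 10 as the
operator tuning only when the root height is a finite number, and fall back to 1.0 now
that getInitialRootHeight() can return Double.NaN. A stand-alone sketch of that rule (the
helper name is illustrative, not part of the patch):

    // Fall back to a neutral tuning of 1.0 when the initial root height is NaN or infinite.
    static double subtreeSlideTuning(double initialRootHeight) {
        if (Double.isNaN(initialRootHeight) || Double.isInfinite(initialRootHeight)) {
            return 1.0;
        }
        return initialRootHeight / 10.0;
    }
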
diff --git a/src/dr/app/beauti/options/PartitionTreePrior.java b/src/dr/app/beauti/options/PartitionTreePrior.java
index 8b29ec8..cd2bde2 100644
--- a/src/dr/app/beauti/options/PartitionTreePrior.java
+++ b/src/dr/app/beauti/options/PartitionTreePrior.java
@@ -253,7 +253,7 @@ public class PartitionTreePrior extends PartitionOptions {
      * @param params the parameter list
      */
     public void selectParameters(List<Parameter> params) {
-        setAvgRootAndRate();
+//        setAvgRootAndRate();
 
         if (nodeHeightPrior == TreePriorType.CONSTANT) {
             params.add(getParameter("constant.popSize"));
@@ -306,7 +306,9 @@ public class PartitionTreePrior extends PartitionOptions {
                     + BirthDeathSerialSamplingModelParser.RELATIVE_MU));
             Parameter psi = getParameter(BirthDeathSerialSamplingModelParser.BDSS + "."
                     + BirthDeathSerialSamplingModelParser.PSI);
-            if (options.maximumTipHeight > 0) psi.initial = MathUtils.round(1 / options.maximumTipHeight, 4);
+            if (options.maximumTipHeight > 0) {
+                psi.setInitial(MathUtils.round(1 / options.maximumTipHeight, 4));
+            }
             params.add(psi);
             params.add(getParameter(BirthDeathSerialSamplingModelParser.BDSS + "."
                     + BirthDeathSerialSamplingModelParser.ORIGIN));
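The psi initialisation above seeds the BDSS sampling rate at roughly one sample per unit
of the tip-date span: psi = 1 / maximumTipHeight, rounded to four decimal places, and only
when there are dated tips at all. A hedged sketch of that calculation (MathUtils.round(x, 4)
is assumed here to mean rounding to four decimal places; the helper name is illustrative):

    // Illustrative only: initial psi for serially sampled data, or the existing default otherwise.
    static double initialPsi(double maximumTipHeight, double currentDefault) {
        if (maximumTipHeight > 0) {
            return Math.round((1.0 / maximumTipHeight) * 1.0e4) / 1.0e4; // 4 decimal places
        }
        return currentDefault; // contemporaneous tips: leave the default untouched
    }
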
diff --git a/src/dr/app/beauti/options/TraitGuesser.java b/src/dr/app/beauti/options/TraitGuesser.java
index e7a1507..ef9b2bf 100644
--- a/src/dr/app/beauti/options/TraitGuesser.java
+++ b/src/dr/app/beauti/options/TraitGuesser.java
@@ -46,7 +46,7 @@ public class TraitGuesser implements Serializable {
         this.traitData = traitData;
     }
 
-    public static enum GuessType {
+    public enum GuessType {
         DELIMITER,
         REGEX
     }
diff --git a/src/dr/app/beauti/options/TreeModelOptions.java b/src/dr/app/beauti/options/TreeModelOptions.java
index 188b37d..0157fb0 100644
--- a/src/dr/app/beauti/options/TreeModelOptions.java
+++ b/src/dr/app/beauti/options/TreeModelOptions.java
@@ -73,18 +73,6 @@ public class TreeModelOptions extends ModelOptions {
 
     }
 
-    /////////////////////////////////////////////////////////////
-//    public double getRandomStartingTreeInitialRootHeight(PartitionTreeModel model) {
-//    	Parameter rootHeight = model.getParameter("treeModel.rootHeight");
-//
-//    	if (rootHeight.priorType != PriorType.NONE_TREE_PRIOR) {
-//    		return rootHeight.initial;
-//    	} else {
-//    		return calculateMeanDistance(model.getDataPartitions());
-//    	}
-//
-//    }
-
     public double getExpectedAvgBranchLength(double rootHeight) {
         double sum = 0;
         int taxonCount = options.taxonList.getTaxonCount();
@@ -97,6 +85,9 @@ public class TreeModelOptions extends ModelOptions {
     }
 
     public int isNodeCalibrated(PartitionTreeModel treeModel) {
+        if (treeModel == null) {
+            return -1;
+        }
         if (isNodeCalibrated(treeModel.getParameter("treeModel.rootHeight"))) {
             return 0; // root node
         } else if (options.getKeysFromValue(options.taxonSetsTreeModel, treeModel).size() > 0) {
@@ -111,14 +102,13 @@ public class TreeModelOptions extends ModelOptions {
         }
     }
 
-    public boolean isNodeCalibrated(Parameter para) {
-        return (para.taxaId != null && hasProperPriorOn(para)) // param.taxa != null is TMRCA
-                || (para.getBaseName().endsWith("treeModel.rootHeight") && hasProperPriorOn(para));
+    public boolean isNodeCalibrated(Parameter parameter) {
+        return (parameter.taxaId != null && hasProperPriorOn(parameter)) // param.taxa != null is TMRCA
+                || (parameter.getBaseName().endsWith("treeModel.rootHeight") && hasProperPriorOn(parameter));
     }
 
     private boolean hasProperPriorOn(Parameter para) {
         return para.priorType == PriorType.EXPONENTIAL_PRIOR
-//                || para.priorType == PriorType.TRUNC_NORMAL_PRIOR
                 || (para.priorType == PriorType.UNIFORM_PRIOR && para.uniformLower > 0 && para.uniformUpper < Double.POSITIVE_INFINITY)
                 || para.priorType == PriorType.LAPLACE_PRIOR
                 || para.priorType == PriorType.NORMAL_PRIOR
diff --git a/src/dr/app/beauti/priorsPanel/HierarchicalPriorDialog.java b/src/dr/app/beauti/priorsPanel/HierarchicalPriorDialog.java
index 2bfa2ce..f9e5acd 100644
--- a/src/dr/app/beauti/priorsPanel/HierarchicalPriorDialog.java
+++ b/src/dr/app/beauti/priorsPanel/HierarchicalPriorDialog.java
@@ -137,11 +137,11 @@ public class HierarchicalPriorDialog {
 
         hpm.getConditionalParameterList().get(0).mean = hpmMeanMean;
         hpm.getConditionalParameterList().get(0).stdev = hpmMeanStDev;
-        hpm.getConditionalParameterList().get(0).initial = hpmMeanInitial;
+        hpm.getConditionalParameterList().get(0).setInitial(hpmMeanInitial);
 
         hpm.getConditionalParameterList().get(1).shape = hpmPrecShape;
         hpm.getConditionalParameterList().get(1).scale = hpmPrecScale;
-        hpm.getConditionalParameterList().get(1).initial = hpmPrecInitial;
+        hpm.getConditionalParameterList().get(1).setInitial(hpmPrecInitial);
 
     }
 
@@ -208,7 +208,7 @@ public class HierarchicalPriorDialog {
         }
 
         initialField.setRange(lower, upper);
-        initialField.setValue(parameter.initial);
+        initialField.setValue(parameter.getInitial());
 
         panel = new JPanel(new GridBagLayout());
 
diff --git a/src/dr/app/beauti/priorsPanel/PriorOptionsPanel.java b/src/dr/app/beauti/priorsPanel/PriorOptionsPanel.java
index 1447b73..6acc7c7 100644
--- a/src/dr/app/beauti/priorsPanel/PriorOptionsPanel.java
+++ b/src/dr/app/beauti/priorsPanel/PriorOptionsPanel.java
@@ -92,7 +92,7 @@ abstract class PriorOptionsPanel extends OptionsPanel {
     private List<JComponent> argumentFields = new ArrayList<JComponent>();
     private List<String> argumentNames = new ArrayList<String>();
 
-    private boolean isCalibratedYule = true;
+    private boolean isCalibratedYule = false;
     private boolean isInitializable = true;
     private final boolean isTruncatable;
 
@@ -345,10 +345,10 @@ abstract class PriorOptionsPanel extends OptionsPanel {
 
     void setArguments(Parameter parameter, PriorType priorType) {
         this.isCalibratedYule = parameter.isCalibratedYule;
-        this.isInitializable = priorType.isInitializable;
-        if (!parameter.isStatistic) {
+        this.isInitializable = priorType.isInitializable && !parameter.isStatistic && !parameter.isNodeHeight;
+        if (!parameter.isStatistic && !parameter.isNodeHeight) {
             setFieldRange(initialField, parameter.isNonNegative, parameter.isZeroOne);
-            initialField.setValue(parameter.initial);
+            initialField.setValue(parameter.getInitial());
         }
         isTruncatedCheck.setSelected(parameter.isTruncated);
         setFieldRange(lowerField, parameter.isNonNegative, parameter.isZeroOne, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY);
@@ -364,8 +364,8 @@ abstract class PriorOptionsPanel extends OptionsPanel {
     }
 
     void getArguments(Parameter parameter, PriorType priorType) {
-        if (priorType.isInitializable) {
-            parameter.initial = initialField.getValue();
+        if (priorType.isInitializable && !parameter.isStatistic && !parameter.isNodeHeight) {
+            parameter.setInitial(initialField.getValue());
         }
         parameter.isTruncated = isTruncatedCheck.isSelected();
         if (parameter.isTruncated) {
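The net effect of the PriorOptionsPanel changes is a stricter rule for the initial-value
field: it is only shown and read back when the prior type is initializable and the
parameter is neither a statistic nor a node height. A small sketch of that predicate (the
flag names come from this hunk; the surrounding Parameter class is assumed, not shown):

    // Statistics and node heights take their values from the model or tree,
    // so their initial values are never edited directly in the priors panel.
    static boolean initialValueEditable(boolean priorTypeInitializable,
                                        boolean isStatistic,
                                        boolean isNodeHeight) {
        return priorTypeInitializable && !isStatistic && !isNodeHeight;
    }
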
diff --git a/src/dr/app/beauti/priorsPanel/PriorSettingsPanel.java b/src/dr/app/beauti/priorsPanel/PriorSettingsPanel.java
index e1c04a4..c0de5cf 100644
--- a/src/dr/app/beauti/priorsPanel/PriorSettingsPanel.java
+++ b/src/dr/app/beauti/priorsPanel/PriorSettingsPanel.java
@@ -71,6 +71,7 @@ public class PriorSettingsPanel extends JPanel {
     public PriorSettingsPanel(JFrame frame) {
         this.frame = frame;
 
+        optionsPanels.put(PriorType.NONE_FIXED, PriorOptionsPanel.INFINITE_UNIFORM);
         optionsPanels.put(PriorType.NONE_IMPROPER, PriorOptionsPanel.INFINITE_UNIFORM);
         optionsPanels.put(PriorType.UNIFORM_PRIOR, PriorOptionsPanel.UNIFORM);
         optionsPanels.put(PriorType.EXPONENTIAL_PRIOR, PriorOptionsPanel.EXPONENTIAL);
@@ -132,7 +133,7 @@ public class PriorSettingsPanel extends JPanel {
 
     /**
      * Set the parameter to be controlled
      *
      * @param parameter
      */
     public void setParameter(final Parameter parameter) {
@@ -222,11 +223,6 @@ public class PriorSettingsPanel extends JPanel {
             optionsPanel.addComponentWithLabel("Prior Distribution: ", new JLabel(priorType.toString()));
         }
 
-//        if (parameter.getOptions() instanceof PartitionClockModel) {
-//            PartitionClockModel pcm = (PartitionClockModel) parameter.getOptions();
-//            initialField.setEnabled(!pcm.getClockModelGroup().isFixMean());
-//        }
-
         PriorOptionsPanel panel3 = optionsPanels.get(priorType);
 
         if (panel3 != null) {
diff --git a/src/dr/app/beauti/priorsPanel/PriorsPanel.java b/src/dr/app/beauti/priorsPanel/PriorsPanel.java
index 0e3bc32..c35c727 100644
--- a/src/dr/app/beauti/priorsPanel/PriorsPanel.java
+++ b/src/dr/app/beauti/priorsPanel/PriorsPanel.java
@@ -31,11 +31,9 @@ import dr.app.beauti.components.hpm.HierarchicalModelComponentOptions;
 import dr.app.beauti.components.hpm.HierarchicalPhylogeneticModel;
 import dr.app.beauti.components.linkedparameters.LinkedParameter;
 import dr.app.beauti.components.linkedparameters.LinkedParameterComponentOptions;
-import dr.app.beauti.options.BeautiOptions;
-import dr.app.beauti.options.ClockModelGroup;
-import dr.app.beauti.options.Operator;
-import dr.app.beauti.options.Parameter;
+import dr.app.beauti.options.*;
 import dr.app.beauti.types.ClockType;
+import dr.app.beauti.types.FixRateType;
 import dr.app.beauti.types.PriorType;
 import dr.app.beauti.util.PanelUtils;
 import dr.app.gui.table.TableEditorStopper;
@@ -360,7 +358,7 @@ public class PriorsPanel extends BeautiPanel implements Exportable {
             }
             if (parameter.truncationLower != firstParameter.truncationLower ||
                     parameter.truncationUpper != firstParameter.truncationUpper ||
-                    options.getOperator(parameter).operatorType != options.getOperator(firstParameter).operatorType) {
+                    options.getOperator(parameter).getOperatorType() != options.getOperator(firstParameter).getOperatorType()) {
                 JOptionPane.showMessageDialog(frame,
                         "Only parameters that share the same bounds\n" +
                                 "and have the same operator types can be linked.",
@@ -428,7 +426,7 @@ public class PriorsPanel extends BeautiPanel implements Exportable {
             Parameter parameter = parameters.get(rows[i]);
             if (parameter.isStatistic) {
                 JOptionPane.showMessageDialog(frame,
-                        "Statistics are not currently allowed.",
+                        "Statistics cannot be used in a hierarchical model.",
                         "HPM parameter linking error",
                         JOptionPane.WARNING_MESSAGE);
                 return;
@@ -577,7 +575,7 @@ public class PriorsPanel extends BeautiPanel implements Exportable {
                 isCompatible = false;
             }
             Operator operator = options.getOperator(parameter);
-            if (operator == null || operator.operatorType != sourceOperator.operatorType) {
+            if (operator == null || operator.getOperatorType() != sourceOperator.getOperatorType()) {
                 isCompatible = false;
             }
             if (isCompatible) {
@@ -650,19 +648,17 @@ public class PriorsPanel extends BeautiPanel implements Exportable {
             if (parameter.getBaseName().endsWith("treeModel.rootHeight") || parameter.taxaId != null) { // param.taxa != null is TMRCA
 
                 if (options.treeModelOptions.isNodeCalibrated(parameter)) {
-                    List<ClockModelGroup> groupList;
+                    List<PartitionTreeModel> treeModels;
                     if (options.useStarBEAST) {
-                        groupList = options.clockModelOptions.getClockModelGroups();
+                        treeModels = options.getPartitionTreeModels();
                     } else {
-                        groupList = options.clockModelOptions.getClockModelGroups(options.getDataPartitions(parameter.getOptions()));
+                        treeModels = options.getPartitionTreeModels(options.getDataPartitions(parameter.getOptions()));
                     }
 
-                    for (ClockModelGroup clockModelGroup : groupList) {
-                        options.clockModelOptions.nodeCalibration(clockModelGroup);
+                    for (PartitionTreeModel treeModel : treeModels) {
+                        treeModel.setNodeCalibrations(true);
                     }
                     frame.setAllOptions();
-//        	} else {
-//        		options.clockModelOptions.fixRateOfFirstClockPartition();
                 }
             }
 
diff --git a/src/dr/app/beauti/siteModelsPanel/PartitionModelPanel.java b/src/dr/app/beauti/siteModelsPanel/PartitionModelPanel.java
index dec3102..c3830f8 100644
--- a/src/dr/app/beauti/siteModelsPanel/PartitionModelPanel.java
+++ b/src/dr/app/beauti/siteModelsPanel/PartitionModelPanel.java
@@ -603,7 +603,7 @@ public class PartitionModelPanel extends OptionsPanel {
      * position model
      */
     private void setSRD06Model() {
-        nucSubstCombo.setSelectedIndex(0);
+        nucSubstCombo.setSelectedIndex(1);
         heteroCombo.setSelectedIndex(1);
         codingCombo.setSelectedIndex(1);
         substUnlinkCheck.setSelected(true);
diff --git a/src/dr/app/beauti/tipdatepanel/TipDatesPanel.java b/src/dr/app/beauti/tipdatepanel/TipDatesPanel.java
index c4d9aca..5dae5c9 100644
--- a/src/dr/app/beauti/tipdatepanel/TipDatesPanel.java
+++ b/src/dr/app/beauti/tipdatepanel/TipDatesPanel.java
@@ -29,6 +29,7 @@ import dr.app.beauti.BeautiFrame;
 import dr.app.beauti.BeautiPanel;
 import dr.app.beauti.components.tipdatesampling.TipDateSamplingComponentOptions;
 import dr.app.beauti.options.*;
+import dr.app.beauti.types.FixRateType;
 import dr.app.beauti.types.TipDateSamplingType;
 import dr.app.beauti.util.BEAUTiImporter;
 import dr.app.beauti.util.PanelUtils;
@@ -374,8 +375,8 @@ public class TipDatesPanel extends BeautiPanel implements Exportable {
         calculateHeights();
 
         if (options.clockModelOptions.isTipCalibrated()) { // todo correct?
-            for (ClockModelGroup clockModelGroup : options.clockModelOptions.getClockModelGroups()) {
-                options.clockModelOptions.tipTimeCalibration(clockModelGroup);
+            for (PartitionTreeModel treeModel : options.getPartitionTreeModels()) {
+                treeModel.setTipCalibrations(true);
             }
         }
 
diff --git a/src/dr/app/beauti/treespanel/PartitionTreeModelPanel.java b/src/dr/app/beauti/treespanel/PartitionTreeModelPanel.java
index e3df2e9..70258fd 100644
--- a/src/dr/app/beauti/treespanel/PartitionTreeModelPanel.java
+++ b/src/dr/app/beauti/treespanel/PartitionTreeModelPanel.java
@@ -27,9 +27,7 @@ package dr.app.beauti.treespanel;
 
 import dr.app.beauti.BeautiFrame;
 import dr.app.beauti.options.BeautiOptions;
-import dr.app.beauti.options.ClockModelGroup;
 import dr.app.beauti.options.PartitionTreeModel;
-import dr.app.beauti.types.FixRateType;
 import dr.app.beauti.types.StartingTreeType;
 import dr.app.beauti.util.PanelUtils;
 import dr.app.gui.components.RealNumberField;
@@ -225,15 +223,6 @@ public class PartitionTreeModelPanel extends OptionsPanel {
 
         removeAll();
 
-        ClockModelGroup group = null;
-        if (options.getDataPartitions(partitionTreeModel).size() > 0)
-            group = options.getDataPartitions(partitionTreeModel).get(0).getPartitionClockModel().getClockModelGroup();
-
-        if (group != null && (group.getRateTypeOption() == FixRateType.FIX_MEAN
-                || group.getRateTypeOption() == FixRateType.RELATIVE_TO)) {
-            addComponentWithLabel("The estimated initial root height:", initRootHeightField);
-        }
-
         if (options.isEBSPSharingSamePrior() || options.useStarBEAST) {
             addComponentWithLabel("Ploidy type:", ploidyTypeCombo);
         }
diff --git a/src/dr/app/beauti/treespanel/TreesPanel.java b/src/dr/app/beauti/treespanel/TreesPanel.java
index 36ab186..43944d2 100644
--- a/src/dr/app/beauti/treespanel/TreesPanel.java
+++ b/src/dr/app/beauti/treespanel/TreesPanel.java
@@ -211,11 +211,6 @@ public class TreesPanel extends BeautiPanel implements Exportable {
 
                 options.getPartitionTreePriors().get(0).setNodeHeightPrior(TreePriorType.SPECIES_YULE);
 
-                List<ClockModelGroup> groupList = options.clockModelOptions.getClockModelGroups();// all data partitions
-                for (ClockModelGroup clockModelGroup : groupList) { // todo correct?
-                    options.clockModelOptions.fixRateOfFirstClockPartition(clockModelGroup); // fix 1st partition
-                }
-
             } else {
 //                if (options.hasData() && options.contains(Microsatellite.INSTANCE)) {
 //                    linkTreePriorCheck.setEnabled(false);
diff --git a/src/dr/app/beauti/types/FixRateType.java b/src/dr/app/beauti/types/FixRateType.java
index 7352f45..2e82176 100644
--- a/src/dr/app/beauti/types/FixRateType.java
+++ b/src/dr/app/beauti/types/FixRateType.java
@@ -30,7 +30,7 @@ package dr.app.beauti.types;
  * @author Walter Xie
  */
 public enum FixRateType {
-	FIX_MEAN("Estimate relative clock rates (fixed mean)"), //
+	FIXED_MEAN("Estimate relative clock rates (fixed mean)"), //
     RELATIVE_TO("Estimate clock rates relative to"),
 	TIP_CALIBRATED("Tip times calibrated"), //
 	NODE_CALIBRATED("Internal node(s) calibrated"), //
diff --git a/src/dr/app/beauti/types/OldClockType.java b/src/dr/app/beauti/types/OldClockType.java
deleted file mode 100644
index ed73f6c..0000000
--- a/src/dr/app/beauti/types/OldClockType.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * OldClockType.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
-package dr.app.beauti.types;
-
-/**
- * @author Alexei Drummond
- */
-public enum OldClockType {
-
-    STRICT_CLOCK("Strict clock", ClockType.STRICT_CLOCK),
-    UNCORRELATED_LOGNORMAL("Lognormal relaxed clock (Uncorrelated)", ClockType.UNCORRELATED, ClockDistributionType.LOGNORMAL),
-    UNCORRELATED_GAMMA("Gamma relaxed clock (Uncorrelated)", ClockType.UNCORRELATED, ClockDistributionType.GAMMA),
-    UNCORRELATED_CAUCHY("Cauchy relaxed clock (Uncorrelated)", ClockType.UNCORRELATED, ClockDistributionType.CAUCHY),
-    UNCORRELATED_EXPONENTIAL("Exponential relaxed clock (Uncorrelated)", ClockType.UNCORRELATED, ClockDistributionType.EXPONENTIAL),
-    RANDOM_LOCAL_CLOCK("Random local clock", ClockType.RANDOM_LOCAL_CLOCK),
-    FIXED_LOCAL_CLOCK("Fixed local clock", ClockType.FIXED_LOCAL_CLOCK),
-    AUTOCORRELATED("Autocorrelated relaxed clock", ClockType.AUTOCORRELATED, ClockDistributionType.LOGNORMAL);
-
-    OldClockType(String displayName, ClockType clockType) {
-        this(displayName, clockType, null);
-    }
-
-    OldClockType(String displayName, ClockType clockType, ClockDistributionType clockDistributionType) {
-        this.displayName = displayName;
-        this.clockType = clockType;
-        this.clockDistributionType = clockDistributionType;
-    }
-
-    public ClockType getClockType() {
-        return clockType;
-    }
-
-    public ClockDistributionType getClockDistributionType() {
-        return clockDistributionType;
-    }
-
-    public String toString() {
-        return displayName;
-    }
-
-    public static OldClockType getType(final ClockType clockType, final ClockDistributionType clockDistributionType) {
-        switch (clockType) {
-            case STRICT_CLOCK:
-                return STRICT_CLOCK;
-            case RANDOM_LOCAL_CLOCK:
-                return RANDOM_LOCAL_CLOCK;
-            case FIXED_LOCAL_CLOCK:
-                return FIXED_LOCAL_CLOCK;
-            case UNCORRELATED:
-                switch (clockDistributionType) {
-                    case LOGNORMAL:
-                        return UNCORRELATED_LOGNORMAL;
-                    case GAMMA:
-                        return UNCORRELATED_GAMMA;
-                    case CAUCHY:
-                        return UNCORRELATED_CAUCHY;
-                    case EXPONENTIAL:
-                        return UNCORRELATED_EXPONENTIAL;
-                    default:
-                        throw new IllegalArgumentException("Unknown clock distribution model");
-                }
-            case AUTOCORRELATED:
-                return AUTOCORRELATED;
-            default:
-                throw new IllegalArgumentException("Unknown clock model");
-        }
-    }
-
-    private final String displayName;
-    private final ClockType clockType;
-    private final ClockDistributionType clockDistributionType;
-}
\ No newline at end of file
diff --git a/src/dr/app/beauti/types/PriorType.java b/src/dr/app/beauti/types/PriorType.java
index e257eb9..97624bc 100644
--- a/src/dr/app/beauti/types/PriorType.java
+++ b/src/dr/app/beauti/types/PriorType.java
@@ -39,6 +39,7 @@ public enum PriorType {
     NONE_TREE_PRIOR("None (Tree Prior Only)", false, false, false),
     NONE_STATISTIC("None (Statistic)", false, false, false),
     NONE_IMPROPER("Infinite Uniform (Improper)", true, false, false),
+    NONE_FIXED("Fixed value", true, false, false),
     UNIFORM_PRIOR("Uniform", true, false, false),
     EXPONENTIAL_PRIOR("Exponential", true, true, true),
     LAPLACE_PRIOR("Laplace", true, true, true),
@@ -105,6 +106,8 @@ public enum PriorType {
                 break;
             case NONE_STATISTIC:
                 break;
+            case NONE_FIXED:
+                break;
             case ONE_OVER_X_PRIOR:
                 break;
             case CTMC_RATE_REFERENCE_PRIOR:
@@ -150,6 +153,9 @@ public enum PriorType {
             case NONE_STATISTIC:
                 buffer.append("Indirectly Specified Through Other Parameter");
                 break;
+            case NONE_FIXED:
+                buffer.append("Fixed value");
+                break;
             case UNDEFINED:
                 buffer.append("Not yet specified");
                 break;
@@ -253,8 +259,10 @@ public enum PriorType {
         }
 
 
-        if (parameter.priorType.isInitializable && parameter.initial != Double.NaN) {
-            buffer.append(", initial=").append(NumberUtil.formatDecimal(parameter.initial, 10, 6));
+        if (parameter.priorType == NONE_FIXED) {
+            buffer.append(", value=").append(NumberUtil.formatDecimal(parameter.getInitial(), 10, 6));
+        } else if (parameter.priorType.isInitializable && !Double.isNaN(parameter.getInitial())) {
+            buffer.append(", initial=").append(NumberUtil.formatDecimal(parameter.getInitial(), 10, 6));
         }
 
         return buffer.toString();
@@ -332,13 +340,14 @@ public enum PriorType {
         }
         if (parameter.isCMTCRate) {
             return new PriorType[]{
+                    NONE_FIXED,
+                    CTMC_RATE_REFERENCE_PRIOR,
                     NONE_IMPROPER,
                     UNIFORM_PRIOR,
                     EXPONENTIAL_PRIOR,
                     NORMAL_PRIOR,
                     LOGNORMAL_PRIOR,
                     GAMMA_PRIOR,
-                    CTMC_RATE_REFERENCE_PRIOR,
                     INVERSE_GAMMA_PRIOR,
                     ONE_OVER_X_PRIOR};
         }
@@ -349,6 +358,7 @@ public enum PriorType {
         }
         if (parameter.isZeroOne) {
             return new PriorType[]{
+                    NONE_FIXED,
                     UNIFORM_PRIOR,
                     EXPONENTIAL_PRIOR,
                     NORMAL_PRIOR,
@@ -359,6 +369,7 @@ public enum PriorType {
         }
         if (parameter.isNonNegative) {
             return new PriorType[]{
+                    NONE_FIXED,
                     NONE_IMPROPER,
                     UNIFORM_PRIOR,
                     EXPONENTIAL_PRIOR,
@@ -372,6 +383,7 @@ public enum PriorType {
 
         // just a continuous parameter
         return new PriorType[]{
+                NONE_FIXED,
                 NONE_IMPROPER,
                 UNIFORM_PRIOR,
                 EXPONENTIAL_PRIOR,
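Taken together, these PriorType hunks add NONE_FIXED ("Fixed value") to the menus for CTMC
reference rates, zero-one, non-negative and unbounded continuous parameters, and describe
such a parameter by its value rather than an initial value. A sketch of that description
rule (String.format stands in for NumberUtil.formatDecimal(..., 10, 6), whose exact
behaviour is not shown here; the helper itself is illustrative):

    // Illustrative only: summarise how a fixed value differs from an editable initial value.
    static String initialValueSummary(boolean isFixedValue, boolean isInitializable, double initial) {
        if (isFixedValue) {
            return ", value=" + String.format("%.6g", initial);
        } else if (isInitializable && !Double.isNaN(initial)) {
            return ", initial=" + String.format("%.6g", initial);
        }
        return "";
    }
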
diff --git a/src/dr/app/beauti/util/BEAUTiImporter.java b/src/dr/app/beauti/util/BEAUTiImporter.java
index 7e66257..813450c 100644
--- a/src/dr/app/beauti/util/BEAUTiImporter.java
+++ b/src/dr/app/beauti/util/BEAUTiImporter.java
@@ -600,7 +600,7 @@ public class BEAUTiImporter {
         }
 
         options.updatePartitionAllLinks();
-        options.clockModelOptions.initClockModelGroup();
+        //options.clockModelOptions.initClockModelGroup();
     }
 
     private void setClockAndTree(AbstractPartitionData partition) {
diff --git a/src/dr/app/beauti/util/BeautiTemplate.java b/src/dr/app/beauti/util/BeautiTemplate.java
deleted file mode 100644
index a13ac08..0000000
--- a/src/dr/app/beauti/util/BeautiTemplate.java
+++ /dev/null
@@ -1,434 +0,0 @@
-/*
- * BeautiTemplate.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-package dr.app.beauti.util;
-
-import dr.app.beast.BeastVersion;
-import dr.app.beauti.types.PriorType;
-import dr.app.beauti.options.BeautiOptions;
-import dr.app.beauti.options.ModelOptions;
-import dr.app.beauti.options.Operator;
-import dr.app.beauti.options.Parameter;
-import dr.evolution.util.Date;
-import dr.evolution.util.Taxa;
-import dr.evolution.util.Units;
-import dr.evoxml.TaxaParser;
-import dr.evoxml.TaxonParser;
-import dr.xml.XMLParser;
-import org.jdom.Document;
-import org.jdom.Element;
-
-
-/**
- * @author Andrew Rambaut
- * @author Alexei Drummond
- * @author Walter Xie
- * @version $Id: BeautiTemplate.java, rambaut Exp $
- */
-public class BeautiTemplate extends ModelOptions {
-
-	private final BeautiOptions options;
-
-    public BeautiTemplate(BeautiOptions options) {
-    	this.options = options;
-
-    }
-
-
-    /**
-     * Write options from a file
-     *
-     * @param guessDates guess dates?
-     * @return the Document
-     */
-    public Document create(boolean guessDates) {
-
-        final BeastVersion version = new BeastVersion();
-        Element root = new Element("beauti");
-        root.setAttribute("version", version.getVersion());
-
-        Element dataElement = new Element("data");
-
-//        dataElement.addContent(createChild("fileNameStem", fileNameStem));
-
-//        dataElement.addContent(createChild("datesUnits", options.datesUnits));
-//        dataElement.addContent(createChild("datesDirection", options.datesDirection));
-        dataElement.addContent(createChild("translation", options.translation));
-        //TODO:
-//        dataElement.addContent(createChild("startingTreeType", startingTreeType.name()));
-
-        dataElement.addContent(createChild("guessDates", guessDates));
-        dataElement.addContent(createChild("guessType", options.dateGuesser.guessType.name()));
-        dataElement.addContent(createChild("fromLast", options.dateGuesser.fromLast));
-        dataElement.addContent(createChild("order", options.dateGuesser.order));
-        dataElement.addContent(createChild("prefix", options.dateGuesser.prefix));
-        dataElement.addContent(createChild("offset", options.dateGuesser.offset));
-        dataElement.addContent(createChild("unlessLessThan", options.dateGuesser.unlessLessThan));
-        dataElement.addContent(createChild("offset2", options.dateGuesser.offset2));
-
-        root.addContent(dataElement);
-
-        Element taxaElement = new Element(TaxaParser.TAXA);
-
-        for (Taxa taxonSet : options.taxonSets) {
-            Element taxonSetElement = new Element("taxonSet");
-            taxonSetElement.addContent(createChild(XMLParser.ID, taxonSet.getId()));
-            taxonSetElement.addContent(createChild("enforceMonophyly",
-            		options.taxonSetsMono.get(taxonSet) ? "true" : "false"));
-            for (int j = 0; j < taxonSet.getTaxonCount(); j++) {
-                Element taxonElement = new Element(TaxonParser.TAXON);
-                taxonElement.addContent(createChild(XMLParser.ID, taxonSet.getTaxon(j).getId()));
-                taxonSetElement.addContent(taxonElement);
-            }
-            taxaElement.addContent(taxonSetElement);
-        }
-
-        root.addContent(taxaElement);
-
-//        for (PartitionSubstitutionModel model : partitionModels) {
-//
-//            Element modelElement = new Element("model");
-//
-//            /*modelElement.addContent(createChild("nucSubstitutionModel", nucSubstitutionModel));
-//                           modelElement.addContent(createChild("aaSubstitutionModel", aaSubstitutionModel));
-//                           modelElement.addContent(createChild("binarySubstitutionModel", binarySubstitutionModel));
-//                           modelElement.addContent(createChild("frequencyPolicy", frequencyPolicy));
-//                           modelElement.addContent(createChild("gammaHetero", gammaHetero));
-//                           modelElement.addContent(createChild("gammaCategories", gammaCategories));
-//                           modelElement.addContent(createChild("invarHetero", invarHetero));
-//                           modelElement.addContent(createChild("codonHeteroPattern", codonHeteroPattern));
-//                           modelElement.addContent(createChild("maximumTipHeight", maximumTipHeight));
-//                           modelElement.addContent(createChild("hasSetFixedSubstitutionRate", hasSetFixedSubstitutionRate));
-//                           modelElement.addContent(createChild("meanSubstitutionRate", meanSubstitutionRate));
-//                           modelElement.addContent(createChild("fixedSubstitutionRate", fixedSubstitutionRate));
-//                           modelElement.addContent(createChild("unlinkedSubstitutionModel", unlinkedSubstitutionModel));
-//                           modelElement.addContent(createChild("unlinkedHeterogeneityModel", unlinkedHeterogeneityModel));
-//                           modelElement.addContent(createChild("unlinkedFrequencyModel", unlinkedFrequencyModel));
-//                           modelElement.addContent(createChild("clockModel", clockModel));
-//                           modelElement.addContent(createChild("nodeHeightPrior", nodeHeightPrior));
-//                           modelElement.addContent(createChild("parameterization", parameterization));
-//                           modelElement.addContent(createChild("skylineGroupCount", skylineGroupCount));
-//                           modelElement.addContent(createChild("skylineModel", skylineModel));
-//                           modelElement.addContent(createChild("fixedTree", fixedTree)); */
-//
-//            root.addContent(modelElement);
-//        }
-
-        Element priorsElement = new Element("priors");
-
-        for (String name : getParameters().keySet()) {
-            Parameter parameter = getParameters().get(name);
-            Element e = new Element(name);
-            e.addContent(createChild("initial", parameter.initial));
-            e.addContent(createChild("priorType", parameter.priorType));
-            e.addContent(createChild("priorEdited", parameter.isPriorEdited()));
-//            e.addContent(createChild("uniformLower", parameter.uniformLower));
-//            e.addContent(createChild("uniformUpper", parameter.uniformUpper));
-//            e.addContent(createChild("exponentialMean", parameter.exponentialMean));
-//            e.addContent(createChild("exponentialOffset", parameter.exponentialOffset));
-//            e.addContent(createChild("normalMean", parameter.normalMean));
-//            e.addContent(createChild("normalStdev", parameter.normalStdev));
-//            e.addContent(createChild("logNormalMean", parameter.logNormalMean));
-//            e.addContent(createChild("logNormalStdev", parameter.logNormalStdev));
-//            e.addContent(createChild("logNormalOffset", parameter.logNormalOffset));
-//            e.addContent(createChild("gammaAlpha", parameter.gammaAlpha));
-//            e.addContent(createChild("gammaBeta", parameter.gammaBeta));
-//            e.addContent(createChild("gammaOffset", parameter.gammaOffset));
-            priorsElement.addContent(e);
-        }
-
-        for (Taxa taxonSet : options.taxonSets) {
-            Parameter statistic = getStatistics().get(taxonSet);
-            Element e = new Element(statistic.getXMLName());
-            e.addContent(createChild("initial", statistic.initial));
-            e.addContent(createChild("priorType", statistic.priorType));
-            e.addContent(createChild("priorEdited", statistic.isPriorEdited()));
-//            e.addContent(createChild("uniformLower", statistic.uniformLower));
-//            e.addContent(createChild("uniformUpper", statistic.uniformUpper));
-//            e.addContent(createChild("exponentialMean", statistic.exponentialMean));
-//            e.addContent(createChild("exponentialOffset", statistic.exponentialOffset));
-//            e.addContent(createChild("normalMean", statistic.normalMean));
-//            e.addContent(createChild("normalStdev", statistic.normalStdev));
-//            e.addContent(createChild("logNormalMean", statistic.logNormalMean));
-//            e.addContent(createChild("logNormalStdev", statistic.logNormalStdev));
-//            e.addContent(createChild("logNormalOffset", statistic.logNormalOffset));
-//            e.addContent(createChild("gammaAlpha", statistic.gammaAlpha));
-//            e.addContent(createChild("gammaBeta", statistic.gammaBeta));
-//            e.addContent(createChild("gammaOffset", statistic.gammaOffset));
-            priorsElement.addContent(e);
-        }
-
-        root.addContent(priorsElement);
-
-        Element operatorsElement = new Element("operators");
-
-        operatorsElement.addContent(createChild("autoOptimize", options.autoOptimize));
-        for (String name : getOperators().keySet()) {
-            Operator operator = getOperators().get(name);
-            Element e = new Element(name);
-            e.addContent(createChild("tuning", operator.tuning));
-            e.addContent(createChild("tuningEdited", operator.tuningEdited));
-            e.addContent(createChild("weight", operator.weight));
-            e.addContent(createChild("inUse", operator.inUse));
-            operatorsElement.addContent(e);
-        }
-
-        root.addContent(operatorsElement);
-
-        Element mcmcElement = new Element("mcmc");
-
-        mcmcElement.addContent(createChild("chainLength", options.chainLength));
-        mcmcElement.addContent(createChild("logEvery", options.logEvery));
-        mcmcElement.addContent(createChild("echoEvery", options.echoEvery));
-        //if (logFileName != null) mcmcElement.addContent(createChild("logFileName", logFileName));
-        //if (treeFileName != null) mcmcElement.addContent(createChild("treeFileName", treeFileName));
-        //mcmcElement.addContent(createChild("mapTreeLog", mapTreeLog));
-        //if (mapTreeFileName != null) mcmcElement.addContent(createChild("mapTreeFileName", mapTreeFileName));
-        mcmcElement.addContent(createChild("substTreeLog", options.substTreeLog));
-        //if (substTreeFileName != null) mcmcElement.addContent(createChild("substTreeFileName", substTreeFileName));
-
-        root.addContent(mcmcElement);
-
-        return new Document(root);
-    }
-
-    private Element createChild(String name, String value) {
-        Element e = new Element(name);
-        if (value != null) {
-            e.setText(value);
-        }
-        return e;
-    }
-
-    private Element createChild(String name, int value) {
-        Element e = new Element(name);
-        e.setText(Integer.toString(value));
-        return e;
-    }
-
-    private Element createChild(String name, PriorType value) {
-        Element e = new Element(name);
-        e.setText(value.name());
-        return e;
-    }
-
-    private Element createChild(String name, double value) {
-        Element e = new Element(name);
-        e.setText(Double.toString(value));
-        return e;
-    }
-
-    private Element createChild(String name, boolean value) {
-        Element e = new Element(name);
-        e.setText(value ? "true" : "false");
-        return e;
-    }
-
-    /**
-     * Read options from a file
-     *
-     * @param document the Document
-     * @throws dr.xml.XMLParseException if there is a problem with XML parsing
-     */
-    public void parse(Document document) throws dr.xml.XMLParseException {
-
-        Element root = document.getRootElement();
-        if (!root.getName().equals("beauti")) {
-            throw new dr.xml.XMLParseException("This document does not appear to be a BEAUti file");
-        }
-
-        Element taxaElement = root.getChild(TaxaParser.TAXA);
-        Element modelElement = root.getChild("model");
-        Element priorsElement = root.getChild("priors");
-        Element operatorsElement = root.getChild("operators");
-        Element mcmcElement = root.getChild("mcmc");
-        /*
-                  if (taxaElement != null) {
-                      for (Object ts : taxaElement.getChildren("taxonSet")) {
-                          Element taxonSetElement = (Element) ts;
-
-                          String id = getStringChild(taxonSetElement, XMLParser.ID, "");
-                          final Taxa taxonSet = new Taxa(id);
-
-                          Boolean enforceMonophyly = Boolean.valueOf(getStringChild(taxonSetElement, "enforceMonophyly", "false"));
-                          for (Object o : taxonSetElement.getChildren("taxon")) {
-                              Element taxonElement = (Element) o;
-                              String taxonId = getStringChild(taxonElement, XMLParser.ID, "");
-                              int index = taxonList.getTaxonIndex(taxonId);
-                              if (index != -1) {
-                                  taxonSet.addTaxon(taxonList.getTaxon(index));
-                              }
-                          }
-                          taxonSets.add(taxonSet);
-                          taxonSetsMono.put(taxonSet, enforceMonophyly);
-                      }
-                  }
-
-                  if (modelElement != null) {
-                      nucSubstitutionModel = getIntegerChild(modelElement, "nucSubstitutionModel", HKY);
-                      aaSubstitutionModel = getIntegerChild(modelElement, "aaSubstitutionModel", BLOSUM_62);
-                      binarySubstitutionModel = getIntegerChild(modelElement, "binarySubstitutionModel", BIN_SIMPLE);
-                      frequencyPolicy = getIntegerChild(modelElement, "frequencyPolicy", ESTIMATED);
-                      gammaHetero = getBooleanChild(modelElement, "gammaHetero", false);
-                      gammaCategories = getIntegerChild(modelElement, "gammaCategories", 5);
-                      invarHetero = getBooleanChild(modelElement, "invarHetero", false);
-                      codonHeteroPattern = (getBooleanChild(modelElement, "codonHetero", false) ? "123" : null);
-                      codonHeteroPattern = getStringChild(modelElement, "codonHeteroPattern", null);
-                      maximumTipHeight = getDoubleChild(modelElement, "maximumTipHeight", 0.0);
-                      fixedSubstitutionRate = getBooleanChild(modelElement, "fixedSubstitutionRate", false);
-                      hasSetFixedSubstitutionRate = getBooleanChild(modelElement, "hasSetFixedSubstitutionRate", false);
-                      meanSubstitutionRate = getDoubleChild(modelElement, "meanSubstitutionRate", 1.0);
-                      unlinkedSubstitutionModel = getBooleanChild(modelElement, "unlinkedSubstitutionModel", false);
-                      unlinkedHeterogeneityModel = getBooleanChild(modelElement, "unlinkedHeterogeneityModel", false);
-                      unlinkedFrequencyModel = getBooleanChild(modelElement, "unlinkedFrequencyModel", false);
-
-                      clockModel = getIntegerChild(modelElement, "clockModel", clockModel);
-
-                      // the old name was "coalescentModel" so try to read this first
-                      nodeHeightPrior = getIntegerChild(modelElement, "coalescentModel", CONSTANT);
-                      nodeHeightPrior = getIntegerChild(modelElement, "nodeHeightPrior", nodeHeightPrior);
-                      // we don't allow no nodeHeightPrior in BEAUti so switch it to Yule:
-                      if (nodeHeightPrior == NONE_TREE_PRIOR) nodeHeightPrior = YULE;
-
-                      parameterization = getIntegerChild(modelElement, "parameterization", GROWTH_RATE);
-                      skylineGroupCount = getIntegerChild(modelElement, "skylineGroupCount", 10);
-                      skylineModel = getIntegerChild(modelElement, "skylineModel", CONSTANT_SKYLINE);
-                      fixedTree = getBooleanChild(modelElement, "fixedTree", false);
-                  }
-
-                  if (operatorsElement != null) {
-                      autoOptimize = getBooleanChild(operatorsElement, "autoOptimize", true);
-                      for (String name : operators.keySet()) {
-                          Operator operator = operators.get(name);
-                          Element e = operatorsElement.getChild(name);
-                          if (e == null) {
-                              throw new XMLParseException("Operators element, " + name + " missing");
-                          }
-
-                          operator.tuning = getDoubleChild(e, "tuning", 1.0);
-                          operator.tuningEdited = getBooleanChild(e, "tuningEdited", false);
-                          operator.weight = getDoubleChild(e, "weight", 1);
-                          operator.inUse = getBooleanChild(e, "inUse", true);
-                      }
-                  }
-
-                  if (priorsElement != null) {
-                      for (String name : parameters.keySet()) {
-                          Parameter parameter = parameters.get(name);
-                          Element e = priorsElement.getChild(name);
-                          if (e == null) {
-                              throw new XMLParseException("Priors element, " + name + " missing");
-                          }
-
-                          parameter.initial = getDoubleChild(e, "initial", 1.0);
-                          parameter.priorType = PriorType.valueOf(getStringChild(e, "priorType", PriorType.UNIFORM_PRIOR.name()));
-                          parameter.priorEdited = getBooleanChild(e, "priorEdited", false);
-                          parameter.uniformLower = Math.max(getDoubleChild(e, "uniformLower", parameter.uniformLower), parameter.lower);
-                          parameter.uniformUpper = Math.min(getDoubleChild(e, "uniformUpper", parameter.uniformUpper), parameter.upper);
-                          parameter.exponentialMean = getDoubleChild(e, "exponentialMean", parameter.exponentialMean);
-                          parameter.exponentialOffset = getDoubleChild(e, "exponentialOffset", parameter.exponentialOffset);
-                          parameter.normalMean = getDoubleChild(e, "normalMean", parameter.normalMean);
-                          parameter.normalStdev = getDoubleChild(e, "normalStdev", parameter.normalStdev);
-                          parameter.logNormalMean = getDoubleChild(e, "logNormalMean", parameter.logNormalMean);
-                          parameter.logNormalStdev = getDoubleChild(e, "logNormalStdev", parameter.logNormalStdev);
-                          parameter.logNormalOffset = getDoubleChild(e, "logNormalOffset", parameter.logNormalOffset);
-                          parameter.gammaAlpha = getDoubleChild(e, "gammaAlpha", parameter.gammaAlpha);
-                          parameter.gammaBeta = getDoubleChild(e, "gammaBeta", parameter.gammaBeta);
-                          parameter.gammaOffset = getDoubleChild(e, "gammaOffset", parameter.gammaOffset);
-                      }
-
-                      for (Taxa taxonSet : taxonSets) {
-                          Parameter statistic = statistics.get(taxonSet);
-                          if (statistic == null) {
-                              statistic = new Parameter(this, taxonSet, "tMRCA for taxon set ");
-                              statistics.put(taxonSet, statistic);
-                          }
-                          Element e = priorsElement.getChild(statistic.getXMLName());
-                          statistic.initial = getDoubleChild(e, "initial", 1.0);
-                          statistic.priorType = PriorType.valueOf(getStringChild(e, "priorType", PriorType.UNIFORM_PRIOR.name()));
-                          statistic.priorEdited = getBooleanChild(e, "priorEdited", false);
-                          statistic.uniformLower = getDoubleChild(e, "uniformLower", statistic.uniformLower);
-                          statistic.uniformUpper = getDoubleChild(e, "uniformUpper", statistic.uniformUpper);
-                          statistic.exponentialMean = getDoubleChild(e, "exponentialMean", statistic.exponentialMean);
-                          statistic.exponentialOffset = getDoubleChild(e, "exponentialOffset", statistic.exponentialOffset);
-                          statistic.normalMean = getDoubleChild(e, "normalMean", statistic.normalMean);
-                          statistic.normalStdev = getDoubleChild(e, "normalStdev", statistic.normalStdev);
-                          statistic.logNormalMean = getDoubleChild(e, "logNormalMean", statistic.logNormalMean);
-                          statistic.logNormalStdev = getDoubleChild(e, "logNormalStdev", statistic.logNormalStdev);
-                          statistic.logNormalOffset = getDoubleChild(e, "logNormalOffset", statistic.logNormalOffset);
-                          statistic.gammaAlpha = getDoubleChild(e, "gammaAlpha", statistic.gammaAlpha);
-                          statistic.gammaBeta = getDoubleChild(e, "gammaBeta", statistic.gammaBeta);
-                          statistic.gammaOffset = getDoubleChild(e, "gammaOffset", statistic.gammaOffset);
-                      }
-
-                  }
-
-
-                  if (mcmcElement != null) {
-                      upgmaStartingTree = getBooleanChild(mcmcElement, "upgmaStartingTree", true);
-                      chainLength = getIntegerChild(mcmcElement, "chainLength", 100000000);
-                      logEvery = getIntegerChild(mcmcElement, "logEvery", 1000);
-                      echoEvery = getIntegerChild(mcmcElement, "echoEvery", 1000);
-                      logFileName = getStringChild(mcmcElement, "logFileName", null);
-                      treeFileName = getStringChild(mcmcElement, "treeFileName", null);
-                      mapTreeLog = getBooleanChild(mcmcElement, "mapTreeLog", false);
-                      mapTreeFileName = getStringChild(mcmcElement, "mapTreeFileName", null);
-                      substTreeLog = getBooleanChild(mcmcElement, "substTreeLog", false);
-                      substTreeFileName = getStringChild(mcmcElement, "substTreeFileName", null);
-                  }      */
-    }
-
-    private String getStringChild(Element element, String childName, String defaultValue) {
-        String value = element.getChildTextTrim(childName);
-        if (value == null || value.length() == 0) return defaultValue;
-        return value;
-    }
-
-    private int getIntegerChild(Element element, String childName, int defaultValue) {
-        String value = element.getChildTextTrim(childName);
-        if (value == null) return defaultValue;
-        return Integer.parseInt(value);
-    }
-
-    private double getDoubleChild(Element element, String childName, double defaultValue) {
-        String value = element.getChildTextTrim(childName);
-        if (value == null) return defaultValue;
-        return Double.parseDouble(value);
-    }
-
-    private boolean getBooleanChild(Element element, String childName, boolean defaultValue) {
-        String value = element.getChildTextTrim(childName);
-        if (value == null) return defaultValue;
-        return value.equals("true");
-    }
-
-    private Date createDate(double timeValue, Units.Type units, boolean backwards, double origin) {
-        if (backwards) {
-            return Date.createTimeAgoFromOrigin(timeValue, units, origin);
-        } else {
-            return Date.createTimeSinceOrigin(timeValue, units, origin);
-        }
-    }
-
-}
diff --git a/src/dr/app/beauti/util/CommandLineBeauti.java b/src/dr/app/beauti/util/CommandLineBeauti.java
deleted file mode 100755
index 1c73a8d..0000000
--- a/src/dr/app/beauti/util/CommandLineBeauti.java
+++ /dev/null
@@ -1,267 +0,0 @@
-/*
- * CommandLineBeauti.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-package dr.app.beauti.util;
-
-import dr.app.beauti.generator.BeastGenerator;
-import dr.app.beauti.options.BeautiOptions;
-import dr.app.beauti.options.PartitionData;
-import dr.evolution.alignment.Alignment;
-import dr.evolution.io.Importer;
-import dr.evolution.io.NexusImporter;
-import dr.evolution.tree.Tree;
-import dr.evolution.util.Taxa;
-import dr.evolution.util.TaxonList;
-import dr.evolution.util.Units;
-import org.jdom.Document;
-import org.jdom.JDOMException;
-import org.jdom.input.SAXBuilder;
-
-import java.io.*;
-
-/**
- * @author Andrew Rambaut
- * @author Alexei Drummond
- * @version $Id: BeautiFrame.java,v 1.22 2006/09/09 16:07:06 rambaut Exp $
- */
-public class CommandLineBeauti {
-    private final BeautiOptions options = new BeautiOptions();
-
-    public CommandLineBeauti(String inputFileName, String templateFileName, String outputFileName) {
-
-        try {
-            if (!importFromFile(new File(inputFileName))) {
-                return;
-            }
-        } catch (FileNotFoundException fnfe) {
-            System.err.println("Error: Input file not found");
-            return;
-        } catch (IOException ioe) {
-            System.err.println("Error reading input file: " + ioe.getMessage());
-            return;
-        }
-
-        try {
-            if (!readFromFile(new File(templateFileName))) {
-                return;
-            }
-        } catch (FileNotFoundException fnfe) {
-            System.err.println("Error: Template file not found");
-            return;
-        } catch (IOException ioe) {
-            System.err.println("Error reading template file: " + ioe.getMessage());
-            return;
-        }
-
-        //options.guessDates();
-
-        try {
-            BeastGenerator generator = new BeastGenerator(options, null);
-            generator.generateXML(new File(outputFileName));
-
-        } catch (Exception ioe) {
-            System.err.println("Unable to generate file: " + ioe.getMessage());
-        }
-    }
-
-    private boolean readFromFile(File file) throws IOException {
-        try {
-            SAXBuilder parser = new SAXBuilder();
-            Document doc = parser.build(file);
-            options.beautiTemplate.parse(doc);
-
-        } catch (dr.xml.XMLParseException xpe) {
-            System.err.println("Error reading file: This may not be a BEAUti Template file");
-            System.err.println(xpe.getMessage());
-            return false;
-        } catch (JDOMException e) {
-            System.err.println("Unable to open file: This may not be a BEAUti Template file");
-            System.err.println(e.getMessage());
-            return false;
-        }
-        return true;
-    }
-
-    private boolean importFromFile(File file) throws IOException {
-
-        Alignment alignment = null;
-        Tree tree = null;
-        TaxonList taxa = null;
-
-        try {
-            FileReader reader = new FileReader(file);
-
-            NexusApplicationImporter importer = new NexusApplicationImporter(reader);
-
-            boolean done = false;
-
-            while (!done) {
-                try {
-
-                    NexusImporter.NexusBlock block = importer.findNextBlock();
-
-                    if (block == NexusImporter.TAXA_BLOCK) {
-
-                        if (taxa != null) {
-                            throw new NexusImporter.MissingBlockException("TAXA block already defined");
-                        }
-
-                        taxa = importer.parseTaxaBlock();
-
-                    } else if (block == NexusImporter.CALIBRATION_BLOCK) {
-                        if (taxa == null) {
-                            throw new NexusImporter.MissingBlockException("TAXA or DATA block must be defined before a CALIBRATION block");
-                        }
-
-                        importer.parseCalibrationBlock(options.taxonList);
-
-                    } else if (block == NexusImporter.CHARACTERS_BLOCK) {
-
-                        if (taxa == null) {
-                            throw new NexusImporter.MissingBlockException("TAXA block must be defined before a CHARACTERS block");
-                        }
-
-                        if (alignment != null) {
-                            throw new NexusImporter.MissingBlockException("CHARACTERS or DATA block already defined");
-                        }
-
-                        alignment = importer.parseCharactersBlock(options.taxonList);
-
-                    } else if (block == NexusImporter.DATA_BLOCK) {
-
-                        if (alignment != null) {
-                            throw new NexusImporter.MissingBlockException("CHARACTERS or DATA block already defined");
-                        }
-
-                        // A data block doesn't need a taxon block before it
-                        // but if one exists then it will use it.
-                        alignment = importer.parseDataBlock(options.taxonList);
-                        if (taxa == null) {
-                            taxa = alignment;
-                        }
-
-                    } else if (block == NexusImporter.TREES_BLOCK) {
-
-                        if (taxa == null) {
-                            throw new NexusImporter.MissingBlockException("TAXA or DATA block must be defined before a TREES block");
-                        }
-
-                        if (tree != null) {
-                            throw new NexusImporter.MissingBlockException("TREES block already defined");
-                        }
-
-                        Tree[] trees = importer.parseTreesBlock(taxa);
-                        if (trees.length > 0) {
-                            tree = trees[0];
-                        }
-
-/*					} else if (block == NexusApplicationImporter.PAUP_BLOCK) {
-
-						importer.parsePAUPBlock(options);
-
-					} else if (block == NexusApplicationImporter.MRBAYES_BLOCK) {
-
-						importer.parseMrBayesBlock(options);
-
-					} else if (block == NexusApplicationImporter.RHINO_BLOCK) {
-
-						importer.parseRhinoBlock(options);
-*/
-                    } else {
-                        // Ignore the block..
-                    }
-
-                } catch (EOFException ex) {
-                    done = true;
-                }
-            }
-
-            if (alignment == null && taxa == null) {
-                throw new NexusImporter.MissingBlockException("TAXON, DATA or CHARACTERS block is missing");
-            }
-
-        } catch (Importer.ImportException ime) {
-            System.err.println("Error parsing imported file: " + ime);
-            return false;
-        } catch (IOException ioex) {
-            System.err.println("File I/O Error: " + ioex);
-            return false;
-        } catch (Exception ex) {
-            System.err.println("Fatal exception: " + ex);
-            return false;
-        }
-
-        if (options.taxonList == null) {
-            // This is the first partition to be loaded...
-
-            options.taxonList = new Taxa(taxa);
-
-            // check the taxon names for invalid characters
-            boolean foundAmp = false;
-            for (int i = 0; i < taxa.getTaxonCount(); i++) {
-                String name = taxa.getTaxon(i).getId();
-                if (name.indexOf('&') >= 0) {
-                    foundAmp = true;
-                }
-            }
-            if (foundAmp) {
-                System.err.println("One or more taxon names include an illegal character ('&').\n" +
-                        "These characters will prevent BEAST from reading the resulting XML file.\n\n" +
-                        "Please edit the taxon name(s) before reloading the data file.");
-                return false;
-            }
-
-            // make sure they all have dates...
-            for (int i = 0; i < taxa.getTaxonCount(); i++) {
-                if (taxa.getTaxonAttribute(i, "date") == null) {
-                    java.util.Date origin = new java.util.Date(0);
-
-                    dr.evolution.util.Date date = dr.evolution.util.Date.createTimeSinceOrigin(0.0, Units.Type.YEARS, origin);
-                    taxa.getTaxon(i).setAttribute("date", date);
-                }
-            }
-
-            options.fileNameStem = dr.app.util.Utils.trimExtensions(file.getName(),
-                    new String[]{"nex", "NEX", "tre", "TRE", "nexus", "NEXUS"});
-
-            if (alignment != null) {
-                PartitionData partition = new PartitionData(options, options.fileNameStem, file.getName(), alignment);
-                options.dataPartitions.add(partition);
-//                options.dataType = alignment.getDataType();
-
-//                Patterns patterns = new Patterns(alignment);
-//                DistanceMatrix distances = new JukesCantorDistanceMatrix(patterns);
-//                options.meanDistance = distances.getMeanDistance();
-
-            } else {
-//                options.meanDistance = 0.0;
-            }
-        } else {
-            // This is an additional partition so check it uses the same taxa
-        }
-
-        return true;
-    }
-
-}
diff --git a/src/dr/app/beauti/util/XMLWriter.java b/src/dr/app/beauti/util/XMLWriter.java
index ee9bc56..c5ce0a6 100644
--- a/src/dr/app/beauti/util/XMLWriter.java
+++ b/src/dr/app/beauti/util/XMLWriter.java
@@ -93,11 +93,13 @@ public class XMLWriter extends java.io.PrintWriter {
         StringBuffer buffer = new StringBuffer("<");
         buffer.append(tagname);
         for (Attribute attribute : attributes) {
-            buffer.append(' ');
-            buffer.append(attribute.getAttributeName());
-            buffer.append("=\"");
-            buffer.append(attribute.getAttributeValue());
-            buffer.append("\"");
+            if (attribute != null) {
+                buffer.append(' ');
+                buffer.append(attribute.getAttributeName());
+                buffer.append("=\"");
+                buffer.append(attribute.getAttributeValue());
+                buffer.append("\"");
+            }
         }
         if (close) {
             buffer.append("/");
@@ -113,11 +115,13 @@ public class XMLWriter extends java.io.PrintWriter {
         StringBuffer buffer = new StringBuffer("<");
         buffer.append(tagname);
         for (Attribute attribute : attributes) {
-            buffer.append(' ');
-            buffer.append(attribute.getAttributeName());
-            buffer.append("=\"");
-            buffer.append(attribute.getAttributeValue());
-            buffer.append("\"");
+            if (attribute != null) {
+                buffer.append(' ');
+                buffer.append(attribute.getAttributeName());
+                buffer.append("=\"");
+                buffer.append(attribute.getAttributeValue());
+                buffer.append("\"");
+            }
         }
         if (content != null) {
             buffer.append(">");
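 
 The XMLWriter hunk above makes the tag-writing methods tolerate null entries in the attribute list, so callers can pass optional attributes without pre-filtering them. Below is a minimal stand-alone sketch of that null guard; TagSketch and Attr are illustrative stand-ins for this note only, not classes from the BEAUti code base.
 
     import java.util.Arrays;
     import java.util.List;
 
     // Sketch of the null-tolerant attribute writer introduced above.
     // Attr is a hypothetical stand-in for dr.util.Attribute.
     public class TagSketch {
         static final class Attr {
             final String name, value;
             Attr(String name, String value) { this.name = name; this.value = value; }
         }
 
         static String openTag(String tagname, List<Attr> attributes, boolean close) {
             StringBuilder buffer = new StringBuilder("<").append(tagname);
             for (Attr attribute : attributes) {
                 if (attribute != null) {   // optional attributes left as null are simply skipped
                     buffer.append(' ').append(attribute.name)
                           .append("=\"").append(attribute.value).append("\"");
                 }
             }
             if (close) buffer.append("/");
             return buffer.append(">").toString();
         }
 
         public static void main(String[] args) {
             // The second attribute is optional and left as null; it is omitted from the output.
             System.out.println(openTag("taxon", Arrays.asList(new Attr("id", "A"), null), true));
             // prints: <taxon id="A"/>
         }
     }
 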
diff --git a/src/dr/app/bss/test/BeagleSeqSimTest.java b/src/dr/app/bss/test/BeagleSeqSimTest.java
index 1b4b9a4..0831c6a 100644
--- a/src/dr/app/bss/test/BeagleSeqSimTest.java
+++ b/src/dr/app/bss/test/BeagleSeqSimTest.java
@@ -706,7 +706,7 @@ public class BeagleSeqSimTest {
 						branchRateModel, //
 						null, //
 						false, //
-						PartialsRescalingScheme.DEFAULT);
+						PartialsRescalingScheme.DEFAULT, true);
 
 				System.out.println("likelihood = " + nbtl.getLogLikelihood());
 
diff --git a/src/dr/app/tempest/TempEstApp.java b/src/dr/app/tempest/TempEstApp.java
index 50fa98b..4073c6d 100644
--- a/src/dr/app/tempest/TempEstApp.java
+++ b/src/dr/app/tempest/TempEstApp.java
@@ -49,7 +49,7 @@ public class TempEstApp extends MultiDocApplication {
         }
 
         public String getDateString() {
-            return "2003-2015";
+            return "2003-2016";
         }
 
         public String getBuildString() {
@@ -65,6 +65,9 @@ public class TempEstApp extends MultiDocApplication {
                     "Andrew Rambaut</p>" +
                     "<p>Institute of Evolutionary Biology, University of Edinburgh<br>" +
                     "<a href=\"mailto:a.rambaut at ed.ac.uk\">a.rambaut at ed.ac.uk</a></p>" +
+                    "<p>Citation<br>" +
+                    "<a href=\"http://dx.doi.org/10.1093/ve/vew007\">Rambaut, Lam, de Carvalho & Pybus (2016) Exploring the temporal structure of<br>" +
+                    "heterochronous sequences using TempEst. <i>Virus Evolution</i> <b>2</b>: vew007</a></p>" +
                     "<p>Part of the BEAST package:<br>" +
                     "<a href=\"http://beast.bio.ed.ac.uk/\">http://beast.bio.ed.ac.uk/</a></p>";
         }
diff --git a/src/dr/app/tempest/TempestFrame.java b/src/dr/app/tempest/TempestFrame.java
index 1de67f2..cf5b7a4 100644
--- a/src/dr/app/tempest/TempestFrame.java
+++ b/src/dr/app/tempest/TempestFrame.java
@@ -52,7 +52,6 @@ public class TempestFrame extends DocumentFrame {
 
     private static final long serialVersionUID = 2114148696789612509L;
 
-    private JTabbedPane tabbedPane = new JTabbedPane();
     private JLabel statusLabel = new JLabel("No data loaded");
 
     private TempestPanel tempestPanel;
@@ -254,10 +253,8 @@ public class TempestFrame extends DocumentFrame {
 //        }
     }
 
-//    protected void doExportGraphic() {
-//        ExportDialog export = new ExportDialog();
-//        export.showExportDialog( this, "Export view as ...", treeViewer.getContentPane(), "export" );
-//    }
+    protected void doExportGraphic() {
+    }
 
     protected void doExportData() {
         FileDialog dialog = new FileDialog(this,
@@ -302,7 +299,7 @@ public class TempestFrame extends DocumentFrame {
     public JComponent getExportableComponent() {
 
         JComponent exportable = null;
-        Component comp = tabbedPane.getSelectedComponent();
+        Component comp = tempestPanel.getExportableComponent();
 
         if (comp instanceof Exportable) {
             exportable = ((Exportable) comp).getExportableComponent();
@@ -345,11 +342,11 @@ public class TempestFrame extends DocumentFrame {
         }
     };
 
-//    protected AbstractAction exportGraphicAction = new AbstractAction("Export Graphic...") {
-//        public void actionPerformed(ActionEvent ae) {
-//            doExportGraphic();
-//        }
-//    };
+    protected AbstractAction exportGraphicAction = new AbstractAction("Export Graphic...") {
+        public void actionPerformed(ActionEvent ae) {
+            doExportGraphic();
+        }
+    };
 
     protected AbstractAction exportDataAction = new AbstractAction("Export Data...") {
         public void actionPerformed(ActionEvent ae) {
diff --git a/src/dr/app/tools/AncestralSequenceAnnotator.java b/src/dr/app/tools/AncestralSequenceAnnotator.java
index 9d85cfd..09cd954 100644
--- a/src/dr/app/tools/AncestralSequenceAnnotator.java
+++ b/src/dr/app/tools/AncestralSequenceAnnotator.java
@@ -921,6 +921,7 @@ public class AncestralSequenceAnnotator {
                 null,
                 false,
                 PartialsRescalingScheme.DEFAULT,
+                true,
                 null,
                 alignment.getDataType(),
                 TAG,
diff --git a/src/dr/app/tools/TreeAnnotatorDialog.java b/src/dr/app/tools/TreeAnnotatorDialog.java
index f936ea3..5097dcb 100644
--- a/src/dr/app/tools/TreeAnnotatorDialog.java
+++ b/src/dr/app/tools/TreeAnnotatorDialog.java
@@ -161,7 +161,7 @@ public class TreeAnnotatorDialog {
 
         summaryTreeCombo.addItemListener(new ItemListener() {
             public void itemStateChanged(ItemEvent itemEvent) {
-                boolean selected = summaryTreeCombo.getSelectedItem().equals("User target tree");
+                boolean selected = summaryTreeCombo.getSelectedItem().equals(TreeAnnotator.Target.USER_TARGET_TREE);
                 label1.setEnabled(selected);
                 targetFileNameText.setEnabled(selected);
                 targetFileButton.setEnabled(selected);
diff --git a/src/dr/evolution/alignment/AscertainedSitePatterns.java b/src/dr/evolution/alignment/AscertainedSitePatterns.java
index 4138253..b67190b 100644
--- a/src/dr/evolution/alignment/AscertainedSitePatterns.java
+++ b/src/dr/evolution/alignment/AscertainedSitePatterns.java
@@ -30,8 +30,7 @@ import dr.util.Citable;
 import dr.util.Citation;
 import dr.util.CommonCitations;
 
-import java.util.List;
-import java.util.ArrayList;
+import java.util.*;
 
 /**
  * Package: AscertainedSitePatterns
@@ -112,13 +111,21 @@ public class AscertainedSitePatterns extends SitePatterns implements Citable {
 
     }
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.DATA_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Ascertained Site Patterns";
+    }
+
     public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(
+        return Collections.singletonList(
                 CommonCitations.ALEKSEYENKO_2008
         );
-        return citations;
-    }    
+    }
 
     public int getIncludePatternCount() {
         return ascertainmentIncludeCount;
diff --git a/src/dr/evolution/alignment/UncertainSiteList.java b/src/dr/evolution/alignment/UncertainSiteList.java
new file mode 100644
index 0000000..92bb310
--- /dev/null
+++ b/src/dr/evolution/alignment/UncertainSiteList.java
@@ -0,0 +1,101 @@
+/*
+ * UncertainSiteList.java
+ *
+ * Copyright (c) 2002-2016 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.evolution.alignment;
+
+import dr.evolution.datatype.DataType;
+import dr.evolution.util.TaxonList;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+
+import java.util.*;
+
+/**
+ * Created by msuchard on 5/19/16.
+ */
+public class UncertainSiteList extends SimpleSiteList implements Citable {
+
+    public UncertainSiteList(DataType dataType, TaxonList taxonList) {
+        super(dataType, taxonList);
+    }
+
+    public int addPattern(int[] pattern) {
+        throw new IllegalArgumentException("Do not call directly");
+    }
+
+    public void addPattern(double[][] uncertainPattern) {
+        uncertainSitePatterns.add(uncertainPattern);
+
+        int[] map = new int[uncertainPattern.length];
+        for (int i = 0; i < uncertainPattern.length; ++i) {
+            map[i] = getMostProbable(uncertainPattern[i]);
+        }
+        super.addPattern(map);
+    }
+
+    private static int getMostProbable(double[] probabilities) {
+        int map = 0;
+        double maxProb = probabilities[0];
+
+        for (int i = 1; i < probabilities.length; ++i) {
+            if (probabilities[i] > maxProb) {
+                maxProb = probabilities[i];
+                map = i;
+            }
+        }
+        return map;
+    }
+
+    public void fillPartials(final int sequenceIndex, final int site, double[] partials, final int offset) {
+        double[][] sitePatterns = uncertainSitePatterns.get(site);
+        System.arraycopy(sitePatterns[sequenceIndex], 0, partials, offset, getDataType().getStateCount());
+    }
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.DATA_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Uncertain site list";
+    }
+
+    public List<Citation> getCitations() {
+        return Arrays.asList(
+                new Citation(
+                        new Author[]{
+                                new Author("MA", "Suchard"),
+                                new Author("P", "Lemey"),
+                                new Author("M", "Scotch"),
+
+                        },
+                        Citation.Status.IN_PREPARATION
+                ));
+    }
+
+    private List<double[][]> uncertainSitePatterns = new ArrayList<double[][]>();
+}
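 
 The new UncertainSiteList above stores one probability vector per sequence and site, registers the most probable (arg-max) state with the underlying SimpleSiteList, and copies the raw vector into a partials buffer on request. The following free-standing sketch mirrors that bookkeeping under the assumption of a fixed state count; UncertainPatternsSketch is an illustrative class for this note, not part of BEAST.
 
     import java.util.ArrayList;
     import java.util.List;
 
     // Sketch of the uncertain-pattern bookkeeping: store probability vectors,
     // take the arg-max per sequence, and expose the vectors as partials.
     public class UncertainPatternsSketch {
         private final int stateCount;
         private final List<double[][]> patterns = new ArrayList<>();  // [site] -> [sequence][state]
 
         public UncertainPatternsSketch(int stateCount) { this.stateCount = stateCount; }
 
         public void addPattern(double[][] uncertainPattern) {
             patterns.add(uncertainPattern);
         }
 
         // Index of the highest-probability state, mirroring getMostProbable() above.
         static int mostProbable(double[] probabilities) {
             int best = 0;
             for (int i = 1; i < probabilities.length; ++i) {
                 if (probabilities[i] > probabilities[best]) best = i;
             }
             return best;
         }
 
         // Copy the stored probability vector straight into a partials buffer.
         public void fillPartials(int sequenceIndex, int site, double[] partials, int offset) {
             System.arraycopy(patterns.get(site)[sequenceIndex], 0, partials, offset, stateCount);
         }
 
         public static void main(String[] args) {
             UncertainPatternsSketch list = new UncertainPatternsSketch(4);
             list.addPattern(new double[][]{{0.1, 0.7, 0.1, 0.1}});   // one sequence, one site
             double[] partials = new double[4];
             list.fillPartials(0, 0, partials, 0);
             System.out.println(mostProbable(partials));              // prints 1
         }
     }
 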
diff --git a/src/dr/evolution/coalescent/MultiEpochExponential.java b/src/dr/evolution/coalescent/MultiEpochExponential.java
index 92c3fe4..dbb9332 100644
--- a/src/dr/evolution/coalescent/MultiEpochExponential.java
+++ b/src/dr/evolution/coalescent/MultiEpochExponential.java
@@ -25,6 +25,8 @@
 
 package dr.evolution.coalescent;
 
+import dr.math.matrixAlgebra.Vector;
+
 /**
  * This class models a multi-phase exponential growth
  *
@@ -71,11 +73,16 @@ public class MultiEpochExponential extends ConstantPopulation {
     }
 
     private double integrateConstant(double start, double finish, double logDemographic) {
-        return (finish - start) / Math.exp(logDemographic);
+        double integral =  (finish - start) / Math.exp(logDemographic);
+        return integral;
     }
 
     private double integrateExponential(double start, double finish, double logDemographic, double rate) {
-        return (Math.exp(finish * rate) - Math.exp(start * rate)) / Math.exp(logDemographic) / rate;
+        double integral = (Math.exp(finish * rate) - Math.exp(start * rate)) / Math.exp(logDemographic) / rate;
+//        System.err.println("\tint: " + integral + " " + start + " " + finish + " " + logDemographic + " " + rate);
+//        System.err.println("\t\t" + Math.exp(finish * rate) + " - " + Math.exp(start * rate));
+//        System.err.println("\t\t" + Math.exp(finish * rate - logDemographic - Math.log(rate)) + " - " + Math.exp(start * rate - logDemographic - Math.log(rate)));
+        return integral;
     }
 
     public double getAnalyticIntegral(double start, double finish) {
@@ -132,6 +139,11 @@ public class MultiEpochExponential extends ConstantPopulation {
 //        System.err.println("final incr = " + incr + " for " + start + " -> " + finish + " or " +
 //                (start - lastTransitionTime) + " -> " + (finish - lastTransitionTime) + " @ " + rate[currentEpoch] + " & " + Math.exp(logDemographic));
 
+
+        if (Double.isNaN(integral) || Double.isInfinite(integral)) {
+            System.err.println(integral + " " + start + " " + finish + new Vector(rate) + "\n");
+        }
+
         return integral / getN0();
     }
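 
 The MultiEpochExponential hunks above only refactor the two epoch integrals into local variables and add a NaN/infinity trap, so the closed forms themselves are unchanged. As a sanity check on those closed forms, here is a hedged sketch comparing them against a midpoint Riemann sum under the assumption that the epoch population size is exp(logDemographic - rate * t); both the class name and that time convention are assumptions for illustration, inferred from the formula rather than taken from the BEAST source.
 
     // Compare the closed-form epoch integrals with a crude numerical quadrature.
     public class EpochIntegralCheck {
 
         static double integrateConstant(double start, double finish, double logDemographic) {
             return (finish - start) / Math.exp(logDemographic);
         }
 
         static double integrateExponential(double start, double finish,
                                            double logDemographic, double rate) {
             return (Math.exp(finish * rate) - Math.exp(start * rate))
                     / Math.exp(logDemographic) / rate;
         }
 
         // Midpoint rule for the integral of 1 / (exp(logDemographic) * exp(-rate * t)) over [start, finish].
         static double numeric(double start, double finish, double logDemographic, double rate) {
             int n = 1_000_000;
             double h = (finish - start) / n, sum = 0.0;
             for (int i = 0; i < n; ++i) {
                 double t = start + (i + 0.5) * h;
                 sum += h / (Math.exp(logDemographic) * Math.exp(-rate * t));
             }
             return sum;
         }
 
         public static void main(String[] args) {
             double closed = integrateExponential(0.0, 2.0, Math.log(10.0), 0.5);
             double approx = numeric(0.0, 2.0, Math.log(10.0), 0.5);
             System.out.printf("closed form = %.6f, numeric = %.6f%n", closed, approx);
         }
     }
 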
 
diff --git a/src/dr/evomodel/MSSD/CTMCScalePrior.java b/src/dr/evomodel/MSSD/CTMCScalePrior.java
index 7628106..115b063 100644
--- a/src/dr/evomodel/MSSD/CTMCScalePrior.java
+++ b/src/dr/evomodel/MSSD/CTMCScalePrior.java
@@ -33,6 +33,13 @@ import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
 import dr.math.GammaFunction;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.logging.Logger;
 
 /**
  * @author Alexander V. Alekseyenko (alexander.alekseyenko at gmail.com)
@@ -41,7 +48,7 @@ import dr.math.GammaFunction;
  *         Date: Aug 22, 2008
  *         Time: 3:26:57 PM
  */
-public class CTMCScalePrior extends AbstractModelLikelihood {
+public class CTMCScalePrior extends AbstractModelLikelihood implements Citable {
     final private Parameter ctmcScale;
     final private TreeModel treeModel;
     private double treeLength;
@@ -165,4 +172,32 @@ public class CTMCScalePrior extends AbstractModelLikelihood {
     public void makeDirty() {
         treeLengthKnown = false;
     }
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.PRIOR_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "CTMC Scale Reference Prior model";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+    public static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("MAR", "Ferreira"),
+                    new Author("MA", "Suchard")
+            },
+            "Bayesian analysis of elapsed times in continuous-time Markov chains",
+            2008,
+            "Canadian Journal of Statistics",
+            36,
+            355, 368,
+            Citation.Status.PUBLISHED
+    );
 }
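 
 The CTMCScalePrior change above is representative of the Citable refactor applied throughout this commit: each model now reports a category, a short description, and an immutable single-element citation list instead of building a fresh ArrayList on every call. A small hedged sketch of that pattern follows, with Citation reduced to a plain value class standing in for dr.util.Citation; it is not the BEAST interface itself.
 
     import java.util.Collections;
     import java.util.List;
 
     // Sketch of the single-citation pattern: one shared constant, returned
     // as an unmodifiable singleton list.
     public class CitableSketch {
         static final class Citation {
             final String text;
             Citation(String text) { this.text = text; }
             public String toString() { return text; }
         }
 
         static final Citation CITATION =
                 new Citation("Ferreira & Suchard (2008) Canadian Journal of Statistics 36:355-368");
 
         // Equivalent of getCitations() after the change above.
         static List<Citation> getCitations() {
             return Collections.singletonList(CITATION);
         }
 
         public static void main(String[] args) {
             System.out.println(getCitations());
         }
     }
 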
diff --git a/src/dr/evomodel/antigenic/AntigenicDriftPrior.java b/src/dr/evomodel/antigenic/AntigenicDriftPrior.java
index 95283e0..2c3d02a 100644
--- a/src/dr/evomodel/antigenic/AntigenicDriftPrior.java
+++ b/src/dr/evomodel/antigenic/AntigenicDriftPrior.java
@@ -29,10 +29,10 @@ import dr.inference.model.*;
 import dr.util.Author;
 import dr.util.Citable;
 import dr.util.Citation;
+import dr.util.CommonCitations;
 import dr.xml.*;
 
-import java.util.ArrayList;
-import java.util.List;
+import java.util.*;
 
 /**
  * @author Andrew Rambaut
@@ -82,7 +82,7 @@ public class AntigenicDriftPrior extends AbstractModelLikelihood implements Cita
     @Override
     protected void handleVariableChangedEvent(Variable variable, int index, Variable.ChangeType type) {
         if (variable == locationsParameter || variable == offsetsParameter
-            || variable == regressionSlopeParameter || variable == regressionPrecisionParameter) {
+                || variable == regressionSlopeParameter || variable == regressionPrecisionParameter) {
             likelihoodKnown = false;
         }
     }
@@ -129,7 +129,7 @@ public class AntigenicDriftPrior extends AbstractModelLikelihood implements Cita
 
         double ssr = 0.0;
 
-        for (int i=0; i < count; i++) {
+        for (int i = 0; i < count; i++) {
 
             Parameter loc = locationsParameter.getParameter(i);
             double offset = offsetsParameter.getParameterValue(i);
@@ -140,9 +140,9 @@ public class AntigenicDriftPrior extends AbstractModelLikelihood implements Cita
 
             ssr += (x - y) * (x - y);
 
-            for (int j=1; j < dimension; j++) {
+            for (int j = 1; j < dimension; j++) {
                 x = loc.getParameterValue(j);
-                ssr += x*x;
+                ssr += x * x;
             }
 
         }
@@ -189,10 +189,10 @@ public class AntigenicDriftPrior extends AbstractModelLikelihood implements Cita
             Parameter regressionPrecisionParameter = (Parameter) xo.getElementFirstChild(REGRESSION_PRECISION);
 
             AntigenicDriftPrior AGDP = new AntigenicDriftPrior(
-                locationsParameter,
-                offsetsParameter,
-                regressionSlopeParameter,
-                regressionPrecisionParameter);
+                    locationsParameter,
+                    offsetsParameter,
+                    regressionSlopeParameter,
+                    regressionPrecisionParameter);
 
             return AGDP;
         }
@@ -221,20 +221,17 @@ public class AntigenicDriftPrior extends AbstractModelLikelihood implements Cita
         }
     };
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TRAIT_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Bayesian Antigenic Cartography framework";
+    }
+
     public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(new Citation(
-                new Author[]{
-                        new Author("T", "Bedford"),
-                        new Author("MA", "Suchard"),
-                        new Author("P", "Lemey"),
-                        new Author("G", "Dudas"),
-                        new Author("C", "Russell"),
-                        new Author("D", "Smith"),
-                        new Author("A", "Rambaut")
-                },
-                Citation.Status.IN_PREPARATION
-        ));
-        return citations;
+        return Arrays.asList(CommonCitations.BEDFORD_2015_INTEGRATING);
     }
 }
\ No newline at end of file
diff --git a/src/dr/evomodel/antigenic/AntigenicLikelihood.java b/src/dr/evomodel/antigenic/AntigenicLikelihood.java
index e252f41..ed84865 100644
--- a/src/dr/evomodel/antigenic/AntigenicLikelihood.java
+++ b/src/dr/evomodel/antigenic/AntigenicLikelihood.java
@@ -891,26 +891,18 @@ public class AntigenicLikelihood extends AbstractModelLikelihood implements Cita
         }
     };
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TRAIT_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Bayesian Antigenic Cartography framework";
+    }
+
     public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(new Citation(
-                new Author[]{
-                        new Author("T", "Bedford"),
-                        new Author("MA", "Suchard"),
-                        new Author("P", "Lemey"),
-                        new Author("G", "Dudas"),
-                        new Author("V", "Gregory"),
-                        new Author("AJ", "Hay"),
-                        new Author("JW", "McCauley"),
-                        new Author("CA", "Russell"),
-                        new Author("DJ", "Smith"),
-                        new Author("A", "Rambaut")
-                },
-                "Integrating influenza antigenic dynamics with molecular evolution",
-                "eLife",
-                Citation.Status.ACCEPTED
-        ));
-        return citations;
+        return Arrays.asList(CommonCitations.BEDFORD_2015_INTEGRATING);
     }
 
     public static void main(String[] args) {
diff --git a/src/dr/evomodel/antigenic/AntigenicSplitPrior.java b/src/dr/evomodel/antigenic/AntigenicSplitPrior.java
index 138e505..c996f22 100644
--- a/src/dr/evomodel/antigenic/AntigenicSplitPrior.java
+++ b/src/dr/evomodel/antigenic/AntigenicSplitPrior.java
@@ -29,11 +29,10 @@ import dr.inference.model.*;
 import dr.util.Author;
 import dr.util.Citable;
 import dr.util.Citation;
+import dr.util.CommonCitations;
 import dr.xml.*;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
+import java.util.*;
 
 /**
  * @author Andrew Rambaut
@@ -409,20 +408,17 @@ public class AntigenicSplitPrior extends AbstractModelLikelihood implements Cita
         }
     };
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TRAIT_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Bayesian Antigenic Cartography framework";
+    }
+
     public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(new Citation(
-                new Author[]{
-                        new Author("T", "Bedford"),
-                        new Author("MA", "Suchard"),
-                        new Author("P", "Lemey"),
-                        new Author("G", "Dudas"),
-                        new Author("C", "Russell"),
-                        new Author("D", "Smith"),
-                        new Author("A", "Rambaut")
-                },
-                Citation.Status.IN_PREPARATION
-        ));
-        return citations;
+        return Arrays.asList(CommonCitations.BEDFORD_2015_INTEGRATING);
     }
 }
\ No newline at end of file
diff --git a/src/dr/evomodel/antigenic/ContinuousAntigenicTraitLikelihood.java b/src/dr/evomodel/antigenic/ContinuousAntigenicTraitLikelihood.java
index 9ba9301..5147cc3 100644
--- a/src/dr/evomodel/antigenic/ContinuousAntigenicTraitLikelihood.java
+++ b/src/dr/evomodel/antigenic/ContinuousAntigenicTraitLikelihood.java
@@ -521,19 +521,17 @@ public class ContinuousAntigenicTraitLikelihood extends AntigenicTraitLikelihood
         }
     };
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TRAIT_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Bayesian Antigenic Cartography framework";
+    }
+
     public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(new Citation(
-                new Author[]{
-                        new Author("A", "Rambaut"),
-                        new Author("T", "Bedford"),
-                        new Author("P", "Lemey"),
-                        new Author("C", "Russell"),
-                        new Author("D", "Smith"),
-                        new Author("MA", "Suchard"),
-                },
-                Citation.Status.IN_PREPARATION
-        ));
-        return citations;
+        return Arrays.asList(CommonCitations.BEDFORD_2015_INTEGRATING);
     }
 }
diff --git a/src/dr/evomodel/antigenic/DiscreteAntigenicTraitLikelihood.java b/src/dr/evomodel/antigenic/DiscreteAntigenicTraitLikelihood.java
index 84f97ec..99d689d 100644
--- a/src/dr/evomodel/antigenic/DiscreteAntigenicTraitLikelihood.java
+++ b/src/dr/evomodel/antigenic/DiscreteAntigenicTraitLikelihood.java
@@ -769,19 +769,17 @@ public class DiscreteAntigenicTraitLikelihood extends AntigenicTraitLikelihood i
         }
     };
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TRAIT_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Bayesian Antigenic Cartography framework";
+    }
+
     public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(new Citation(
-                new Author[]{
-                        new Author("A", "Rambaut"),
-                        new Author("T", "Bedford"),
-                        new Author("P", "Lemey"),
-                        new Author("C", "Russell"),
-                        new Author("D", "Smith"),
-                        new Author("MA", "Suchard"),
-                },
-                Citation.Status.IN_PREPARATION
-        ));
-        return citations;
+        return Arrays.asList(CommonCitations.BEDFORD_2015_INTEGRATING);
     }
 }
diff --git a/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/AGLikelihoodCluster.java b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/AGLikelihoodCluster.java
index cbaec30..f738720 100644
--- a/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/AGLikelihoodCluster.java
+++ b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/AGLikelihoodCluster.java
@@ -7,6 +7,7 @@ import dr.math.LogTricks;
 import dr.math.distributions.NormalDistribution;
 import dr.util.*;
 import dr.xml.*;
+import mpi.Comm;
 
 import java.io.*;
 import java.util.*;
@@ -40,7 +41,7 @@ public class AGLikelihoodCluster extends AbstractModelLikelihood implements Cita
     private static final int SERUM_STRAIN = 4;
     private static final int SERUM_DATE = 5;
     private static final int TITRE = 6;
-    
+
 
 
     public enum MeasurementType {
@@ -67,7 +68,7 @@ public class AGLikelihoodCluster extends AbstractModelLikelihood implements Cita
             DataTable<String[]> dataTable,
             boolean mergeSerumIsolates,
             double intervalWidth,
-            double driftInitialLocations, 
+            double driftInitialLocations,
             boolean clusterMeans,
             Parameter clusterOffsetsParameter) {
 
@@ -77,7 +78,7 @@ public class AGLikelihoodCluster extends AbstractModelLikelihood implements Cita
         boolean useIntervals = USE_INTERVALS && intervalWidth > 0.0;
 
         int thresholdCount = 0;
-             
+
         double earliestDate = Double.POSITIVE_INFINITY;
         for (int i = 0; i < dataTable.getRowCount(); i++) {
 
@@ -105,7 +106,7 @@ public class AGLikelihoodCluster extends AbstractModelLikelihood implements Cita
                 serumDates.add(serumDate);
                 serum = serumNames.size() - 1;
             }
-            
+
             boolean isThreshold = false;
             boolean isLowerThreshold = false;
             double rawTitre = Double.NaN;
@@ -121,11 +122,11 @@ public class AGLikelihoodCluster extends AbstractModelLikelihood implements Cita
                         thresholdCount++;
                     }
                     // check if threshold above
-                    if (values[TITRE].contains(">")) {                	
+                    if (values[TITRE].contains(">")) {
                         rawTitre = Double.parseDouble(values[TITRE].replace(">",""));
                         isThreshold = true;
                         isLowerThreshold = false;
-                        thresholdCount++;                    	
+                        thresholdCount++;
                         //throw new IllegalArgumentException("Error in measurement: unsupported greater than threshold at row " + (i+1));
                     }
                 }
@@ -228,48 +229,48 @@ public class AGLikelihoodCluster extends AbstractModelLikelihood implements Cita
         logLikelihoods = new double[measurements.size()];
         storedLogLikelihoods = new double[measurements.size()];
 
-       // driftInitialLocations = 1; //charles added - now specified in the xml
+        // driftInitialLocations = 1; //charles added - now specified in the xml
         setupInitialLocations(driftInitialLocations);
-   //     loadInitialLocations(virusNames, serumNames);
-        
+        //     loadInitialLocations(virusNames, serumNames);
+
         //System.out.println("Print now!");
-		//      for (int i = 0; i < virusLocationsParameter.getParameterCount(); i++) {    	  
-		 //   	 System.out.print(virusLocationsParameter.getParameter(i).getParameterValue(0) + " ");
-		  //  	 System.out.print(virusLocationsParameter.getParameter(i).getParameterValue(1) + " ");  	  
-		   //   }
-		   //   System.out.println("");
-     
-
-		        if(clusterMeans){
-		        	this.clusterMeans = clusterMeans;
-		        	this.clusterOffsetsParameter = clusterOffsetsParameter;
-		        	
-		        	
-		        	//if(clusterOffsetsParameter != null){
-		        	//System.out.println("virusNames.size()="+ virusNames.size());
-		        	//clusterOffsetsParameter.setDimension( virusNames.size());  
-		        //    for (int i = 0; i < virusNames.size(); i++) {
-		           // 	clusterOffsetsParameter.setId(virusNames.get(i));
-		           // }
-		            //addVariable(clusterOffsetsParameter);
-		        	//}
-		        	
-		        	//stay null
-		           if (clusterOffsetsParameter == null) {
-		            //	clusterOffsetsParameter = new Parameter.Default("clusterOffsets");
-		            } else {
-		            	//clusterOffsetsParameter.addBounds(new Parameter.DefaultBounds(Double.MAX_VALUE, 0.0, 1000));
-		                addVariable(clusterOffsetsParameter);
-			            clusterOffsetsParameter.setDimension(virusNames.size());
-
-		            }
-		        	
-		        	
-		        	System.out.println(" clusterMeans = true");
-		        	//System.exit(0);
-		        }
-
-		      
+        //      for (int i = 0; i < virusLocationsParameter.getParameterCount(); i++) {
+        //   	 System.out.print(virusLocationsParameter.getParameter(i).getParameterValue(0) + " ");
+        //  	 System.out.print(virusLocationsParameter.getParameter(i).getParameterValue(1) + " ");
+        //   }
+        //   System.out.println("");
+
+
+        if(clusterMeans){
+            this.clusterMeans = clusterMeans;
+            this.clusterOffsetsParameter = clusterOffsetsParameter;
+
+
+            //if(clusterOffsetsParameter != null){
+            //System.out.println("virusNames.size()="+ virusNames.size());
+            //clusterOffsetsParameter.setDimension( virusNames.size());
+            //    for (int i = 0; i < virusNames.size(); i++) {
+            // 	clusterOffsetsParameter.setId(virusNames.get(i));
+            // }
+            //addVariable(clusterOffsetsParameter);
+            //}
+
+            //stay null
+            if (clusterOffsetsParameter == null) {
+                //	clusterOffsetsParameter = new Parameter.Default("clusterOffsets");
+            } else {
+                //clusterOffsetsParameter.addBounds(new Parameter.DefaultBounds(Double.MAX_VALUE, 0.0, 1000));
+                addVariable(clusterOffsetsParameter);
+                clusterOffsetsParameter.setDimension(virusNames.size());
+
+            }
+
+
+            System.out.println(" clusterMeans = true");
+            //System.exit(0);
+        }
+
+
         makeDirty();
     }
 
@@ -397,21 +398,21 @@ public class AGLikelihoodCluster extends AbstractModelLikelihood implements Cita
     }
 
     private void setupInitialLocations(double drift) {
-    	//System.out.println("hihi");
+        //System.out.println("hihi");
         for (int i = 0; i < virusLocationsParameter.getParameterCount(); i++) {
             double offset = 0.0;
             if (virusOffsetsParameter != null) {
-            	//System.out.print("virus Offset Parameter present"+ ": ");
-            	//System.out.print( virusOffsetsParameter.getParameterValue(i) + " ");
-            	//System.out.print(" drift= " + drift + " ");
+                //System.out.print("virus Offset Parameter present"+ ": ");
+                //System.out.print( virusOffsetsParameter.getParameterValue(i) + " ");
+                //System.out.print(" drift= " + drift + " ");
                 offset = drift * virusOffsetsParameter.getParameterValue(i);
             }
             else{
-            	System.out.println("virus Offeset Parameter NOT present");
+                System.out.println("virus Offset Parameter NOT present");
             }
             double r = MathUtils.nextGaussian() + offset;
             virusLocationsParameter.getParameter(i).setParameterValue(0, r);
-           // System.out.println (  virusLocationsParameter.getParameter(i).getParameterValue(0));
+            // System.out.println (  virusLocationsParameter.getParameter(i).getParameterValue(0));
             if (mdsDimension > 1) {
                 for (int j = 1; j < mdsDimension; j++) {
                     r = MathUtils.nextGaussian();
@@ -434,102 +435,102 @@ public class AGLikelihoodCluster extends AbstractModelLikelihood implements Cita
             }
         }
     }
-    
+
     //load initial
     private void loadInitialLocations(List<String> strainNames, List<String> serumNames) {
 
-		FileReader fileReader;
-		try {
-			//fileReader = new FileReader("/Users/charles/Documents/research/antigenic/GenoPheno/Gabriela/results/initialCondition/H3N2_mds.virusLocs.log");
-			fileReader = new FileReader("/Users/charles/Documents/research/antigenic/GenoPheno/Gabriela/results/initialConditionWithInitialLocationDrift/lastIteration/H3N2_mds.virusLocs.log");
-		     /**
-		       * Creating a buffered reader to read the file
-		       */
-		      BufferedReader bReader = new BufferedReader( fileReader);
-
-		      String line;
-
-		      
-		      //this routine may give false results if there are extra lines with spaces
-		      
-		      line = bReader.readLine();
-		      System.out.println(line);
-		      String namevalue[] = line.split("\t");
-
-		      
-		      line = bReader.readLine();
-		      System.out.println(line);
-		      
-		      String datavalue[] = line.split("\t");
-		          
-		      for (int i = 0; i < virusLocationsParameter.getParameterCount(); i++) {
-		    	  
-		    	  int index = findStrain( namevalue[i*2+1], strainNames);  //note. namevalue actually has the extra 1 or 2attached to it.. but it doesn't seem to matter
-		    //	  System.out.println("name: " + virusLocationsParameter.getParameter(i).getParameterName() + " :" + index);
-		    	 // System.out.println(datavalue[i*2+1]);
-		    	  virusLocationsParameter.getParameter(index).setParameterValue(0, Double.parseDouble(datavalue[i*2+1]));
-		    	  virusLocationsParameter.getParameter(index).setParameterValue(1, Double.parseDouble(datavalue[i*2+2]));
-		          //virusLocationsParameter.getParameter(i).setParameterValue(0, 1);
-			    	// System.out.print(virusLocationsParameter.getParameter(i).getParameterValue(0) + " ");
-			    	// System.out.print(virusLocationsParameter.getParameter(i).getParameterValue(1) + " ");  	  
-
-		      }
-		      bReader.close();
-		
-		} catch (FileNotFoundException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-		} catch (IOException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-		}          
- 
-
-		FileReader fileReader2;
-		try {
-			//fileReader2 = new FileReader("/Users/charles/Documents/research/antigenic/GenoPheno/Gabriela/results/initialCondition/H3N2.serumLocs.log");
-			fileReader2 = new FileReader("/Users/charles/Documents/research/antigenic/GenoPheno/Gabriela/results/initialConditionWithInitialLocationDrift/lastIteration/H3N2.serumLocs.log");
-			
-		     /**
-		       * Creating a buffered reader to read the file
-		       */
-		      BufferedReader bReader2 = new BufferedReader( fileReader2);
-
-		      String line;
-		      
-		      line = bReader2.readLine();
-		      System.out.println(line);
-		      String namevalue[] = line.split("\t");
-
-		      
-		      line = bReader2.readLine();
-		      System.out.println(line);
-		      
-		      String datavalue[] = line.split("\t");
-		       //   System.out.println(serumLocationsParameter.getParameterCount());
-		      for (int i = 0; i < serumLocationsParameter.getParameterCount(); i++) {
-		    	  int index = findStrain( namevalue[i*2+1], serumNames);
-
-		    	 // System.out.println(datavalue[i*2+1]);
-		    	  serumLocationsParameter.getParameter(index).setParameterValue(0, Double.parseDouble(datavalue[i*2+1]));
-		    	  serumLocationsParameter.getParameter(index).setParameterValue(1, Double.parseDouble(datavalue[i*2+2]));
-		          //virusLocationsParameter.getParameter(i).setParameterValue(0, 1);
-		   	  
-		      }
-		      bReader2.close();
-		
-		} catch (FileNotFoundException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-		} catch (IOException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-		}        
-      
-    	
+        FileReader fileReader;
+        try {
+            //fileReader = new FileReader("/Users/charles/Documents/research/antigenic/GenoPheno/Gabriela/results/initialCondition/H3N2_mds.virusLocs.log");
+            fileReader = new FileReader("/Users/charles/Documents/research/antigenic/GenoPheno/Gabriela/results/initialConditionWithInitialLocationDrift/lastIteration/H3N2_mds.virusLocs.log");
+            /**
+             * Creating a buffered reader to read the file
+             */
+            BufferedReader bReader = new BufferedReader( fileReader);
+
+            String line;
+
+
+            //this routine may give false results if there are extra lines with spaces
+
+            line = bReader.readLine();
+            System.out.println(line);
+            String namevalue[] = line.split("\t");
+
+
+            line = bReader.readLine();
+            System.out.println(line);
+
+            String datavalue[] = line.split("\t");
+
+            for (int i = 0; i < virusLocationsParameter.getParameterCount(); i++) {
+
+                int index = findStrain( namevalue[i*2+1], strainNames);  //note: namevalue actually has the extra 1 or 2 attached to it, but it doesn't seem to matter
+                //	  System.out.println("name: " + virusLocationsParameter.getParameter(i).getParameterName() + " :" + index);
+                // System.out.println(datavalue[i*2+1]);
+                virusLocationsParameter.getParameter(index).setParameterValue(0, Double.parseDouble(datavalue[i*2+1]));
+                virusLocationsParameter.getParameter(index).setParameterValue(1, Double.parseDouble(datavalue[i*2+2]));
+                //virusLocationsParameter.getParameter(i).setParameterValue(0, 1);
+                // System.out.print(virusLocationsParameter.getParameter(i).getParameterValue(0) + " ");
+                // System.out.print(virusLocationsParameter.getParameter(i).getParameterValue(1) + " ");
+
+            }
+            bReader.close();
+
+        } catch (FileNotFoundException e) {
+            // TODO Auto-generated catch block
+            e.printStackTrace();
+        } catch (IOException e) {
+            // TODO Auto-generated catch block
+            e.printStackTrace();
+        }
+
+
+        FileReader fileReader2;
+        try {
+            //fileReader2 = new FileReader("/Users/charles/Documents/research/antigenic/GenoPheno/Gabriela/results/initialCondition/H3N2.serumLocs.log");
+            fileReader2 = new FileReader("/Users/charles/Documents/research/antigenic/GenoPheno/Gabriela/results/initialConditionWithInitialLocationDrift/lastIteration/H3N2.serumLocs.log");
+
+            /**
+             * Creating a buffered reader to read the file
+             */
+            BufferedReader bReader2 = new BufferedReader( fileReader2);
+
+            String line;
+
+            line = bReader2.readLine();
+            System.out.println(line);
+            String namevalue[] = line.split("\t");
+
+
+            line = bReader2.readLine();
+            System.out.println(line);
+
+            String datavalue[] = line.split("\t");
+            //   System.out.println(serumLocationsParameter.getParameterCount());
+            for (int i = 0; i < serumLocationsParameter.getParameterCount(); i++) {
+                int index = findStrain( namevalue[i*2+1], serumNames);
+
+                // System.out.println(datavalue[i*2+1]);
+                serumLocationsParameter.getParameter(index).setParameterValue(0, Double.parseDouble(datavalue[i*2+1]));
+                serumLocationsParameter.getParameter(index).setParameterValue(1, Double.parseDouble(datavalue[i*2+2]));
+                //virusLocationsParameter.getParameter(i).setParameterValue(0, 1);
+
+            }
+            bReader2.close();
+
+        } catch (FileNotFoundException e) {
+            // TODO Auto-generated catch block
+            e.printStackTrace();
+        } catch (IOException e) {
+            // TODO Auto-generated catch block
+            e.printStackTrace();
+        }
+
+
 
     }
-    
+
 
     @Override
     protected void handleModelChangedEvent(Model model, Object object, int index) {
@@ -554,9 +555,9 @@ public class AGLikelihoodCluster extends AbstractModelLikelihood implements Cita
         } else if (variable == locationDriftParameter) {
             setLocationChangedFlags(true);
         } else if (variable == virusDriftParameter) {
-                setLocationChangedFlags(true);
+            setLocationChangedFlags(true);
         } else if (variable == serumDriftParameter) {
-                setLocationChangedFlags(true);
+            setLocationChangedFlags(true);
         } else if (variable == serumPotenciesParameter) {
             serumEffectChanged[index] = true;
         } else if (variable == serumBreadthsParameter) {
@@ -593,12 +594,12 @@ public class AGLikelihoodCluster extends AbstractModelLikelihood implements Cita
     }
 
     public double getLogLikelihood() {
- //uncommenting for testing only
-    		
+        //uncommenting for testing only
+
         if (!likelihoodKnown) {
             logLikelihood = computeLogLikelihood();
         }
-        
+
 // logLikelihood=0;       //for testing purpose only
 //System.out.println("logLikelihood of AGLikelihoodCluster= " + logLikelihood);
         return logLikelihood;
@@ -606,7 +607,7 @@ public class AGLikelihoodCluster extends AbstractModelLikelihood implements Cita
 
     // This function can be overwritten to implement other sampling densities, i.e. discrete ranks
     private double computeLogLikelihood() {
-    	    	
+
 
         double precision = mdsPrecisionParameter.getParameterValue(0);
         double sd = 1.0 / Math.sqrt(precision);
@@ -630,12 +631,12 @@ public class AGLikelihoodCluster extends AbstractModelLikelihood implements Cita
                         logLikelihoods[i] = computeMeasurementLikelihood(measurement.log2Titre, expectation, sd);
                     } break;
                     case THRESHOLD: {
-                    	if(measurement.isLowerThreshold){
-                    		logLikelihoods[i] = computeMeasurementThresholdLikelihood(measurement.log2Titre, expectation, sd);
-                    	}
-                    	else{
-                    		logLikelihoods[i] = computeMeasurementUpperThresholdLikelihood(measurement.log2Titre, expectation, sd);                  		
-                    	}
+                        if(measurement.isLowerThreshold){
+                            logLikelihoods[i] = computeMeasurementThresholdLikelihood(measurement.log2Titre, expectation, sd);
+                        }
+                        else{
+                            logLikelihoods[i] = computeMeasurementUpperThresholdLikelihood(measurement.log2Titre, expectation, sd);
+                        }
                     } break;
                     case MISSING:
                         break;
@@ -684,45 +685,45 @@ public class AGLikelihoodCluster extends AbstractModelLikelihood implements Cita
         // first dimension is shifted
         double vxOffset = 0.0;
         double sxOffset = 0.0;
-        if(clusterMeans == true){      	
-        	
-        	
-        	if(virusDriftParameter!= null && virusOffsetsParameter != null && serumOffsetsParameter != null && clusterOffsetsParameter!=null){
+        if(clusterMeans == true){
+
+
+            if(virusDriftParameter!= null && virusOffsetsParameter != null && serumOffsetsParameter != null && clusterOffsetsParameter!=null){
                 vxOffset = virusDriftParameter.getParameterValue(0)* clusterOffsetsParameter.getParameterValue(virus);
-        		sxOffset = virusDriftParameter.getParameterValue(0) * serumOffsetsParameter.getParameterValue(serum);
+                sxOffset = virusDriftParameter.getParameterValue(0) * serumOffsetsParameter.getParameterValue(serum);
                 //vxOffset = locationDriftParameter.getParameterValue(0)*  ;               
-           //     System.out.println("clusterOffset =" + clusterOffsetsParameter.getParameterValue(virus));
-                 	//System.out.println("offset = " + vxOffset);
-                 
-        	}
-        	
-        	//overwrite serum drift
-	        if (serumDriftParameter != null && serumOffsetsParameter != null) {
-	        //	System.out.println("hihi ya");
-	            sxOffset = serumDriftParameter.getParameterValue(0) * serumOffsetsParameter.getParameterValue(serum);
-	        }
-	        
+                //     System.out.println("clusterOffset =" + clusterOffsetsParameter.getParameterValue(virus));
+                //System.out.println("offset = " + vxOffset);
+
+            }
+
+            //overwrite serum drift
+            if (serumDriftParameter != null && serumOffsetsParameter != null) {
+                //	System.out.println("hihi ya");
+                sxOffset = serumDriftParameter.getParameterValue(0) * serumOffsetsParameter.getParameterValue(serum);
+            }
+
         }
         else{
-	        if (locationDriftParameter != null && virusOffsetsParameter != null && serumOffsetsParameter != null) {
-	            vxOffset = locationDriftParameter.getParameterValue(0) * virusOffsetsParameter.getParameterValue(virus);
+            if (locationDriftParameter != null && virusOffsetsParameter != null && serumOffsetsParameter != null) {
+                vxOffset = locationDriftParameter.getParameterValue(0) * virusOffsetsParameter.getParameterValue(virus);
                 sxOffset = locationDriftParameter.getParameterValue(0) * serumOffsetsParameter.getParameterValue(serum);
-	        }
-	        if (virusDriftParameter != null && virusOffsetsParameter != null) {
-	            vxOffset = virusDriftParameter.getParameterValue(0) * virusOffsetsParameter.getParameterValue(virus);
-	        }
-	        if (serumDriftParameter != null && serumOffsetsParameter != null) {
-	            sxOffset = serumDriftParameter.getParameterValue(0) * serumOffsetsParameter.getParameterValue(serum);
-	        }
+            }
+            if (virusDriftParameter != null && virusOffsetsParameter != null) {
+                vxOffset = virusDriftParameter.getParameterValue(0) * virusOffsetsParameter.getParameterValue(virus);
+            }
+            if (serumDriftParameter != null && serumOffsetsParameter != null) {
+                sxOffset = serumDriftParameter.getParameterValue(0) * serumOffsetsParameter.getParameterValue(serum);
+            }
         }
 
         double vxLoc = vLoc.getParameterValue(0) + vxOffset;
         double sxLoc = sLoc.getParameterValue(0) + sxOffset;
 
-       // if(virus ==1){
+        // if(virus ==1){
         //	System.out.println("virus " + virus + " has vxLoc of " + vxLoc + " = " + vLoc.getParameterValue(0) + "+" + vxOffset);
         //}
-        
+
         double difference = vxLoc - sxLoc;
         sum += difference * difference;
 
@@ -780,16 +781,16 @@ public class AGLikelihoodCluster extends AbstractModelLikelihood implements Cita
 
         // real titre is somewhere between -infinity and measured 'titre'
         // want the lower tail of the normal CDF
-    	double L = NormalDistribution.cdf(titre, expectation, sd, false);          // returns  CDF
-    	double lnL = Math.log(1-L);  //get the upper tail probability, then log it
+        double L = NormalDistribution.cdf(titre, expectation, sd, false);          // returns  CDF
+        double lnL = Math.log(1-L);  //get the upper tail probability, then log it
 
         if (CHECK_INFINITE && Double.isNaN(lnL) || Double.isInfinite(lnL)) {
             throw new RuntimeException("infinite threshold measurement");
         }
         return lnL;
-    }    
-    
-    
+    }
+
+
     private static double computeMeasurementIntervalLikelihood(double minTitre, double maxTitre, double expectation, double sd) {
 
         // real titre is somewhere between measured minTitre and maxTitre
@@ -872,7 +873,7 @@ public class AGLikelihoodCluster extends AbstractModelLikelihood implements Cita
     private final boolean[] virusEffectChanged;
     private double[] logLikelihoods;
     private double[] storedLogLikelihoods;
-    
+
     private boolean clusterMeans = false;
     private Parameter clusterOffsetsParameter;
 
@@ -918,7 +919,7 @@ public class AGLikelihoodCluster extends AbstractModelLikelihood implements Cita
             System.out.println("Loaded HI table file: " + fileName);
 
             boolean mergeSerumIsolates = xo.getAttribute(MERGE_SERUM_ISOLATES, false);
-            
+
             boolean cluster_means = xo.getAttribute(CLUSTER_MEANS, false);
 
             int mdsDimension = xo.getIntegerAttribute(MDS_DIMENSION);
@@ -956,12 +957,12 @@ public class AGLikelihoodCluster extends AbstractModelLikelihood implements Cita
 
             Parameter virusDrift = null;
             if (xo.hasChildNamed(VIRUS_DRIFT)) {
-            	virusDrift = (Parameter) xo.getElementFirstChild(VIRUS_DRIFT);
+                virusDrift = (Parameter) xo.getElementFirstChild(VIRUS_DRIFT);
             }
 
             Parameter serumDrift = null;
             if (xo.hasChildNamed(SERUM_DRIFT)) {
-            	serumDrift = (Parameter) xo.getElementFirstChild(SERUM_DRIFT);
+                serumDrift = (Parameter) xo.getElementFirstChild(SERUM_DRIFT);
             }
 
             Parameter virusOffsetsParameter = null;
@@ -988,10 +989,10 @@ public class AGLikelihoodCluster extends AbstractModelLikelihood implements Cita
             if (xo.hasChildNamed(VIRUS_AVIDITIES)) {
                 virusAviditiesParameter = (Parameter) xo.getElementFirstChild(VIRUS_AVIDITIES);
             }
-            
+
             Parameter clusterOffsetsParameter = null;
             if (xo.hasChildNamed(CLUSTER_OFFSETS)) {
-            	clusterOffsetsParameter = (Parameter) xo.getElementFirstChild(CLUSTER_OFFSETS);
+                clusterOffsetsParameter = (Parameter) xo.getElementFirstChild(CLUSTER_OFFSETS);
             }
 
 
@@ -1012,10 +1013,10 @@ public class AGLikelihoodCluster extends AbstractModelLikelihood implements Cita
                     assayTable,
                     mergeSerumIsolates,
                     intervalWidth,
-                    driftInitialLocations, 
-                    cluster_means, 
+                    driftInitialLocations,
+                    cluster_means,
                     clusterOffsetsParameter);
-                        
+
 
             Logger.getLogger("dr.evomodel").info("Using EvolutionaryCartography model. Please cite:\n" + Utils.getCitationString(AGL));
 
@@ -1054,7 +1055,7 @@ public class AGLikelihoodCluster extends AbstractModelLikelihood implements Cita
                 new ElementRule(VIRUS_DRIFT, Parameter.class, "Optional parameter for drifting only virus locations, overrides locationDrift", true),
                 new ElementRule(SERUM_DRIFT, Parameter.class, "Optional parameter for drifting only serum locations, overrides locationDrift", true),
                 AttributeRule.newBooleanRule(CLUSTER_MEANS, true, "Should we use cluster means to control the virus locations"),
-               new ElementRule(CLUSTER_OFFSETS, Parameter.class, "Parameter of cluster offsets of all virus"),                
+                new ElementRule(CLUSTER_OFFSETS, Parameter.class, "Parameter of cluster offsets of all virus"),
         };
 
         public Class getReturnType() {
@@ -1062,27 +1063,27 @@ public class AGLikelihoodCluster extends AbstractModelLikelihood implements Cita
         }
     };
 
-    public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(new Citation(
-                new Author[]{
-                        new Author("T", "Bedford"),
-                        new Author("MA", "Suchard"),
-                        new Author("P", "Lemey"),
-                        new Author("G", "Dudas"),
-                        new Author("V", "Gregory"),
-                        new Author("AJ", "Hay"),
-                        new Author("JW", "McCauley"),
-                        new Author("CA", "Russell"),
-                        new Author("DJ", "Smith"),
-                        new Author("A", "Rambaut")
-                },
-                "Integrating influenza antigenic dynamics with molecular evolution",
-                "eLife",
-                Citation.Status.ACCEPTED
-        ));
-        return citations;
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TRAIT_MODELS;
     }
 
+    @Override
+    public String getDescription() {
+        return "Bayesian Antigenic Cartography framework";
+    }
 
+    public List<Citation> getCitations() {
+        return Arrays.asList(new Citation(
+                        new Author[]{
+                                new Author("C", "Cheung"),
+                                new Author("A", "Rambaut"),
+                                new Author("P", "Lemey"),
+                                new Author("MA", "Suchard"),
+                                new Author("T", "Bedford")
+                        },
+                        Citation.Status.IN_PREPARATION
+                ),
+                CommonCitations.BEDFORD_2015_INTEGRATING);
+    }
 }
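
This and several later files in the patch replace an inline citation list with the Citable accessors (getCategory, getDescription, getCitations). A minimal consumer-side sketch of how those three accessors can be used together, assuming the dr.util.Citable and Citation types shown in this diff; the CitationNotes class and formatCitationNote helper are illustrative only and are not part of this commit:

    import dr.util.Citable;
    import dr.util.Citation;

    final class CitationNotes {

        // Builds a human-readable "please cite" note from any Citable model using
        // only the accessors added in this patch. Citation is assumed to render a
        // usable reference line via toString().
        static String formatCitationNote(Citable model) {
            StringBuilder sb = new StringBuilder();
            sb.append(model.getDescription())
              .append(" (").append(model.getCategory()).append(")\n");
            for (Citation citation : model.getCitations()) {
                sb.append('\t').append(citation).append('\n');
            }
            return sb.toString();
        }

        private CitationNotes() {
        }
    }
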
diff --git a/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/AGLikelihoodTreeCluster.java b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/AGLikelihoodTreeCluster.java
index 423be5f..a6fcd74 100644
--- a/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/AGLikelihoodTreeCluster.java
+++ b/src/dr/evomodel/antigenic/phyloClustering/misc/obsolete/AGLikelihoodTreeCluster.java
@@ -1240,30 +1240,29 @@ public class AGLikelihoodTreeCluster extends AbstractModelLikelihood implements
         }
     };
 
-    public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(new Citation(
-                new Author[]{
-                        new Author("T", "Bedford"),
-                        new Author("MA", "Suchard"),
-                        new Author("P", "Lemey"),
-                        new Author("G", "Dudas"),
-                        new Author("V", "Gregory"),
-                        new Author("AJ", "Hay"),
-                        new Author("JW", "McCauley"),
-                        new Author("CA", "Russell"),
-                        new Author("DJ", "Smith"),
-                        new Author("A", "Rambaut")
-                },
-                "Integrating influenza antigenic dynamics with molecular evolution",
-                "eLife",
-                Citation.Status.ACCEPTED
-        ));
-        return citations;
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TRAIT_MODELS;
     }
 
-    
-    
+    @Override
+    public String getDescription() {
+        return "Bayesian Antigenic Cartography framework";
+    }
+
+    public List<Citation> getCitations() {
+        return Arrays.asList(new Citation(
+                        new Author[]{
+                                new Author("C", "Cheung"),
+                                new Author("A", "Rambaut"),
+                                new Author("P", "Lemey"),
+                                new Author("MA", "Suchard"),
+                                new Author("T", "Bedford")
+                        },
+                        Citation.Status.IN_PREPARATION
+                ),
+                CommonCitations.BEDFORD_2015_INTEGRATING);
+    }
     
     public double getLogLikelihoodBasedOnPrecompute(int[] clusterLabel, int numClusters, int[] oldObservationCluster, double[] oldContribution, int[] newObservationCluster, double[] newContribution) {
 	   	
diff --git a/src/dr/evomodel/branchratemodel/CompoundBranchRateModel.java b/src/dr/evomodel/branchratemodel/CompoundBranchRateModel.java
index f63dce9..747a6e4 100644
--- a/src/dr/evomodel/branchratemodel/CompoundBranchRateModel.java
+++ b/src/dr/evomodel/branchratemodel/CompoundBranchRateModel.java
@@ -57,7 +57,11 @@ public class CompoundBranchRateModel extends AbstractBranchRateModel {
     }
 
     public void handleModelChangedEvent(Model model, Object object, int index) {
-        fireModelChanged();
+        if (index != -1) {
+            fireModelChanged(null, index);
+        } else {
+            fireModelChanged();
+        }
     }
 
     protected final void handleVariableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
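
The CompoundBranchRateModel change above forwards a child model's changed index instead of always firing a blanket update. A small sketch of what a downstream listener can do with that index, assuming (as the code above does) that index == -1 means "no specific element"; the DirtyFlags class and its names are illustrative only:

    import java.util.Arrays;

    // Illustrative listener state: a concrete index invalidates one cached branch
    // rate; index == -1 falls back to invalidating everything.
    final class DirtyFlags {
        private final boolean[] dirty;

        DirtyFlags(int size) {
            dirty = new boolean[size];
        }

        void modelChanged(int index) {
            if (index != -1) {
                dirty[index] = true;       // targeted recalculation
            } else {
                Arrays.fill(dirty, true);  // conservative full update
            }
        }

        boolean isDirty(int i) {
            return dirty[i];
        }
    }
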
diff --git a/src/dr/evomodel/branchratemodel/CountableBranchCategoryProvider.java b/src/dr/evomodel/branchratemodel/CountableBranchCategoryProvider.java
index 57c6c7a..89e14d7 100644
--- a/src/dr/evomodel/branchratemodel/CountableBranchCategoryProvider.java
+++ b/src/dr/evomodel/branchratemodel/CountableBranchCategoryProvider.java
@@ -1,7 +1,7 @@
 /*
  * CountableBranchCategoryProvider.java
  *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ * Copyright (c) 2002-2016 Alexei Drummond, Andrew Rambaut and Marc Suchard
  *
  * This file is part of BEAST.
  * See the NOTICE file distributed with this work for additional
@@ -28,7 +28,6 @@ package dr.evomodel.branchratemodel;
 import dr.app.beagle.evomodel.treelikelihood.MarkovJumpsTraitProvider;
 import dr.evolution.tree.NodeRef;
 import dr.evolution.tree.Tree;
-import dr.evolution.tree.TreeDoubleTraitProvider;
 import dr.evolution.tree.TreeTrait;
 import dr.evolution.util.TaxonList;
 import dr.evomodel.tree.TreeModel;
@@ -203,12 +202,7 @@ public interface CountableBranchCategoryProvider extends TreeTrait<Double> {
             super(tree, parameter);
         }
 
-//        public CladeBranchCategoryModel(TreeModel treeModel,
-//				Parameter categories, boolean resetCategories) {
-//        	super(treeModel, categories, resetCategories);
-//		}
-
-		public void handleModelChangedEvent(Model model, Object object, int index) {
+        public void handleModelChangedEvent(Model model, Object object, int index) {
             if (model == treeModel) {
                 cladesChanged = true;
                 fireModelChanged();
@@ -217,17 +211,39 @@ public interface CountableBranchCategoryProvider extends TreeTrait<Double> {
             }
         }
 
+        private void recurseDownClade(final NodeRef node, final TreeModel treeModel, final CladeContainer clade, boolean include) {
+
+            if (include) {
+                setNodeValue(treeModel, node, clade.getRateCategory());
+            }
+
+            if (!treeModel.isExternal(node)) {
+                for (int i = 0; i < tree.getChildCount(node); i++) {
+                    NodeRef child = tree.getChild(node, i);
+                    recurseDownClade(child, treeModel, clade, true);
+                }
+            }
+        }
+
         private void updateCladeRateCategories() {
             if (leafSetList != null) {
+                // Set all to zero
                 for (NodeRef node : treeModel.getNodes()) {
                     if (node != treeModel.getRoot()) {
                         setNodeValue(treeModel, node, 0.0);
                     }
                 }
+                // Handle clades
                 for (CladeContainer clade : leafSetList) {
                     NodeRef node = Tree.Utils.getCommonAncestorNode(treeModel, clade.getLeafSet());
                     if (node != treeModel.getRoot()) {
-                        setNodeValue(treeModel, node, clade.getRateCategory());
+                        if (clade.getIncludeStem()) {
+                            setNodeValue(treeModel, node, clade.getRateCategory());
+                        }
+                    }
+                    // Include the clade below
+                    if (!clade.getExcludeClade()) {
+                        recurseDownClade(node, treeModel, clade, clade.getIncludeStem());
                     }
                 }
             }
@@ -281,26 +297,19 @@ public interface CountableBranchCategoryProvider extends TreeTrait<Double> {
         }
 
         public void setClade(TaxonList taxonList, int rateCategory, boolean includeStem, boolean excludeClade, boolean trunk) throws Tree.MissingTaxonException {
-            if (!excludeClade) {
-                throw new IllegalArgumentException("Including clades not yet implemented in countable branch rate mixture models.");
-            }
-
-            if (!includeStem) {
-                throw new IllegalArgumentException("Excluding stems not yet implemented in countable branch rate mixture models.");
-            }
 
             Set<String> leafSet = Tree.Utils.getLeavesForTaxa(treeModel, taxonList);
             if (!trunk) {
                 if (leafSetList == null) {
                     leafSetList = new ArrayList<CladeContainer>();
                 }
-                leafSetList.add(new CladeContainer(leafSet, rateCategory));
+                leafSetList.add(new CladeContainer(leafSet, rateCategory, includeStem, excludeClade));
                 cladesChanged = true;
             } else {
                 if (trunkSetList == null) {
                     trunkSetList = new ArrayList<CladeContainer>();
                 }
-                trunkSetList.add(new CladeContainer(leafSet, rateCategory));
+                trunkSetList.add(new CladeContainer(leafSet, rateCategory, includeStem, excludeClade));
                 cladesChanged = true;
             }
             if (rateCategory >= categoryCount) {
@@ -323,10 +332,14 @@ public interface CountableBranchCategoryProvider extends TreeTrait<Double> {
         private class CladeContainer {
             private Set<String> leafSet;
             private int rateCategory;
+            boolean includeStem;
+            boolean excludeClade;
 
-            public CladeContainer(Set<String> leafSet, int rateCategory) {
+            public CladeContainer(Set<String> leafSet, int rateCategory, boolean includeStem, boolean excludeClade) {
                 this.leafSet = leafSet;
                 this.rateCategory = rateCategory;
+                this.includeStem = includeStem;
+                this.excludeClade = excludeClade;
             }
 
             public Set<String> getLeafSet() {
@@ -336,6 +349,14 @@ public interface CountableBranchCategoryProvider extends TreeTrait<Double> {
             public int getRateCategory() {
                 return rateCategory;
             }
+
+            public boolean getIncludeStem() {
+                return includeStem;
+            }
+
+            public boolean getExcludeClade() {
+                return excludeClade;
+            }
         }
 
         private boolean cladesChanged = false;
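
With the includeStem/excludeClade flags now carried by CladeContainer, setClade (signature shown above) can express more than "stem branch only". A usage sketch under the assumption that CladeBranchCategoryModel is the nested model type shown in this file and that two TaxonList clades are already defined; the wrapper class, method and variable names here are illustrative:

    import dr.evolution.tree.Tree;
    import dr.evolution.util.TaxonList;
    import dr.evomodel.branchratemodel.CountableBranchCategoryProvider;

    final class CladeCategoryExample {

        // Sketch only: maps the new flags onto branch assignments.
        static void assignCladeCategories(CountableBranchCategoryProvider.CladeBranchCategoryModel model,
                                          TaxonList cladeA, TaxonList cladeB)
                throws Tree.MissingTaxonException {
            // Category 1 on the stem branch of cladeA only; excludeClade == true was
            // previously the only supported combination.
            model.setClade(cladeA, 1, true, true, false);
            // Category 2 on the stem of cladeB and, via recurseDownClade, on every
            // branch inside the clade (excludeClade == false, newly supported here).
            model.setClade(cladeB, 2, true, false, false);
        }

        private CladeCategoryExample() {
        }
    }
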
diff --git a/src/dr/evomodel/branchratemodel/DiscretizedBranchRates.java b/src/dr/evomodel/branchratemodel/DiscretizedBranchRates.java
index 7fe0987..18688be 100644
--- a/src/dr/evomodel/branchratemodel/DiscretizedBranchRates.java
+++ b/src/dr/evomodel/branchratemodel/DiscretizedBranchRates.java
@@ -35,6 +35,12 @@ import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
 import dr.math.MathUtils;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+
+import java.util.Collections;
+import java.util.List;
 
 /**
  * @author Alexei Drummond
@@ -42,7 +48,7 @@ import dr.math.MathUtils;
  * @author Michael Defoin Platel
  * @version $Id: DiscretizedBranchRates.java,v 1.11 2006/01/09 17:44:30 rambaut Exp $
  */
-public class DiscretizedBranchRates extends AbstractBranchRateModel {
+public class DiscretizedBranchRates extends AbstractBranchRateModel implements Citable {
     // Turn on an off the caching on rates for categories -
     // if off then the rates will be flagged to update on
     // a restore.
@@ -232,4 +238,33 @@ public class DiscretizedBranchRates extends AbstractBranchRateModel {
     public double getLogLikelihood() {
         return logDensityNormalizationConstant;
     }
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.MOLECULAR_CLOCK;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Uncorrelated relaxed clock";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+    public static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("AJ", "Drummond"),
+                    new Author("SYW", "Ho"),
+                    new Author("MJ", "Phillips"),
+                    new Author("A", "Rambaut")
+            },
+            "Relaxed Phylogenetics and Dating with Confidence",
+            2006,
+            "PLoS Biology",
+            "4: e88",
+            "10.1371/journal.pbio.0040088"
+    );
 }
diff --git a/src/dr/evomodel/branchratemodel/LocalClockModel.java b/src/dr/evomodel/branchratemodel/LocalClockModel.java
index e7112e1..287b4b4 100644
--- a/src/dr/evomodel/branchratemodel/LocalClockModel.java
+++ b/src/dr/evomodel/branchratemodel/LocalClockModel.java
@@ -34,6 +34,9 @@ import dr.evomodelxml.branchratemodel.LocalClockModelParser;
 import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
 
 import java.util.*;
 
@@ -41,7 +44,7 @@ import java.util.*;
  * @author Andrew Rambaut
  * @version $Id: LocalClockModel.java,v 1.1 2005/04/05 09:27:48 rambaut Exp $
  */
-public class LocalClockModel extends AbstractBranchRateModel {
+public class LocalClockModel extends AbstractBranchRateModel implements Citable {
 
     private TreeModel treeModel;
     protected Map<Integer, LocalClock> localTipClocks = new HashMap<Integer, LocalClock>();
@@ -346,4 +349,30 @@ public class LocalClockModel extends AbstractBranchRateModel {
     }
 
     private final Helper helper = new Helper();
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.MOLECULAR_CLOCK;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Local clock model";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+    public static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("AD", "Yoder"),
+                    new Author("Z", "Yang")
+            },
+            "Estimation of Primate Speciation Dates Using Local Molecular Clocks",
+            2000,
+            "Mol Biol Evol",
+            17, 1081, 1090
+    );
 }
\ No newline at end of file
diff --git a/src/dr/evomodel/branchratemodel/RandomLocalClockModel.java b/src/dr/evomodel/branchratemodel/RandomLocalClockModel.java
index 3270b4a..1d07b14 100644
--- a/src/dr/evomodel/branchratemodel/RandomLocalClockModel.java
+++ b/src/dr/evomodel/branchratemodel/RandomLocalClockModel.java
@@ -34,7 +34,12 @@ import dr.evomodelxml.branchratemodel.RandomLocalClockModelParser;
 import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
 
+import java.util.Collections;
+import java.util.List;
 import java.util.logging.Logger;
 
 /**
@@ -47,7 +52,7 @@ import java.util.logging.Logger;
  * @version $Id: DiscretizedBranchRates.java,v 1.11 2006/01/09 17:44:30 rambaut Exp $
  */
 public class RandomLocalClockModel extends AbstractBranchRateModel
-        implements RandomLocalTreeVariable {
+        implements RandomLocalTreeVariable, Citable {
 
     public RandomLocalClockModel(TreeModel treeModel,
                                  Parameter meanRateParameter,
@@ -232,4 +237,31 @@ public class RandomLocalClockModel extends AbstractBranchRateModel
     private TreeParameterModel rates;
 
     boolean recalculationNeeded = true;
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.MOLECULAR_CLOCK;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Local clock model";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+    public static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("AJ", "Drummond"),
+                    new Author("MA", "Suchard")
+            },
+            "Bayesian random local clocks, or one rate to rule them all",
+            2010,
+            "BMC Biology",
+            "8: 114",
+            "10.1186/1741-7007-8-114"
+    );
 }
\ No newline at end of file
diff --git a/src/dr/evomodel/coalescent/BayesianSkylineLikelihood.java b/src/dr/evomodel/coalescent/BayesianSkylineLikelihood.java
index 1c6bd33..00fe13c 100644
--- a/src/dr/evomodel/coalescent/BayesianSkylineLikelihood.java
+++ b/src/dr/evomodel/coalescent/BayesianSkylineLikelihood.java
@@ -34,8 +34,13 @@ import dr.evomodelxml.coalescent.BayesianSkylineLikelihoodParser;
 import dr.inference.model.Parameter;
 import dr.inference.model.Statistic;
 import dr.math.MathUtils;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
 
+import java.util.Collections;
 import java.util.Date;
+import java.util.List;
 
 /**
  * A likelihood function for the generalized skyline plot coalescent. Takes a tree and population size and group size parameters.
@@ -44,7 +49,7 @@ import java.util.Date;
  *
  * @author Alexei Drummond
  */
-public class BayesianSkylineLikelihood extends OldAbstractCoalescentLikelihood {
+public class BayesianSkylineLikelihood extends OldAbstractCoalescentLikelihood implements Citable {
 
     // PUBLIC STUFF
 
@@ -368,4 +373,31 @@ public class BayesianSkylineLikelihood extends OldAbstractCoalescentLikelihood {
 
     private final int type;
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TREE_PRIORS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Bayesian Skyline Coalescent";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+    public static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("AJ", "Drummond"),
+                    new Author("A", "Rambaut"),
+                    new Author("B", "Shapiro"),
+                    new Author("OG", "Pybus")
+            },
+            "Bayesian coalescent inference of past population dynamics from molecular sequences",
+            2005,
+            "Mol Biol Evol",
+            22, 1185, 1192
+    );
 }
diff --git a/src/dr/evomodel/coalescent/CataclysmicDemographicModel.java b/src/dr/evomodel/coalescent/CataclysmicDemographicModel.java
index 3af2d71..5321a18 100644
--- a/src/dr/evomodel/coalescent/CataclysmicDemographicModel.java
+++ b/src/dr/evomodel/coalescent/CataclysmicDemographicModel.java
@@ -29,6 +29,12 @@ import dr.evolution.coalescent.CataclysmicDemographic;
 import dr.evolution.coalescent.DemographicFunction;
 import dr.evomodelxml.coalescent.CataclysmicDemographicModelParser;
 import dr.inference.model.Parameter;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+
+import java.util.Collections;
+import java.util.List;
 
 /**
  * This class models an exponentially growing model that suffers a
@@ -39,7 +45,7 @@ import dr.inference.model.Parameter;
  * @author Andrew Rambaut
  * @version $Id: CataclysmicDemographicModel.java,v 1.6 2005/05/24 20:25:57 rambaut Exp $
  */
-public class CataclysmicDemographicModel extends DemographicModel {
+public class CataclysmicDemographicModel extends DemographicModel implements Citable {
 
     /**
      * Construct demographic model with default settings
@@ -119,4 +125,30 @@ public class CataclysmicDemographicModel extends DemographicModel {
     Parameter timeParameter = null;
     Parameter declineRateParameter = null;
     CataclysmicDemographic cataclysm = null;
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TREE_PRIORS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Boom-Bust Coalescent";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+    public static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("B", "Shapiro"),
+                    new Author("", "et al.")
+            },
+            "Rise and fall of the Beringian steppe bison",
+            2004,
+            "Science",
+            306, 1561, 1565
+    );
 }
diff --git a/src/dr/evomodel/coalescent/ConstExpConstModel.java b/src/dr/evomodel/coalescent/ConstExpConstModel.java
index e40f52b..870c8fc 100644
--- a/src/dr/evomodel/coalescent/ConstExpConstModel.java
+++ b/src/dr/evomodel/coalescent/ConstExpConstModel.java
@@ -29,6 +29,12 @@ import dr.evolution.coalescent.ConstExpConst;
 import dr.evolution.coalescent.DemographicFunction;
 import dr.evomodelxml.coalescent.ConstExpConstModelParser;
 import dr.inference.model.Parameter;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+
+import java.util.Collections;
+import java.util.List;
 
 /**
  * Exponential growth from a constant ancestral population size.
@@ -37,7 +43,7 @@ import dr.inference.model.Parameter;
  * @author Andrew Rambaut
  * @version $Id: ConstantExponentialModel.java,v 1.8 2005/10/28 02:49:17 alexei Exp $
  */
-public class ConstExpConstModel extends DemographicModel {
+public class ConstExpConstModel extends DemographicModel implements Citable {
 
     //
     // Public stuff
@@ -122,4 +128,31 @@ public class ConstExpConstModel extends DemographicModel {
     private final Parameter timeParameter;
     private final Parameter epochParameter;
     private final ConstExpConst constExpConst;
+
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TREE_PRIORS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Constant-Exponential-Constant Coalescent";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+    public static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("OG", "Pybus"),
+                    new Author("A", "Rambaut")
+            },
+            "GENIE: estimating demographic history from molecular phylogenies",
+            2001,
+            "Bioinformatics",
+            18, 1404, 1405
+    );
 }
\ No newline at end of file
diff --git a/src/dr/evomodel/coalescent/ConstantExponentialModel.java b/src/dr/evomodel/coalescent/ConstantExponentialModel.java
index d9216a7..50ac073 100644
--- a/src/dr/evomodel/coalescent/ConstantExponentialModel.java
+++ b/src/dr/evomodel/coalescent/ConstantExponentialModel.java
@@ -29,6 +29,12 @@ import dr.evolution.coalescent.ConstExponential;
 import dr.evolution.coalescent.DemographicFunction;
 import dr.evomodelxml.coalescent.ConstantExponentialModelParser;
 import dr.inference.model.Parameter;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+
+import java.util.Collections;
+import java.util.List;
 
 /**
  * Exponential growth from a constant ancestral population size.
@@ -37,7 +43,7 @@ import dr.inference.model.Parameter;
  * @author Andrew Rambaut
  * @version $Id: ConstantExponentialModel.java,v 1.8 2005/10/28 02:49:17 alexei Exp $
  */
-public class ConstantExponentialModel extends DemographicModel {
+public class ConstantExponentialModel extends DemographicModel implements Citable {
 
     //
     // Public stuff
@@ -108,4 +114,30 @@ public class ConstantExponentialModel extends DemographicModel {
     Parameter growthRateParameter = null;
     ConstExponential constExponential = null;
     boolean usingGrowthRate = true;
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TREE_PRIORS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Constant-Exponential Coalescent";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+    public static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("OG", "Pybus"),
+                    new Author("A", "Rambaut")
+            },
+            "GENIE: estimating demographic history from molecular phylogenies",
+            2001,
+            "Bioinformatics",
+            18, 1404, 1405
+    );
 }
diff --git a/src/dr/evomodel/coalescent/ConstantLogisticModel.java b/src/dr/evomodel/coalescent/ConstantLogisticModel.java
index a08505a..1c8d986 100644
--- a/src/dr/evomodel/coalescent/ConstantLogisticModel.java
+++ b/src/dr/evomodel/coalescent/ConstantLogisticModel.java
@@ -29,6 +29,12 @@ import dr.evolution.coalescent.ConstLogistic;
 import dr.evolution.coalescent.DemographicFunction;
 import dr.evomodelxml.coalescent.ConstantLogisticModelParser;
 import dr.inference.model.Parameter;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+
+import java.util.Collections;
+import java.util.List;
 
 /**
  * Logistic growth from a constant ancestral population size.
@@ -37,7 +43,7 @@ import dr.inference.model.Parameter;
  * @author Andrew Rambaut
  * @version $Id: ConstantLogisticModel.java,v 1.7 2005/04/11 11:24:39 alexei Exp $
  */
-public class ConstantLogisticModel extends DemographicModel {
+public class ConstantLogisticModel extends DemographicModel implements Citable {
 
     //
     // Public stuff
@@ -112,4 +118,30 @@ public class ConstantLogisticModel extends DemographicModel {
     private Parameter shapeParameter = null;
     private double alpha = 0.5;
     private ConstLogistic constLogistic = null;
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TREE_PRIORS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Constant-Logistic Coalescent";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+    public static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("OG", "Pybus"),
+                    new Author("A", "Rambaut")
+            },
+            "GENIE: estimating demographic history from molecular phylogenies",
+            2001,
+            "Bioinformatics",
+            18, 1404, 1405
+    );
 }
diff --git a/src/dr/evomodel/coalescent/ExpConstExpDemographicModel.java b/src/dr/evomodel/coalescent/ExpConstExpDemographicModel.java
index 5fc27f4..1fc0a06 100644
--- a/src/dr/evomodel/coalescent/ExpConstExpDemographicModel.java
+++ b/src/dr/evomodel/coalescent/ExpConstExpDemographicModel.java
@@ -29,6 +29,12 @@ import dr.evolution.coalescent.DemographicFunction;
 import dr.evolution.coalescent.ExpConstExpDemographic;
 import dr.evomodelxml.coalescent.ExpConstExpDemographicModelParser;
 import dr.inference.model.Parameter;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+
+import java.util.Collections;
+import java.util.List;
 
 /**
  * This class models a two growth-phase demographic with a plateau in the middle
@@ -37,7 +43,7 @@ import dr.inference.model.Parameter;
  * @author Andrew Rambaut
  * @version $Id: ExpConstExpDemographicModel.java,v 1.2 2006/08/18 07:44:25 alexei Exp $
  */
-public class ExpConstExpDemographicModel extends DemographicModel {
+public class ExpConstExpDemographicModel extends DemographicModel implements Citable {
 
     //
     // Public stuff
@@ -132,4 +138,30 @@ public class ExpConstExpDemographicModel extends DemographicModel {
     Parameter timeParameter = null;
     Parameter relTimeParameter = null;
     ExpConstExpDemographic expConstExp = null;
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TREE_PRIORS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Exponential-Constant-Exponential Coalescent";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+    public static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("OG", "Pybus"),
+                    new Author("A", "Rambaut")
+            },
+            "GENIE: estimating demographic history from molecular phylogenies",
+            2001,
+            "Bioinformatics",
+            18, 1404, 1405
+    );
 }
diff --git a/src/dr/evomodel/coalescent/ExpansionModel.java b/src/dr/evomodel/coalescent/ExpansionModel.java
index 9cc7d62..a5d873d 100644
--- a/src/dr/evomodel/coalescent/ExpansionModel.java
+++ b/src/dr/evomodel/coalescent/ExpansionModel.java
@@ -29,6 +29,12 @@ import dr.evolution.coalescent.DemographicFunction;
 import dr.evolution.coalescent.Expansion;
 import dr.evomodelxml.coalescent.ExpansionModelParser;
 import dr.inference.model.Parameter;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+
+import java.util.Collections;
+import java.util.List;
 
 /**
  * Exponential growth from a constant ancestral population size.
@@ -37,7 +43,7 @@ import dr.inference.model.Parameter;
  * @author Andrew Rambaut
  * @version $Id: ExpansionModel.java,v 1.5 2005/05/24 20:25:57 rambaut Exp $
  */
-public class ExpansionModel extends DemographicModel {
+public class ExpansionModel extends DemographicModel implements Citable {
 
     //
     // Public stuff
@@ -111,4 +117,30 @@ public class ExpansionModel extends DemographicModel {
     Parameter growthRateParameter = null;
     Expansion expansion = null;
     boolean usingGrowthRate = true;
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TREE_PRIORS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Expansion Coalescent";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+    public static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("OG", "Pybus"),
+                    new Author("A", "Rambaut")
+            },
+            "GENIE: estimating demographic history from molecular phylogenies",
+            2001,
+            "Bioinformatics",
+            18, 1404, 1405
+    );
 }
diff --git a/src/dr/evomodel/coalescent/ExponentialConstantModel.java b/src/dr/evomodel/coalescent/ExponentialConstantModel.java
index f64b66d..b59a8a9 100644
--- a/src/dr/evomodel/coalescent/ExponentialConstantModel.java
+++ b/src/dr/evomodel/coalescent/ExponentialConstantModel.java
@@ -29,13 +29,19 @@ import dr.evolution.coalescent.DemographicFunction;
 import dr.evolution.coalescent.ExpConstant;
 import dr.evomodelxml.coalescent.ExponentialConstantModelParser;
 import dr.inference.model.Parameter;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+
+import java.util.Collections;
+import java.util.List;
 
 /**
  * Exponential growth followed by constant size.
  *
  * @author Matthew Hall
  */
-public class ExponentialConstantModel extends DemographicModel {
+public class ExponentialConstantModel extends DemographicModel implements Citable {
 
     //
     // Public stuff
@@ -105,4 +111,30 @@ public class ExponentialConstantModel extends DemographicModel {
     Parameter growthRateParameter = null;
     Parameter transitionTimeParameter = null;
     ExpConstant exponentialConstant = null;
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TREE_PRIORS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Exponential-Constant Coalescent";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+    public static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("OG", "Pybus"),
+                    new Author("A", "Rambaut")
+            },
+            "GENIE: estimating demographic history from molecular phylogenies",
+            2001,
+            "Bioinformatics",
+            18, 1404, 1405
+    );
 }
\ No newline at end of file
diff --git a/src/dr/evomodel/coalescent/ExponentialLogisticModel.java b/src/dr/evomodel/coalescent/ExponentialLogisticModel.java
index 28fa9da..2d733ff 100644
--- a/src/dr/evomodel/coalescent/ExponentialLogisticModel.java
+++ b/src/dr/evomodel/coalescent/ExponentialLogisticModel.java
@@ -29,6 +29,12 @@ import dr.evolution.coalescent.DemographicFunction;
 import dr.evolution.coalescent.ExponentialLogistic;
 import dr.evomodelxml.coalescent.ExponentialLogisticModelParser;
 import dr.inference.model.Parameter;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+
+import java.util.Collections;
+import java.util.List;
 
 /**
  * Exponential growth followed by Logistic growth.
@@ -37,7 +43,7 @@ import dr.inference.model.Parameter;
  * @author Alexei Drummond
  * @version $Id$
  */
-public class ExponentialLogisticModel extends DemographicModel {
+public class ExponentialLogisticModel extends DemographicModel implements Citable {
 
     //
     // Public stuff
@@ -134,4 +140,30 @@ public class ExponentialLogisticModel extends DemographicModel {
     Parameter transistionTimeParameter = null;
     double alpha = 0.5;
     ExponentialLogistic exponentialLogistic = null;
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TREE_PRIORS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Exponential-Logistic Coalescent";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+    public static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("OG", "Pybus"),
+                    new Author("A", "Rambaut")
+            },
+            "GENIE: estimating demographic history from molecular phylogenies",
+            2001,
+            "Bioinformatics",
+            18, 1404, 1405
+    );
 }
\ No newline at end of file
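
Each of the demographic models above (ConstExpConst through ExponentialLogistic) embeds the same Pybus and Rambaut 2001 GENIE reference as its own static CITATION. A hedged sketch of the shared-constant alternative, mirroring the CommonCitations usage seen earlier in this patch; the CoalescentCitations holder below is hypothetical and not something this commit introduces:

    import dr.util.Author;
    import dr.util.Citation;

    // Hypothetical shared holder: each demographic model's getCitations() could
    // then return Collections.singletonList(CoalescentCitations.GENIE_2001).
    final class CoalescentCitations {

        static final Citation GENIE_2001 = new Citation(
                new Author[]{
                        new Author("OG", "Pybus"),
                        new Author("A", "Rambaut")
                },
                "GENIE: estimating demographic history from molecular phylogenies",
                2001,
                "Bioinformatics",
                18, 1404, 1405
        );

        private CoalescentCitations() {
        }
    }
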
diff --git a/src/dr/evomodel/coalescent/GMRFMultilocusSkyrideLikelihood.java b/src/dr/evomodel/coalescent/GMRFMultilocusSkyrideLikelihood.java
index 0a9365f..3231f7e 100644
--- a/src/dr/evomodel/coalescent/GMRFMultilocusSkyrideLikelihood.java
+++ b/src/dr/evomodel/coalescent/GMRFMultilocusSkyrideLikelihood.java
@@ -25,6 +25,7 @@
 
 package dr.evomodel.coalescent;
 
+//import com.sun.xml.internal.messaging.saaj.packaging.mime.internet.ParameterList;
 import dr.evolution.coalescent.IntervalType;
 import dr.evolution.coalescent.TreeIntervals;
 import dr.evolution.tree.Tree;
@@ -33,11 +34,15 @@ import dr.evomodelxml.coalescent.GMRFSkyrideLikelihoodParser;
 import dr.inference.model.MatrixParameter;
 import dr.inference.model.Model;
 import dr.inference.model.Parameter;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
 import no.uib.cipr.matrix.DenseVector;
 import no.uib.cipr.matrix.SymmTridiagMatrix;
 
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 
 /**
@@ -45,7 +50,8 @@ import java.util.List;
  * @author Marc A. Suchard
  */
 
-public class GMRFMultilocusSkyrideLikelihood extends GMRFSkyrideLikelihood implements MultiLociTreeSet, CoalescentIntervalProvider {
+public class GMRFMultilocusSkyrideLikelihood extends GMRFSkyrideLikelihood
+        implements MultiLociTreeSet, CoalescentIntervalProvider, Citable {
 
     public static final boolean DEBUG = false;
 
@@ -68,6 +74,13 @@ public class GMRFMultilocusSkyrideLikelihood extends GMRFSkyrideLikelihood imple
     protected double[] storedPloidySums;
     protected SymmTridiagMatrix precMatrix;
     protected SymmTridiagMatrix storedPrecMatrix;
+    private SkygridHelper skygridHelper;
+    protected List<Parameter> missingCov;
+    protected List<MatrixParameter> covariates;
+    protected List<Parameter> beta;
+    protected List<Parameter> covPrecParameters;
+    protected List<SymmTridiagMatrix> weightMatricesForMissingCov;
+    protected int[] lastObservedIndex;
 
     private double[] coalescentEventStatisticValues;
 
@@ -93,6 +106,9 @@ public class GMRFMultilocusSkyrideLikelihood extends GMRFSkyrideLikelihood imple
         this.lambdaParameter = lambda;
         this.betaParameter = beta;
         this.dMatrix = dMatrix;
+        if (dMatrix != null) {
+            addVariable(dMatrix);
+        }
         this.timeAwareSmoothing = timeAwareSmoothing;
 
         this.cutOff = cutOff;
@@ -107,6 +123,9 @@ public class GMRFMultilocusSkyrideLikelihood extends GMRFSkyrideLikelihood imple
         addVariable(lambdaParameter);
         if (betaParameter != null) {
             addVariable(betaParameter);
+            skygridHelper = new SkygridCovariateHelper();
+        } else {
+            skygridHelper = new SkygridHelper();
         }
         if (phiParameter != null) {
             addVariable(phiParameter);
@@ -172,32 +191,69 @@ public class GMRFMultilocusSkyrideLikelihood extends GMRFSkyrideLikelihood imple
                                            Parameter groupParameter,
                                            Parameter precParameter,
                                            Parameter lambda,
-                                           Parameter beta,
+                                           Parameter betaParameter,
                                            MatrixParameter dMatrix,
                                            boolean timeAwareSmoothing,
-                                           Parameter specGridPoints) {
+                                           Parameter specGridPoints,
+                                           List<MatrixParameter> covariates,
+                                           Parameter ploidyFactorsParameter,
+                                           List<Parameter> lastObservedIndexParameter,
+                                           List<Parameter> covPrecParameters,
+                                           List<Parameter> betaList) {
 
         super(GMRFSkyrideLikelihoodParser.SKYLINE_LIKELIHOOD);
 
-        gridPoints = specGridPoints.getParameterValues();
-        //gridPointsSpecified = true;
+        this.gridPoints = specGridPoints.getParameterValues();
         this.numGridPoints = gridPoints.length;
         this.cutOff = gridPoints[numGridPoints - 1];
 
+        if (lastObservedIndexParameter != null) {
+            lastObservedIndex = new int[lastObservedIndexParameter.size()];
+            for (int i = 0; i < lastObservedIndexParameter.size(); i++) {
+                this.lastObservedIndex[i] = (int) lastObservedIndexParameter.get(i).getParameterValue(0);
+            }
+        }
+
+        /*else{
+            for(int i=0; i < beta.getDimension(); i++) {
+                this.lastObservedIndex[i] = popParameter.getDimension();
+            }
+        }*/
+
+        this.betaParameter = betaParameter;
+        if (betaParameter != null) {
+            addVariable(betaParameter);
+        }
+
         this.popSizeParameter = popParameter;
         this.groupSizeParameter = groupParameter;
         this.precisionParameter = precParameter;
         this.lambdaParameter = lambda;
-        this.betaParameter = beta;
+        this.beta = betaList;
         this.dMatrix = dMatrix;
+        if (dMatrix != null) {
+            addVariable(dMatrix);
+        }
         this.timeAwareSmoothing = timeAwareSmoothing;
+        this.ploidyFactors = ploidyFactorsParameter;
+        this.covariates = covariates;
+        if (covariates != null) {
+            for (MatrixParameter cov : covariates) {
+                addVariable(cov);
+            }
+        }
+        this.covPrecParameters = covPrecParameters;
+        if (covPrecParameters != null) {
+            for (Parameter covPrec : covPrecParameters) {
+                addVariable(covPrec);
+            }
+        }
 
         addVariable(popSizeParameter);
         addVariable(precisionParameter);
         addVariable(lambdaParameter);
-        if (betaParameter != null) {
-            addVariable(betaParameter);
-        }
+
+        addVariable(ploidyFactors);
 
         setTree(treeList);
 
@@ -215,7 +271,28 @@ public class GMRFMultilocusSkyrideLikelihood extends GMRFSkyrideLikelihood imple
 
         oldFieldLength = getCorrectOldFieldLength();
 
+        if (ploidyFactors.getDimension() != treeList.size()) {
+            throw new IllegalArgumentException("Ploidy factor parameter should have length " + treeList.size());
+        }
+
         // Field length must be set by this point
+
+        if (betaList != null || betaParameter != null) {
+            if (betaList != null) {
+                for (Parameter betaParam : betaList) {
+                    addVariable(betaParam);
+                }
+            }
+            if (lastObservedIndexParameter != null) {
+                setupGMRFWeightsForMissingCov();
+                skygridHelper = new SkygridMissingCovariateHelper();
+            } else {
+                skygridHelper = new SkygridCovariateHelper();
+            }
+        } else {
+            skygridHelper = new SkygridHelper();
+        }
+
         wrapSetupIntervals();
         coalescentIntervals = new double[oldFieldLength];
         storedCoalescentIntervals = new double[oldFieldLength];
@@ -234,7 +311,6 @@ public class GMRFMultilocusSkyrideLikelihood extends GMRFSkyrideLikelihood imple
 
     }
 
-
     protected void setTree(List<Tree> treeList) {
         treesSet = this;
         this.treeList = treeList;
@@ -288,11 +364,8 @@ public class GMRFMultilocusSkyrideLikelihood extends GMRFSkyrideLikelihood imple
     }
 
     public void initializationReport() {
-        System.out.println("Creating a GMRF smoothed skyride model for multiple loci:");
+        System.out.println("Creating a GMRF smoothed skyride model for multiple loci (SkyGrid)");
         System.out.println("\tPopulation sizes: " + popSizeParameter.getDimension());
-        System.out.println("\tIf you publish results using this model, please reference: ");
-        System.out.println("\t\tMinin, Bloomquist and Suchard (2008) Molecular Biology and Evolution, 25, 1459-1471, and");
-        System.out.println("\t\tGill, Lemey, Faria, Rambaut, Shapiro and Suchard (2013) Molecular Biology and Evolution, 30, 713-724.");
     }
 
     public void wrapSetupIntervals() {
@@ -352,7 +425,7 @@ public class GMRFMultilocusSkyrideLikelihood extends GMRFSkyrideLikelihood imple
 
             numLineages = intervalsList.get(i).getLineageCount(currentTimeIndex + 1);
             minGridIndex = 0;
-            while (minGridIndex < numGridPoints - 1 && gridPoints[minGridIndex] <= currentTime) {
+            while (minGridIndex < numGridPoints - 1 && gridPoints[minGridIndex] <= currentTime) { // MAS: Unclear about need for -1
                 minGridIndex++;
             }
             currentGridIndex = minGridIndex;
@@ -368,6 +441,7 @@ public class GMRFMultilocusSkyrideLikelihood extends GMRFSkyrideLikelihood imple
 
             if (maxGridIndex >= 0 && minGridIndex < numGridPoints) {
 
+
                 //from likelihood of interval between first sampling time and gridPoints[minGridIndex]
 
                 while (nextTime < gridPoints[currentGridIndex]) {
@@ -487,7 +561,9 @@ public class GMRFMultilocusSkyrideLikelihood extends GMRFSkyrideLikelihood imple
                     currentTimeIndex++;
 
                 }
-            // if tree does not overlap with any gridpoints/change-points, in which case logpopsize is constant
+
+                // if tree does not overlap with any gridpoints/change-points, in which case logpopsize is constant
+
             } else {
                 while ((currentTimeIndex + 1) < intervalsList.get(i).getIntervalCount()) {
                     //check to see if interval is coalescent interval or sampling interval
@@ -585,38 +661,10 @@ public class GMRFMultilocusSkyrideLikelihood extends GMRFSkyrideLikelihood imple
     }
 
 
-    protected double calculateLogFieldLikelihood() {
-
-        if (!intervalsKnown) {
-            //intervalsKnown -> false when handleModelChanged event occurs in super.
-            wrapSetupIntervals();
-            setupSufficientStatistics();
-            intervalsKnown = true;
-        }
-
-        double currentLike = 0;
-        DenseVector diagonal1 = new DenseVector(fieldLength);
-        DenseVector currentGamma = new DenseVector(popSizeParameter.getParameterValues());
-
-        SymmTridiagMatrix currentQ = getScaledWeightMatrix(precisionParameter.getParameterValue(0), lambdaParameter.getParameterValue(0));
-        currentQ.mult(currentGamma, diagonal1);
-
-        //        currentLike += 0.5 * logGeneralizedDeterminant(currentQ) - 0.5 * currentGamma.dot(diagonal1);
-
-        currentLike += 0.5 * (fieldLength - 1) * Math.log(precisionParameter.getParameterValue(0)) - 0.5 * currentGamma.dot(diagonal1);
-        if (lambdaParameter.getParameterValue(0) == 1) {
-            currentLike -= (fieldLength - 1) / 2.0 * LOG_TWO_TIMES_PI;
-        } else {
-            currentLike -= fieldLength / 2.0 * LOG_TWO_TIMES_PI;
-        }
-
-        return currentLike;
-    }
-
     public double getLogLikelihood() {
         if (!likelihoodKnown) {
             logLikelihood = calculateLogCoalescentLikelihood();
-            logFieldLikelihood = calculateLogFieldLikelihood();
+            logFieldLikelihood = skygridHelper.getLogFieldLikelihood();
             likelihoodKnown = true;
         }
 
@@ -659,6 +707,48 @@ public class GMRFMultilocusSkyrideLikelihood extends GMRFSkyrideLikelihood imple
     }
 
 
+    protected void setupGMRFWeightsForMissingCov() {
+        //System.err.println("fieldLength: " + fieldLength);
+        // System.err.println("lastObservedIndex: " + lastObservedIndex);
+        //Set up the weight Matrix
+        weightMatricesForMissingCov = new ArrayList<SymmTridiagMatrix>();
+
+        for (int i = 0; i < covPrecParameters.size(); i++) {
+            double[] offdiag = new double[fieldLength - lastObservedIndex[i] - 1];
+            double[] diag = new double[fieldLength - lastObservedIndex[i]];
+
+            //First set up the offdiagonal entries;
+
+            for (int k = 0; k < fieldLength - lastObservedIndex[i] - 1; k++) {
+                offdiag[k] = -1;
+            }
+
+            //Then set up the diagonal entries;
+            for (int k = 0; k < fieldLength - lastObservedIndex[i] - 1; k++) {
+                //	diag[i] = -(offdiag[i] + offdiag[i - 1]);
+                diag[k] = 2.0;
+            }
+            //Take care of the endpoint
+            diag[fieldLength - lastObservedIndex[i] - 1] = 1.0;
+
+            weightMatricesForMissingCov.add(i, new SymmTridiagMatrix(diag, offdiag));
+        }
+
+    }
+
+
+    public SymmTridiagMatrix getScaledWeightMatrixForMissingCov(double precision, int covIndex, int lastObs) {
+        SymmTridiagMatrix a = weightMatricesForMissingCov.get(covIndex).copy();
+        for (int i = 0; i < a.numRows() - 1; i++) {
+            a.set(i, i, a.get(i, i) * precision);
+            a.set(i + 1, i, a.get(i + 1, i) * precision);
+        }
+        a.set(fieldLength - lastObs - 1, fieldLength - lastObs - 1,
+                a.get(fieldLength - lastObs - 1, fieldLength - lastObs - 1) * precision);
+        return a;
+    }
+
+
     private List<Tree> treeList;
     private List<TreeIntervals> intervalsList;
 
@@ -678,6 +768,14 @@ public class GMRFMultilocusSkyrideLikelihood extends GMRFSkyrideLikelihood imple
         return ploidyFactors.getParameterValue(nt);
     }
 
+    public List<Parameter> getBetaListParameter() {
+        return beta;
+    }
+
+    public List<MatrixParameter> getCovariates() {
+        return covariates;
+    }
+
     public void storeTheState() {
         for (TreeIntervals intervals : intervalsList) {
             intervals.storeState();
@@ -718,5 +816,217 @@ public class GMRFMultilocusSkyrideLikelihood extends GMRFSkyrideLikelihood imple
     public IntervalType getCoalescentIntervalType(int i) {
         return null;
     }*/
-}
 
+    class SkygridHelper {
+
+        public SkygridHelper() {
+        }
+
+        protected void updateGammaWithCovariates(DenseVector currentGamma) {
+            // Do nothing
+        }
+
+        protected double handleMissingValues() {
+            return 0.0;
+        }
+
+        public double getLogFieldLikelihood() {
+
+            if (!intervalsKnown) {
+                //intervalsKnown -> false when handleModelChanged event occurs in super.
+                wrapSetupIntervals();
+                setupSufficientStatistics();
+                intervalsKnown = true;
+            }
+
+            DenseVector diagonal1 = new DenseVector(fieldLength);
+            DenseVector currentGamma = new DenseVector(popSizeParameter.getParameterValues());
+
+            updateGammaWithCovariates(currentGamma);
+
+            double currentLike = handleMissingValues();
+
+            SymmTridiagMatrix currentQ = getScaledWeightMatrix(precisionParameter.getParameterValue(0), lambdaParameter.getParameterValue(0));
+            currentQ.mult(currentGamma, diagonal1);
+
+            currentLike += 0.5 * (fieldLength - 1) * Math.log(precisionParameter.getParameterValue(0)) - 0.5 * currentGamma.dot(diagonal1);
+            if (lambdaParameter.getParameterValue(0) == 1) {
+                currentLike -= (fieldLength - 1) / 2.0 * LOG_TWO_TIMES_PI;
+            } else {
+                currentLike -= fieldLength / 2.0 * LOG_TWO_TIMES_PI;
+            }
+
+            return currentLike;
+        }
+    }
+
+
+    class SkygridCovariateHelper extends SkygridHelper {
+
+        public SkygridCovariateHelper() {
+        }
+
+        @Override
+        protected void updateGammaWithCovariates(DenseVector currentGamma) {
+
+            // Handle betaParameter / designMatrix
+
+            if (NEW_APPROACH) {
+
+                final int N = currentGamma.size();
+                double[] update = new double[N];
+
+                if (dMatrix != null) {
+                    final int K = dMatrix.getColumnDimension();
+
+                    if (N != dMatrix.getRowDimension()) {
+                        throw new RuntimeException("Incorrect covariate dimensions (" + N + " != "
+                                + dMatrix.getRowDimension() + ")");
+                    }
+
+                    for (int i = 0; i < N; ++i) {
+                        for (int j = 0; j < K; ++j) {
+                            update[i] += dMatrix.getParameterValue(i, j) * betaParameter.getParameterValue(j);
+                        }
+                    }
+                }
+
+                if (covariates != null) {
+                    if (beta.size() != covariates.size()) {
+                        throw new RuntimeException("beta.size() != covariates.size()");
+                    }
+
+                    for (int k = 0; k < beta.size(); ++k) {
+
+                        Parameter b = beta.get(k);
+                        final int J = b.getDimension();
+                        MatrixParameter covariate = covariates.get(k);
+
+                        if ((J != covariate.getRowDimension()) ||
+                                (N != covariate.getColumnDimension())) { // Note: XML currently has covariates transposed
+                            throw new RuntimeException("Incorrect dimensions in " + covariate.getId());
+                        }
+
+                        for (int i = 0; i < N; ++i) {
+                            for (int j = 0; j < J; ++j) {
+                                update[i] += covariate.getParameterValue(j, i) * b.getParameterValue(j);
+                            }
+                        }
+                    }
+                }
+
+                for (int i = 0; i < N; ++i) {
+                    currentGamma.set(i, currentGamma.get(i) - update[i]);
+                }
+
+            } else {
+                DenseVector currentBeta = new DenseVector(beta.size());
+
+                for (int i = 0; i < beta.size(); i++) {
+                    currentBeta.set(i, beta.get(i).getParameterValue(0));
+                }
+
+                //int numMissing = fieldLength - lastObservedIndex;
+                //DenseVector tempVectCov = new DenseVector(numMissing);
+
+                //System.err.println("covariates.size(): " + covariates.size());
+                //System.err.println("covariates.get(0).getColumnDimension: " + covariates.get(0).getColumnDimension());
+                //System.err.println("covariates.get(0).getRowDimension: " + covariates.get(0).getRowDimension());
+
+                for (int i = 0; i < covariates.size(); i++) {
+                    for (int j = 0; j < covariates.get(i).getColumnDimension(); j++) {
+                        // System.err.println("j: " + j);
+                        // System.err.println("covariates.get(i).getParameterValue(0,j): " + covariates.get(i).getParameterValue(0,j));
+                        currentGamma.set(j, currentGamma.get(j) - covariates.get(i).getParameterValue(0, j) * currentBeta.get(i));
+                    }
+                }
+            }
+        }
+    }
+
+    private static final boolean NEW_APPROACH = true;
+
+    class SkygridMissingCovariateHelper extends SkygridCovariateHelper {
+
+        public SkygridMissingCovariateHelper() {
+        }
+
+        @Override
+        protected double handleMissingValues() {
+
+            int numMissing;
+            DenseVector tempVectMissingCov;
+            SymmTridiagMatrix missingCovQ;
+            DenseVector tempVectMissingCov2;
+
+            double currentLike = 0.0;
+
+            for (int i = 0; i < covPrecParameters.size(); i++) {
+
+                numMissing = fieldLength - lastObservedIndex[i];
+                tempVectMissingCov = new DenseVector(numMissing);
+                tempVectMissingCov2 = new DenseVector(numMissing);
+
+                missingCovQ = getScaledWeightMatrixForMissingCov(covPrecParameters.get(i).getParameterValue(0), i,
+                        lastObservedIndex[i]);
+
+                for (int j = 0; j < numMissing; j++) {
+                    // System.err.println("covariate.get(i).getSize(): " + covariates.get(i).getSize());
+                    // System.err.println("lastObservedIndex: " + lastObservedIndex);
+                    // System.err.println("j: " + j);
+                    // System.err.println("getParameterValue(0, lastObservedIndex-1): " + covariates.get(i).getParameterValue(0,lastObservedIndex-1));
+                    tempVectMissingCov.set(j, covariates.get(i).getParameterValue(0, lastObservedIndex[i] + j) -
+                            covariates.get(i).getParameterValue(0, lastObservedIndex[i] - 1));
+                }
+
+                missingCovQ.mult(tempVectMissingCov, tempVectMissingCov2);
+                // System.err.println("missingCovQ: " + missingCovQ.get(0,0));
+                currentLike += 0.5 * (numMissing) * Math.log(covPrecParameters.get(i).getParameterValue(0))
+                        - 0.5 * tempVectMissingCov.dot(tempVectMissingCov2);
+            }
+            return currentLike;
+        }
+
+    }
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TREE_PRIORS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Skyride coalescent";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Arrays.asList(new Citation(
+                    new Author[]{
+                            new Author("MS", "Gill"),
+                            new Author("P", "Lemey"),
+                            new Author("NR", "Faria"),
+                            new Author("A", "Rambaut"),
+                            new Author("B", "Shapiro"),
+                            new Author("MA", "Suchard")
+                    },
+                    "Improving Bayesian population dynamics inference: a coalescent-based model for multiple loci",
+                    2013,
+                    "Mol Biol Evol",
+                    30, 713, 724
+            ),
+            new Citation(
+                    new Author[]{
+                            new Author("VN", "Minin"),
+                            new Author("EW", "Bloomquist"),
+                            new Author("MA", "Suchard")
+                    },
+                    "Smooth skyride through a rough skyline: Bayesian coalescent-based inference of population dynamics",
+                    2008,
+                    "Mol Biol Evol",
+                    25, 1459, 1471,
+                    "10.1093/molbev/msn090"
+            )
+        );
+    }
+}
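
For orientation, getLogFieldLikelihood() in the new SkygridHelper above evaluates the GMRF prior on the log population-size trajectory after the covariate helpers have subtracted the regression term. Read directly from the code (a sketch of what is computed, not an independent statement of the model), with k = fieldLength, tau the precision parameter, Q_{tau,lambda} = getScaledWeightMatrix(tau, lambda), and Z\beta the offset applied by updateGammaWithCovariates():

    \log p(\gamma \mid \beta, \tau) = \frac{k-1}{2}\log\tau
        - \frac{1}{2}(\gamma - Z\beta)^{T} Q_{\tau,\lambda} (\gamma - Z\beta)
        - \frac{k-1}{2}\log 2\pi \quad (\lambda = 1;\ \text{otherwise } \tfrac{k}{2}\log 2\pi)

plus the analogous quadratic-form term that SkygridMissingCovariateHelper.handleMissingValues() adds for the unobserved tail of each covariate, built from getScaledWeightMatrixForMissingCov().
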
diff --git a/src/dr/evomodel/coalescent/GMRFSkyrideLikelihood.java b/src/dr/evomodel/coalescent/GMRFSkyrideLikelihood.java
index 7f8f32f..aeac171 100644
--- a/src/dr/evomodel/coalescent/GMRFSkyrideLikelihood.java
+++ b/src/dr/evomodel/coalescent/GMRFSkyrideLikelihood.java
@@ -34,12 +34,16 @@ import dr.inference.model.MatrixParameter;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
 import dr.math.MathUtils;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
 import no.uib.cipr.matrix.DenseVector;
 import no.uib.cipr.matrix.NotConvergedException;
 import no.uib.cipr.matrix.SymmTridiagEVD;
 import no.uib.cipr.matrix.SymmTridiagMatrix;
 
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
 
 /**
@@ -51,7 +55,7 @@ import java.util.List;
  * @author Marc Suchard
  * @version $Id: GMRFSkylineLikelihood.java,v 1.3 2007/03/20 22:40:04 msuchard Exp $
  */
-public class GMRFSkyrideLikelihood extends OldAbstractCoalescentLikelihood implements CoalescentIntervalProvider {
+public class GMRFSkyrideLikelihood extends OldAbstractCoalescentLikelihood implements CoalescentIntervalProvider, Citable {
 
     // PUBLIC STUFF
 
@@ -472,27 +476,27 @@ public class GMRFSkyrideLikelihood extends OldAbstractCoalescentLikelihood imple
     }
 
 
-    public static double logGeneralizedDeterminant(SymmTridiagMatrix X) {
-        //Set up the eigenvalue solver
-        SymmTridiagEVD eigen = new SymmTridiagEVD(X.numRows(), false);
-        //Solve for the eigenvalues
-        try {
-            eigen.factor(X);
-        } catch (NotConvergedException e) {
-            throw new RuntimeException("Not converged error in generalized determinate calculation.\n" + e.getMessage());
-        }
-
-        //Get the eigenvalues
-        double[] x = eigen.getEigenvalues();
-
-        double a = 0;
-        for (double d : x) {
-            if (d > 0.00001)
-                a += Math.log(d);
-        }
-
-        return a;
-    }
+//    public static double logGeneralizedDeterminant(SymmTridiagMatrix X) {
+//        //Set up the eigenvalue solver
+//        SymmTridiagEVD eigen = new SymmTridiagEVD(X.numRows(), false);
+//        //Solve for the eigenvalues
+//        try {
+//            eigen.factor(X);
+//        } catch (NotConvergedException e) {
+//            throw new RuntimeException("Not converged error in generalized determinant calculation.\n" + e.getMessage());
+//        }
+//
+//        //Get the eigenvalues
+//        double[] x = eigen.getEigenvalues();
+//
+//        double a = 0;
+//        for (double d : x) {
+//            if (d > 0.00001)
+//                a += Math.log(d);
+//        }
+//
+//        return a;
+//    }
 
 
     public Parameter getPrecisionParameter() {
@@ -536,9 +540,33 @@ public class GMRFSkyrideLikelihood extends OldAbstractCoalescentLikelihood imple
 
     }
 
-    // ****************************************************************
-    // Private and protected stuff
-    // ****************************************************************
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TREE_PRIORS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Skyride coalescent";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+    public static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("VN", "Minin"),
+                    new Author("EW", "Bloomquist"),
+                    new Author("MA", "Suchard")
+            },
+            "Smooth skyride through a rough skyline: Bayesian coalescent-based inference of population dynamics",
+            2008,
+            "Mol Biol Evol",
+            25, 1459, 1471,
+            "10.1093/molbev/msn090"
+    );
 }
 
 /*
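
The hunk above comments out logGeneralizedDeterminant() rather than deleting it. For reference, the quantity it computed is the log pseudo-determinant of the (singular) tridiagonal GMRF precision matrix, i.e. the sum of the logs of the eigenvalues that are numerically non-zero (the code used a 1e-5 cutoff):

    \log\det\nolimits^{+}(Q) = \sum_{i \,:\, d_i > 10^{-5}} \log d_i

with d_i the eigenvalues returned by SymmTridiagEVD.
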
diff --git a/src/dr/evomodel/coalescent/LogisticGrowthModel.java b/src/dr/evomodel/coalescent/LogisticGrowthModel.java
index 8c8976f..91e533d 100644
--- a/src/dr/evomodel/coalescent/LogisticGrowthModel.java
+++ b/src/dr/evomodel/coalescent/LogisticGrowthModel.java
@@ -29,6 +29,12 @@ import dr.evolution.coalescent.DemographicFunction;
 import dr.evolution.coalescent.LogisticGrowth;
 import dr.evomodelxml.coalescent.LogisticGrowthModelParser;
 import dr.inference.model.Parameter;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+
+import java.util.Collections;
+import java.util.List;
 
 /**
  * Logistic growth.
@@ -37,7 +43,7 @@ import dr.inference.model.Parameter;
  * @author Andrew Rambaut
  * @version $Id: LogisticGrowthModel.java,v 1.21 2005/05/24 20:25:57 rambaut Exp $
  */
-public class LogisticGrowthModel extends DemographicModel {
+public class LogisticGrowthModel extends DemographicModel implements Citable {
 
     //
     // Public stuff
@@ -110,4 +116,30 @@ public class LogisticGrowthModel extends DemographicModel {
     double alpha = 0.5;
     LogisticGrowth logisticGrowth = null;
     boolean usingGrowthRate = true;
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TREE_PRIORS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Logistic Growth Coalescent";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+    public static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("OG", "Pybus"),
+                    new Author("A", "Rambaut")
+            },
+            "GENIE: estimating demographic history from molecular phylogenies",
+            2001,
+            "Bioinformatics",
+            18, 1404, 1405
+    );
 }
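
The LogisticGrowthModel change above is representative of the Citable pattern this patch adds across the coalescent models: a category, a one-line description, and a list of Citation objects. A minimal consumer-side sketch, assuming only the three methods added in these hunks and that Citation has a human-readable String form (its exact rendering is not shown in this patch):

    // Sketch only: summarise any Citable touched by this patch.
    // Assumes Citation renders itself usefully via toString(); adjust if it does not.
    static String describe(dr.util.Citable citable) {
        StringBuilder sb = new StringBuilder();
        sb.append(citable.getCategory()).append(": ").append(citable.getDescription()).append('\n');
        for (dr.util.Citation citation : citable.getCitations()) {
            sb.append("  ").append(citation).append('\n');
        }
        return sb.toString();
    }
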
diff --git a/src/dr/evomodel/coalescent/SkylineLikelihood.java b/src/dr/evomodel/coalescent/SkylineLikelihood.java
index 8a2868a..ab3f052 100644
--- a/src/dr/evomodel/coalescent/SkylineLikelihood.java
+++ b/src/dr/evomodel/coalescent/SkylineLikelihood.java
@@ -32,8 +32,14 @@ import dr.evomodel.tree.TreeModel;
 import dr.evomodelxml.coalescent.CoalescentLikelihoodParser;
 import dr.inference.model.Likelihood;
 import dr.inference.model.Parameter;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
 import dr.xml.*;
 
+import java.util.Collections;
+import java.util.List;
+
 /**
  * A likelihood function for the coalescent. Takes a tree and a exponential markov model.
  * *
@@ -41,7 +47,7 @@ import dr.xml.*;
  *
  * @author Alexei Drummond
  */
-public class SkylineLikelihood extends OldAbstractCoalescentLikelihood {
+public class SkylineLikelihood extends OldAbstractCoalescentLikelihood implements Citable {
 
 	// PUBLIC STUFF
 
@@ -157,4 +163,32 @@ public class SkylineLikelihood extends OldAbstractCoalescentLikelihood {
 
 	/** The demographic model. */
 	Parameter popSizeParameter = null;
+
+	@Override
+	public Citation.Category getCategory() {
+		return Citation.Category.TREE_PRIORS;
+	}
+
+	@Override
+	public String getDescription() {
+		return "Bayesian Skyline Coalescent";
+	}
+
+	@Override
+	public List<Citation> getCitations() {
+		return Collections.singletonList(CITATION);
+	}
+
+	public static Citation CITATION = new Citation(
+			new Author[]{
+					new Author("AJ", "Drummond"),
+					new Author("A", "Rambaut"),
+					new Author("B", "Shapiro"),
+					new Author("OG", "Pybus")
+			},
+			"Bayesian coalescent inference of past population dynamics from molecular sequences",
+			2005,
+			"Mol Biol Evol",
+			22, 1185, 1192
+	);
 }
\ No newline at end of file
diff --git a/src/dr/evomodel/coalescent/VariableDemographicModel.java b/src/dr/evomodel/coalescent/VariableDemographicModel.java
index 8c3d548..3c2e82f 100644
--- a/src/dr/evomodel/coalescent/VariableDemographicModel.java
+++ b/src/dr/evomodel/coalescent/VariableDemographicModel.java
@@ -32,12 +32,18 @@ import dr.evomodelxml.coalescent.VariableDemographicModelParser;
 import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+
+import java.util.Arrays;
+import java.util.List;
 
 /**
  * @author Joseph Heled
  * @version $Id$
  */
-public class VariableDemographicModel extends DemographicModel implements MultiLociTreeSet {
+public class VariableDemographicModel extends DemographicModel implements MultiLociTreeSet, Citable {
 
     private final Parameter popSizeParameter;
     private final Parameter indicatorParameter;
@@ -204,4 +210,31 @@ public class VariableDemographicModel extends DemographicModel implements MultiL
         demoFunction = savedDemoFunction;
         savedDemoFunction = null;
     }
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TREE_PRIORS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Extended Bayesian Skyline multi-locus coalescent model";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Arrays.asList(new Citation(
+                        new Author[]{
+                                new Author("J", "Heled"),
+                                new Author("AJ", "Drummond"),
+                        },
+                        "Bayesian inference of population size history from multiple loci",
+                        2008,
+                        "BMC Evolutionary Biology",
+                        8,
+                        "289",
+                        "10.1186/1471-2148-8-289"
+                ));
+    }
+
 }
diff --git a/src/dr/evomodel/coalescent/VariableSkylineLikelihood.java b/src/dr/evomodel/coalescent/VariableSkylineLikelihood.java
index 893bbcf..386a70a 100644
--- a/src/dr/evomodel/coalescent/VariableSkylineLikelihood.java
+++ b/src/dr/evomodel/coalescent/VariableSkylineLikelihood.java
@@ -32,8 +32,12 @@ import dr.evomodel.tree.TreeModel;
 import dr.evomodelxml.coalescent.VariableSkylineLikelihoodParser;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
 
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
 
 /**
@@ -42,7 +46,7 @@ import java.util.List;
  *
  * @author Alexei Drummond
  */
-public class VariableSkylineLikelihood extends OldAbstractCoalescentLikelihood {
+public class VariableSkylineLikelihood extends OldAbstractCoalescentLikelihood implements Citable {
 
     // PUBLIC STUFF
 
@@ -361,4 +365,31 @@ public class VariableSkylineLikelihood extends OldAbstractCoalescentLikelihood {
     private final Type type;
 
     private boolean logSpace = false;
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TREE_PRIORS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Extended Skyline Coalescent";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+    public static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("J", "Heled"),
+                    new Author("AJ", "Drummond")
+            },
+            "",
+            0,
+            "",
+            0, 0, 0,
+            ""
+    );
 }
diff --git a/src/dr/evomodel/coalescent/operators/GMRFMultilocusSkyrideBlockUpdateOperator.java b/src/dr/evomodel/coalescent/operators/GMRFMultilocusSkyrideBlockUpdateOperator.java
index c9b888f..d7e3f5c 100644
--- a/src/dr/evomodel/coalescent/operators/GMRFMultilocusSkyrideBlockUpdateOperator.java
+++ b/src/dr/evomodel/coalescent/operators/GMRFMultilocusSkyrideBlockUpdateOperator.java
@@ -27,11 +27,13 @@ package dr.evomodel.coalescent.operators;
 
 import dr.evomodel.coalescent.GMRFMultilocusSkyrideLikelihood;
 import dr.evomodelxml.coalescent.operators.GMRFSkyrideBlockUpdateOperatorParser;
+import dr.inference.model.MatrixParameter;
 import dr.inference.model.Parameter;
 import dr.inference.operators.*;
 import dr.math.MathUtils;
 import no.uib.cipr.matrix.*;
 
+import java.util.List;
 import java.util.logging.Logger;
 
 /* A Metropolis-Hastings operator to update the log population sizes and precision parameter jointly under a Gaussian Markov random field prior
@@ -53,6 +55,8 @@ public class GMRFMultilocusSkyrideBlockUpdateOperator extends AbstractCoercableO
     private Parameter popSizeParameter;
     private Parameter precisionParameter;
     private Parameter lambdaParameter;
+    private List<Parameter> betaParameter;
+    private List<MatrixParameter> covariates;
 
     GMRFMultilocusSkyrideLikelihood gmrfField;
 
@@ -66,6 +70,8 @@ public class GMRFMultilocusSkyrideBlockUpdateOperator extends AbstractCoercableO
         popSizeParameter = gmrfLikelihood.getPopSizeParameter();
         precisionParameter = gmrfLikelihood.getPrecisionParameter();
         lambdaParameter = gmrfLikelihood.getLambdaParameter();
+        betaParameter = gmrfLikelihood.getBetaListParameter();
+        covariates = gmrfLikelihood.getCovariates();
 
         this.scaleFactor = scaleFactor;
         lambdaScaleFactor = 0.0;
@@ -174,13 +180,38 @@ public class GMRFMultilocusSkyrideBlockUpdateOperator extends AbstractCoercableO
 
         return returnValue;
     }
-   
-    public DenseVector newtonRaphson(double[] data1, double[] data2, DenseVector currentGamma, SymmTridiagMatrix proposedQ) throws OperatorFailedException {
-        return newNewtonRaphson(data1, data2, currentGamma, proposedQ, maxIterations, stopValue);
+
+    public DenseVector getZBeta(List<MatrixParameter> covariates, List<Parameter> beta){
+
+        DenseVector temporaryVect = new DenseVector(fieldLength);
+        
+        // TODO: Update for covariateMatrix block as well !!!
+        
+        if(covariates != null) {
+           // DenseVector currentBeta = new DenseVector(beta.getParameterValues());
+            DenseVector currentBeta = new DenseVector(beta.size());
+            for (int i = 0; i < beta.size(); i++) {
+                currentBeta.set(i, beta.get(i).getParameterValue(0));
+            }
+
+            for (int i = 0; i < covariates.size(); i++) {
+                for (int j = 0; j < covariates.get(i).getColumnDimension(); j++) {
+                    temporaryVect.set(j, covariates.get(i).getParameterValue(0, j) * currentBeta.get(i));
+                }
+            }
+            return temporaryVect;
+        } else {
+            return temporaryVect.zero();
+        }
+    }
+
+    public DenseVector newtonRaphson(double[] data1, double[] data2, DenseVector currentGamma,
+                                     SymmTridiagMatrix proposedQ, DenseVector ZBeta) throws OperatorFailedException {
+        return newNewtonRaphson(data1, data2, currentGamma, proposedQ, maxIterations, stopValue, ZBeta);
     }
 
     public static DenseVector newNewtonRaphson(double[] data1, double[] data2, DenseVector currentGamma, SymmTridiagMatrix proposedQ,
-                                               int maxIterations, double stopValue) throws OperatorFailedException {
+                                               int maxIterations, double stopValue, DenseVector ZBeta) throws OperatorFailedException {
 
         DenseVector iterateGamma = currentGamma.copy();
         DenseVector tempValue = currentGamma.copy();
@@ -188,9 +219,9 @@ public class GMRFMultilocusSkyrideBlockUpdateOperator extends AbstractCoercableO
         int numberIterations = 0;
 
 
-        while (gradient(data1, data2, iterateGamma, proposedQ).norm(Vector.Norm.Two) > stopValue) {
+        while (gradient(data1, data2, iterateGamma, proposedQ, ZBeta).norm(Vector.Norm.Two) > stopValue) {
            try {
-                jacobian(data2, iterateGamma, proposedQ).solve(gradient(data1, data2, iterateGamma, proposedQ), tempValue);
+                jacobian(data2, iterateGamma, proposedQ).solve(gradient(data1, data2, iterateGamma, proposedQ, ZBeta), tempValue);
            } catch (no.uib.cipr.matrix.MatrixNotSPDException e) {
                 Logger.getLogger("dr.evomodel.coalescent.operators.GMRFMultilocusSkyrideBlockUpdateOperator").fine("Newton-Raphson F");
                 throw new OperatorFailedException("");
@@ -214,12 +245,16 @@ public class GMRFMultilocusSkyrideBlockUpdateOperator extends AbstractCoercableO
 
     }
 
-    private static DenseVector gradient(double[] data1, double[] data2, DenseVector value, SymmTridiagMatrix Q) {
+    private static DenseVector gradient(double[] data1, double[] data2, DenseVector value,
+                                        SymmTridiagMatrix Q, DenseVector ZBeta) {
 
         DenseVector returnValue = new DenseVector(value.size());
+        DenseVector returnValueCov = new DenseVector(ZBeta.size());
         Q.mult(value, returnValue);
+        //check this
+        Q.mult(ZBeta, returnValueCov);
         for (int i = 0; i < value.size(); i++) {
-            returnValue.set(i, -returnValue.get(i) - data1[i] + data2[i] * Math.exp(-value.get(i)));
+            returnValue.set(i, -returnValue.get(i) + returnValueCov.get(i) - data1[i] + data2[i] * Math.exp(-value.get(i)));
         }
         return returnValue;
     }
@@ -262,8 +297,13 @@ public class GMRFMultilocusSkyrideBlockUpdateOperator extends AbstractCoercableO
         DenseVector diagonal1 = new DenseVector(fieldLength);
         DenseVector diagonal2 = new DenseVector(fieldLength);
         DenseVector diagonal3 = new DenseVector(fieldLength);
+        DenseVector ZBetaVector = getZBeta(covariates, betaParameter);
+        DenseVector QZBetaProp = new DenseVector(fieldLength);
+        DenseVector QZBetaCurrent = new DenseVector(fieldLength);
+        forwardQW.mult(ZBetaVector, QZBetaProp);
+        backwardQW.mult(ZBetaVector, QZBetaCurrent);
 
-        DenseVector modeForward = newtonRaphson(numCoalEv, wNative, currentGamma, proposedQ.copy());
+        DenseVector modeForward = newtonRaphson(numCoalEv, wNative, currentGamma, proposedQ.copy(), ZBetaVector);
        
         for (int i = 0; i < fieldLength; i++) {
             diagonal1.set(i, wNative[i] * Math.exp(-modeForward.get(i)));
@@ -271,7 +311,7 @@ public class GMRFMultilocusSkyrideBlockUpdateOperator extends AbstractCoercableO
 
             forwardQW.set(i, i, diagonal1.get(i) + forwardQW.get(i, i));
             //diagonal1.set(i, diagonal1.get(i) * diagonal2.get(i) - 1);
-            diagonal1.set(i, diagonal1.get(i) * diagonal2.get(i) - numCoalEv[i]);
+            diagonal1.set(i, QZBetaProp.get(i) + diagonal1.get(i) * diagonal2.get(i) - numCoalEv[i]);
         }
 
         forwardCholesky.factor(forwardQW.copy());
@@ -308,7 +348,7 @@ public class GMRFMultilocusSkyrideBlockUpdateOperator extends AbstractCoercableO
         diagonal2.zero();
         diagonal3.zero();
 
-        DenseVector modeBackward = newtonRaphson(numCoalEv, wNative, proposedGamma, currentQ.copy());
+        DenseVector modeBackward = newtonRaphson(numCoalEv, wNative, proposedGamma, currentQ.copy(), ZBetaVector);
 
         for (int i = 0; i < fieldLength; i++) {
             diagonal1.set(i, wNative[i] * Math.exp(-modeBackward.get(i)));
@@ -316,7 +356,7 @@ public class GMRFMultilocusSkyrideBlockUpdateOperator extends AbstractCoercableO
 
             backwardQW.set(i, i, diagonal1.get(i) + backwardQW.get(i, i));
             //diagonal1.set(i, diagonal1.get(i) * diagonal2.get(i) - 1);
-            diagonal1.set(i, diagonal1.get(i) * diagonal2.get(i) - numCoalEv[i]);
+            diagonal1.set(i, QZBetaCurrent.get(i) + diagonal1.get(i) * diagonal2.get(i) - numCoalEv[i]);
         }
 
         backwardCholesky.factor(backwardQW.copy());
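
Reading the operator changes above: getZBeta() forms the covariate offset Z\beta once per proposal, and gradient() now adds that offset premultiplied by the precision matrix. With c_i = data1[i] (numCoalEv), w_i = data2[i] (wNative) and \gamma the current field, the Newton-Raphson gradient becomes

    \nabla_i f(\gamma) = -(Q\gamma)_i + (Q Z\beta)_i - c_i + w_i e^{-\gamma_i}

while jacobian() is unchanged, so the covariate offset shifts the mode of the Gaussian proposal but not its curvature.
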
diff --git a/src/dr/evomodel/continuous/AbstractMultivariateTraitLikelihood.java b/src/dr/evomodel/continuous/AbstractMultivariateTraitLikelihood.java
index 1bdc387..f08bd5f 100644
--- a/src/dr/evomodel/continuous/AbstractMultivariateTraitLikelihood.java
+++ b/src/dr/evomodel/continuous/AbstractMultivariateTraitLikelihood.java
@@ -1,7 +1,7 @@
 /*
  * AbstractMultivariateTraitLikelihood.java
  *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ * Copyright (c) 2002-2013 Alexei Drummond, Andrew Rambaut and Marc Suchard
  *
  * This file is part of BEAST.
  * See the NOTICE file distributed with this work for additional
@@ -45,8 +45,7 @@ import dr.xml.*;
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
 
-import java.util.ArrayList;
-import java.util.List;
+import java.util.*;
 import java.util.logging.Logger;
 
 /**
@@ -80,21 +79,21 @@ public abstract class AbstractMultivariateTraitLikelihood extends AbstractModelL
     public static final String STRENGTH_OF_SELECTION = "strengthOfSelection";
     public static final String OPTIMAL_TRAITS = "optimalTraits";
 
-    public AbstractMultivariateTraitLikelihood(String traitName,
-                                               MultivariateTraitTree treeModel,
-                                               MultivariateDiffusionModel diffusionModel,
-                                               CompoundParameter traitParameter,
-                                               List<Integer> missingIndices,
-                                               boolean cacheBranches,
-                                               boolean scaleByTime,
-                                               boolean useTreeLength,
-                                               BranchRateModel rateModel,
-                                               Model samplingDensity,
-                                               boolean reportAsMultivariate,
-                                               boolean reciprocalRates) {
-        this(traitName, treeModel, diffusionModel, traitParameter, null, missingIndices, cacheBranches,
-                scaleByTime, useTreeLength, rateModel, null, samplingDensity, reportAsMultivariate, reciprocalRates);
-    }
+//    public AbstractMultivariateTraitLikelihood(String traitName,
+//                                               MultivariateTraitTree treeModel,
+//                                               MultivariateDiffusionModel diffusionModel,
+//                                               CompoundParameter traitParameter,
+//                                               List<Integer> missingIndices,
+//                                               boolean cacheBranches,
+//                                               boolean scaleByTime,
+//                                               boolean useTreeLength,
+//                                               BranchRateModel rateModel,
+//                                               Model samplingDensity,
+//                                               boolean reportAsMultivariate,
+//                                               boolean reciprocalRates) {
+//        this(traitName, treeModel, diffusionModel, traitParameter, null, missingIndices, cacheBranches,
+//                scaleByTime, useTreeLength, rateModel, null, samplingDensity, reportAsMultivariate, reciprocalRates);
+//    }
 
     public AbstractMultivariateTraitLikelihood(String traitName,
                                                MultivariateTraitTree treeModel,
@@ -107,6 +106,8 @@ public abstract class AbstractMultivariateTraitLikelihood extends AbstractModelL
                                                boolean useTreeLength,
                                                BranchRateModel rateModel,
                                                List<BranchRateModel> driftModels,
+                                               List<BranchRateModel> optimalValues,
+                                               BranchRateModel strengthOfSelection,
                                                Model samplingDensity,
                                                boolean reportAsMultivariate,
                                                boolean reciprocalRates) {
@@ -117,6 +118,8 @@ public abstract class AbstractMultivariateTraitLikelihood extends AbstractModelL
         this.treeModel = treeModel;
         this.branchRateModel = rateModel;
         this.driftModels = driftModels;
+        this.optimalValues = optimalValues;
+        this.strengthOfSelection = strengthOfSelection;
         this.diffusionModel = diffusionModel;
         this.traitParameter = traitParameter;
         this.missingIndices = missingIndices;
@@ -139,79 +142,6 @@ public abstract class AbstractMultivariateTraitLikelihood extends AbstractModelL
             }
         }
 
-        if (samplingDensity != null) {
-            addModel(samplingDensity);
-        }
-
-        if (traitParameter != null)
-            addVariable(traitParameter);
-
-        this.reportAsMultivariate = reportAsMultivariate;
-
-        this.cacheBranches = cacheBranches;
-        if (cacheBranches) {
-            cachedLogLikelihoods = new double[treeModel.getNodeCount()];
-            storedCachedLogLikelihood = new double[treeModel.getNodeCount()];
-            validLogLikelihoods = new boolean[treeModel.getNodeCount()];
-            storedValidLogLikelihoods = new boolean[treeModel.getNodeCount()];
-        }
-
-        this.scaleByTime = scaleByTime;
-        this.useTreeLength = useTreeLength;
-        this.reciprocalRates = reciprocalRates;
-
-        dimTrait = diffusionModel.getPrecisionmatrix().length;
-        dim = traitParameter != null ? traitParameter.getParameter(0).getDimension() : 0;
-        numData = dim / dimTrait;
-
-        if (dim % dimTrait != 0)
-            throw new RuntimeException("dim is not divisible by dimTrait");
-
-        recalculateTreeLength();
-        printInformtion();
-
-    }
-
-    public AbstractMultivariateTraitLikelihood(String traitName,
-                                               MultivariateTraitTree treeModel,
-                                               MultivariateDiffusionModel diffusionModel,
-                                               CompoundParameter traitParameter,
-                                               Parameter deltaParameter,
-                                               List<Integer> missingIndices,
-                                               boolean cacheBranches,
-                                               boolean scaleByTime,
-                                               boolean useTreeLength,
-                                               BranchRateModel rateModel,
-                                               List<BranchRateModel> optimalValues,
-                                               BranchRateModel strengthOfSelection,
-                                               Model samplingDensity,
-                                               boolean reportAsMultivariate,
-                                               boolean reciprocalRates) {
-
-        super(TRAIT_LIKELIHOOD);
-
-        this.traitName = traitName;
-        this.treeModel = treeModel;
-        this.branchRateModel = rateModel;
-        this.optimalValues = optimalValues;
-        this.strengthOfSelection = strengthOfSelection;
-        this.diffusionModel = diffusionModel;
-        this.traitParameter = traitParameter;
-        this.missingIndices = missingIndices;
-        addModel(treeModel);
-        addModel(diffusionModel);
-
-        this.deltaParameter = deltaParameter;
-        if (deltaParameter != null) {
-            addVariable(deltaParameter);
-        }
-
-
-        if (rateModel != null) {
-            hasBranchRateModel = true;
-            addModel(rateModel);
-        }
-
         if (optimalValues != null) {
             for (BranchRateModel optVal : optimalValues) {
                 addModel(optVal);
@@ -255,6 +185,89 @@ public abstract class AbstractMultivariateTraitLikelihood extends AbstractModelL
 
     }
 
+//    public AbstractMultivariateTraitLikelihood(String traitName,
+//                                               MultivariateTraitTree treeModel,
+//                                               MultivariateDiffusionModel diffusionModel,
+//                                               CompoundParameter traitParameter,
+//                                               Parameter deltaParameter,
+//                                               List<Integer> missingIndices,
+//                                               boolean cacheBranches,
+//                                               boolean scaleByTime,
+//                                               boolean useTreeLength,
+//                                               BranchRateModel rateModel,
+//                                               List<BranchRateModel> optimalValues,
+//                                               BranchRateModel strengthOfSelection,
+//                                               Model samplingDensity,
+//                                               boolean reportAsMultivariate,
+//                                               boolean reciprocalRates) {
+//
+//        super(TRAIT_LIKELIHOOD);
+//
+//        this.traitName = traitName;
+//        this.treeModel = treeModel;
+//        this.branchRateModel = rateModel;
+//        this.optimalValues = optimalValues;
+//        this.strengthOfSelection = strengthOfSelection;
+//        this.diffusionModel = diffusionModel;
+//        this.traitParameter = traitParameter;
+//        this.missingIndices = missingIndices;
+//        addModel(treeModel);
+//        addModel(diffusionModel);
+//
+//        this.deltaParameter = deltaParameter;
+//        if (deltaParameter != null) {
+//            addVariable(deltaParameter);
+//        }
+//
+//
+//        if (rateModel != null) {
+//            hasBranchRateModel = true;
+//            addModel(rateModel);
+//        }
+//
+//        if (optimalValues != null) {
+//            for (BranchRateModel optVal : optimalValues) {
+//                addModel(optVal);
+//            }
+//        }
+//
+//        if (strengthOfSelection != null) {
+//            addModel(strengthOfSelection);
+//        }
+//
+//        if (samplingDensity != null) {
+//            addModel(samplingDensity);
+//        }
+//
+//        if (traitParameter != null)
+//            addVariable(traitParameter);
+//
+//        this.reportAsMultivariate = reportAsMultivariate;
+//
+//        this.cacheBranches = cacheBranches;
+//        if (cacheBranches) {
+//            cachedLogLikelihoods = new double[treeModel.getNodeCount()];
+//            storedCachedLogLikelihood = new double[treeModel.getNodeCount()];
+//            validLogLikelihoods = new boolean[treeModel.getNodeCount()];
+//            storedValidLogLikelihoods = new boolean[treeModel.getNodeCount()];
+//        }
+//
+//        this.scaleByTime = scaleByTime;
+//        this.useTreeLength = useTreeLength;
+//        this.reciprocalRates = reciprocalRates;
+//
+//        dimTrait = diffusionModel.getPrecisionmatrix().length;
+//        dim = traitParameter != null ? traitParameter.getParameter(0).getDimension() : 0;
+//        numData = dim / dimTrait;
+//
+//        if (dim % dimTrait != 0)
+//            throw new RuntimeException("dim is not divisible by dimTrait");
+//
+//        recalculateTreeLength();
+//        printInformtion();
+//
+//    }
+
 
     protected void printInformtion() {
         StringBuffer sb = new StringBuffer("Creating multivariate diffusion model:\n");
@@ -281,11 +294,22 @@ public abstract class AbstractMultivariateTraitLikelihood extends AbstractModelL
         Logger.getLogger("dr.evomodel").info(sb.toString());
     }
 
-    private static Citable TraitAscertainmentCitation = new Citable() {//} implements Citable {
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TRAIT_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Multivariate Diffusion model";
+    }
 
-        public List<Citation> getCitations() {
-            List<Citation> list = new ArrayList<Citation>();
-            list.add(
+    @Override
+    public List<Citation> getCitations() {
+        List<Citation> citations = new ArrayList<Citation>();
+        citations.add(CommonCitations.LEMEY_2010_PHYLOGEOGRAPHY);
+        if (doAscertainmentCorrect) {
+            citations.add(
                     new Citation(
                             new Author[]{
                                     new Author("MA", "Suchard"),
@@ -296,15 +320,7 @@ public abstract class AbstractMultivariateTraitLikelihood extends AbstractModelL
                             Citation.Status.IN_PREPARATION
                     )
             );
-            return list;
         }
-    };
-
-    public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(
-                CommonCitations.LEMEY_2010
-        );
         return citations;
     }
 
@@ -323,8 +339,6 @@ public abstract class AbstractMultivariateTraitLikelihood extends AbstractModelL
         StringBuilder sb = new StringBuilder("Enabling ascertainment correction for multivariate trait model: ");
         sb.append(getId()).append("\n");
         sb.append("\tTaxon: ").append(taxon.getId()).append("\n");
-        sb.append("\tPlease cite:\n");
-        sb.append(Citable.Utils.getCitationString(TraitAscertainmentCitation));
         Logger.getLogger("dr.evomodel").info(sb.toString());
     }
 
@@ -790,16 +804,17 @@ public abstract class AbstractMultivariateTraitLikelihood extends AbstractModelL
                 deltaParameter = (Parameter) cxo.getChild(Parameter.class);
             }
 
+
             if (standardizeTraits) {
-//                standardize(traitParameter);
-//                dimTrait = diffusionModel.getPrecisionmatrix().length;
-//                        dim = traitParameter != null ? traitParameter.getParameter(0).getDimension() : 0;
-//                        numData = dim / dimTrait;
-
-//                System.err.println(traitParameter.getDimension());
-//                System.err.println(traitParameter.getParameterCount());
-//                System.err.println(traitParameter.getParameter(0).getDimension());
-//                System.exit(-1);
+                //                standardize(traitParameter);
+                //                dimTrait = diffusionModel.getPrecisionmatrix().length;
+                //                        dim = traitParameter != null ? traitParameter.getParameter(0).getDimension() : 0;
+                //                        numData = dim / dimTrait;
+
+                //                System.err.println(traitParameter.getDimension());
+                //                System.err.println(traitParameter.getParameterCount());
+                //                System.err.println(traitParameter.getParameter(0).getDimension());
+                //                System.exit(-1);
                 int numTraits = traitParameter.getParameter(0).getDimension();
                 int numObservations = traitParameter.getParameterCount();
 
@@ -825,7 +840,7 @@ public abstract class AbstractMultivariateTraitLikelihood extends AbstractModelL
                 Logger.getLogger("dr.evomodel").info(sb.toString());
 
             }
-
+            
             AbstractMultivariateTraitLikelihood like;
 
             if (integrate) {
@@ -881,18 +896,24 @@ public abstract class AbstractMultivariateTraitLikelihood extends AbstractModelL
                             if (strengthOfSelection == null) {
                                 like = new FullyConjugateMultivariateTraitLikelihood(traitName, treeModel, diffusionModel,
                                         traitParameter, deltaParameter, missingIndices, cacheBranches,
-                                        scaleByTime, useTreeLength, rateModel, samplingDensity, reportAsMultivariate,
+                                        scaleByTime, useTreeLength,
+                                        rateModel, null, null, null,
+                                        samplingDensity, reportAsMultivariate,
                                         mean, pseudoObservations, reciprocalRates);
                             } else {
                                 like = new FullyConjugateMultivariateTraitLikelihood(traitName, treeModel, diffusionModel,
                                         traitParameter, deltaParameter, missingIndices, cacheBranches,
-                                        scaleByTime, useTreeLength, rateModel, optimalValues, strengthOfSelection, samplingDensity, reportAsMultivariate,
+                                        scaleByTime, useTreeLength,
+                                        rateModel, null, optimalValues, strengthOfSelection,
+                                        samplingDensity, reportAsMultivariate,
                                         mean, pseudoObservations, reciprocalRates);
                             }
                         } else {
                             like = new FullyConjugateMultivariateTraitLikelihood(traitName, treeModel, diffusionModel,
                                     traitParameter, deltaParameter, missingIndices, cacheBranches,
-                                    scaleByTime, useTreeLength, rateModel, driftModels, samplingDensity, reportAsMultivariate,
+                                    scaleByTime, useTreeLength,
+                                    rateModel, driftModels, null, null,
+                                    samplingDensity, reportAsMultivariate,
                                     mean, pseudoObservations, reciprocalRates);
                         }
                     }
@@ -980,7 +1001,6 @@ public abstract class AbstractMultivariateTraitLikelihood extends AbstractModelL
                 AttributeRule.newBooleanRule(USE_TREE_LENGTH, true),
                 AttributeRule.newBooleanRule(SCALE_BY_TIME, true),
                 AttributeRule.newBooleanRule(RECIPROCAL_RATES, true),
-                AttributeRule.newBooleanRule(STANDARDIZE_TRAITS, true),
                 AttributeRule.newBooleanRule(CACHE_BRANCHES, true),
                 AttributeRule.newIntegerRule(RANDOM_SAMPLE, true),
                 AttributeRule.newBooleanRule(IGNORE_PHYLOGENY, true),
diff --git a/src/dr/evomodel/continuous/BinaryLatentLiabilityLikelihood.java b/src/dr/evomodel/continuous/BinaryLatentLiabilityLikelihood.java
index f8e8766..caab3e3 100644
--- a/src/dr/evomodel/continuous/BinaryLatentLiabilityLikelihood.java
+++ b/src/dr/evomodel/continuous/BinaryLatentLiabilityLikelihood.java
@@ -37,7 +37,9 @@ import dr.util.CommonCitations;
 import dr.xml.*;
 
 import java.util.ArrayList;
+import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.logging.Logger;
 
 
@@ -254,11 +256,20 @@ public class BinaryLatentLiabilityLikelihood extends AbstractModelLikelihood imp
         }
     };
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TRAIT_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Latent Liability model";
+    }
+
+    @Override
     public List<Citation> getCitations() {
         List<Citation> citations = new ArrayList<Citation>();
-        citations.add(
-                CommonCitations.SUCHARD_2012_LATENT
-        );
+        citations.add(CommonCitations.CYBIS_2015_ASSESSING);
         return citations;
     }
 
diff --git a/src/dr/evomodel/continuous/ContinuousDiffusionStatistic.java b/src/dr/evomodel/continuous/ContinuousDiffusionStatistic.java
index 8775cde..2e978ad 100644
--- a/src/dr/evomodel/continuous/ContinuousDiffusionStatistic.java
+++ b/src/dr/evomodel/continuous/ContinuousDiffusionStatistic.java
@@ -33,11 +33,17 @@ import dr.evolution.tree.Tree;
 import dr.evolution.util.TaxonList;
 import dr.evomodel.branchratemodel.BranchRateModel;
 import dr.evomodel.tree.TreeStatistic;
+import dr.geo.KMLCoordinates;
+import dr.geo.Polygon2D;
+import dr.geo.contouring.ContourMaker;
+import dr.geo.contouring.ContourPath;
+import dr.geo.contouring.ContourWithSynder;
 import dr.geo.math.SphericalPolarCoordinates;
 import dr.inference.model.Statistic;
 import dr.math.distributions.MultivariateNormalDistribution;
 import dr.stats.DiscreteStatistics;
 import dr.xml.*;
+import org.jdom.Element;
 
 import javax.swing.event.TreeModelListener;
 import javax.swing.plaf.basic.BasicInternalFrameTitlePane;
@@ -62,11 +68,13 @@ public class ContinuousDiffusionStatistic extends Statistic.Abstract {
     public static final String COEFFICIENT_OF_VARIATION = "coefficientOfVariation"; // weighted average (=total distance/total time)
     public static final String STATISTIC = "statistic";
     public static final String TRAIT = "trait";
+    public static final String TRAIT2DAREA = "trait2Darea";
     public static final String DIMENSION = "dimension";
     public static final String DIFFUSION_TIME = "diffusionTime";
     public static final String DIFFUSION_DISTANCE = "diffusionDistance";
     public static final String DIFFUSION_RATE = "diffusionRate"; // weighted average (=total distance/total time)
     public static final String WAVEFRONT_DISTANCE = "wavefrontDistance"; // weighted average (=total distance/total time)
+    public static final String WAVEFRONT_DISTANCE_PHYLO = "wavefrontDistancePhylo"; // weighted average (=total branch distance/total time)
     public static final String WAVEFRONT_RATE = "wavefrontRate"; // weighted average (=total distance/total time)
     public static final String DIFFUSION_COEFFICIENT = "diffusionCoefficient";
     public static final String HEIGHT_UPPER = "heightUpper";
@@ -127,14 +135,14 @@ public class ContinuousDiffusionStatistic extends Statistic.Abstract {
         double treeLength = 0;
         double treeDistance = 0;
         double totalMaxDistanceFromRoot = 0;
-        double maxDistanceFromRoot = 0; // can only be used when cumulative and not associated with discrete state (not based on the distances on the branches from the root up that point)
+        double maxDistanceFromRootCumulative = 0; // can only be used when cumulative and not associated with discrete state (not based on the distances on the branches from the root up to that point)
         double maxBranchDistanceFromRoot = 0;
         double maxDistanceOverTimeFromRootWA = 0;  // can only be used when cumulative and not associated with discrete state (not based on the distances on the branches from the root up that point)
         double maxBranchDistanceOverTimeFromRootWA = 0;
 
-        //double[] rates =  null;
         List<Double> rates = new ArrayList<Double>();
         List<Double> traits = new ArrayList<Double>();
+        List<double[]> traits2D = new ArrayList<double[]>();
         //double[] diffusionCoefficients =  null;
         List<Double> diffusionCoefficients = new ArrayList<Double>();
         double waDiffusionCoefficient =  0;
@@ -228,6 +236,10 @@ public class ContinuousDiffusionStatistic extends Statistic.Abstract {
                                 traits.add(traitLow[(dimension - 1)]);
                             }
 
+                            if (traitLow.length == 2){
+                                traits2D.add(traitLow);
+                            }
+
                             double time;
                             if (stateString != null) {
                                 time = history.getStateTime(stateString);
@@ -262,29 +274,31 @@ public class ContinuousDiffusionStatistic extends Statistic.Abstract {
                                 }
 
                                 SphericalPolarCoordinates rootCoord = new SphericalPolarCoordinates(rootTrait[0], rootTrait[1]);
-                                double tempDistanceFromRoot = rootCoord.distance(new SphericalPolarCoordinates(traitUp[0], traitUp[1]));
-                                if (tempDistanceFromRoot > totalMaxDistanceFromRoot) {
-                                    totalMaxDistanceFromRoot = tempDistanceFromRoot;
+                                double tempDistanceFromRootLow = rootCoord.distance(new SphericalPolarCoordinates(traitUp[0], traitUp[1]));
+
+
+                                if (tempDistanceFromRootLow > totalMaxDistanceFromRoot) {
+                                    totalMaxDistanceFromRoot = tempDistanceFromRootLow;
                                     if (stateString != null) {
                                         double[] stateTimeDistance = getStateTimeAndDistanceFromRoot(tree, node, timeLow, traitLikelihood, traitName, traitLow, precision, branchRates, true);
                                         if (stateTimeDistance[0] > 0) {
-                                            maxDistanceFromRoot = tempDistanceFromRoot * (stateTimeDistance[0] / timeFromRoot);
-                                            maxDistanceOverTimeFromRootWA = maxDistanceFromRoot / stateTimeDistance[0];
+                                            maxDistanceFromRootCumulative = tempDistanceFromRootLow * (stateTimeDistance[0] / timeFromRoot);
+                                            maxDistanceOverTimeFromRootWA = maxDistanceFromRootCumulative / stateTimeDistance[0];
                                             maxBranchDistanceFromRoot = stateTimeDistance[1];
                                             maxBranchDistanceOverTimeFromRootWA = stateTimeDistance[1] / stateTimeDistance[0];
                                         }
                                     } else {
-                                        maxDistanceFromRoot = tempDistanceFromRoot;
-                                        maxDistanceOverTimeFromRootWA = tempDistanceFromRoot / timeFromRoot;
+                                        maxDistanceFromRootCumulative = tempDistanceFromRootLow;
+                                        maxDistanceOverTimeFromRootWA = tempDistanceFromRootLow / timeFromRoot;
                                         double[] timeDistance = getTimeAndDistanceFromRoot(tree, node, timeLow, traitLikelihood, traitName, traitLow, true);
                                         maxBranchDistanceFromRoot = timeDistance[1];
                                         maxBranchDistanceOverTimeFromRootWA = timeDistance[1] / timeDistance[0];
 
                                     }
-                                    //distance between traitLow and traitUp for maxDistanceFromRoot
+                                    //distance between traitLow and traitUp for maxDistanceFromRootCumulative
                                     if (timeUp == upperHeight) {
                                         if (time > 0) {
-                                            maxDistanceFromRoot = distance;
+                                            maxDistanceFromRootCumulative = distance;
                                             maxDistanceOverTimeFromRootWA = distance / time;
                                             maxBranchDistanceFromRoot = distance;
                                             maxBranchDistanceOverTimeFromRootWA = distance / time;
@@ -314,22 +328,22 @@ public class ContinuousDiffusionStatistic extends Statistic.Abstract {
                                     if (stateString != null) {
                                         double[] stateTimeDistance = getStateTimeAndDistanceFromRoot(tree, node, timeLow, traitLikelihood, traitName, traitLow, precision, branchRates, false);
                                         if (stateTimeDistance[0] > 0) {
-                                            maxDistanceFromRoot = tempDistanceFromRoot * (stateTimeDistance[0] / timeFromRoot);
-                                            maxDistanceOverTimeFromRootWA = maxDistanceFromRoot / stateTimeDistance[0];
+                                            maxDistanceFromRootCumulative = tempDistanceFromRoot * (stateTimeDistance[0] / timeFromRoot);
+                                            maxDistanceOverTimeFromRootWA = maxDistanceFromRootCumulative / stateTimeDistance[0];
                                             maxBranchDistanceFromRoot = stateTimeDistance[1];
                                             maxBranchDistanceOverTimeFromRootWA = stateTimeDistance[1] / stateTimeDistance[0];
                                         }
                                     } else {
-                                        maxDistanceFromRoot = tempDistanceFromRoot;
+                                        maxDistanceFromRootCumulative = tempDistanceFromRoot;
                                         maxDistanceOverTimeFromRootWA = tempDistanceFromRoot / timeFromRoot;
                                         double[] timeDistance = getTimeAndDistanceFromRoot(tree, node, timeLow, traitLikelihood, traitName, traitLow, false);
                                         maxBranchDistanceFromRoot = timeDistance[1];
                                         maxBranchDistanceOverTimeFromRootWA = timeDistance[1] / timeDistance[0];
                                     }
-                                    //distance between traitLow and traitUp for maxDistanceFromRoot
+                                    //distance between traitLow and traitUp for maxDistanceFromRootCumulative
                                     if (timeUp == upperHeight) {
                                         if (time > 0) {
-                                            maxDistanceFromRoot = distance;
+                                            maxDistanceFromRootCumulative = distance;
                                             maxDistanceOverTimeFromRootWA = distance / time;
                                             maxBranchDistanceFromRoot = distance;
                                             maxBranchDistanceOverTimeFromRootWA = distance / time;
@@ -366,6 +380,9 @@ public class ContinuousDiffusionStatistic extends Statistic.Abstract {
             } else {
                 return DiscreteStatistics.mean(toArray(traits));
             }
+        } else if (summaryStat == summaryStatistic.TRAIT2DAREA) {
+            double area = getAreaFrom2Dtraits(traits2D, 0.99);
+            return area;
         }  else if (summaryStat == summaryStatistic.DIFFUSION_COEFFICIENT) {
             if (summaryMode == Mode.AVERAGE) {
                 return DiscreteStatistics.mean(toArray(diffusionCoefficients));
@@ -381,8 +398,10 @@ public class ContinuousDiffusionStatistic extends Statistic.Abstract {
             //wavefront distance
             //TODO: restrict to non state-specific wavefrontDistance/rate
         }  else if (summaryStat == summaryStatistic.WAVEFRONT_DISTANCE) {
-            return maxDistanceFromRoot;
+            return maxDistanceFromRootCumulative;
 //            return maxBranchDistanceFromRoot;
+        } else if (summaryStat == summaryStatistic.WAVEFRONT_DISTANCE_PHYLO) {
+            return maxBranchDistanceFromRoot;
             //wavefront rate, only weighted average TODO: extend for average, median, COEFFICIENT_OF_VARIATION?
         }  else if (summaryStat == summaryStatistic.WAVEFRONT_RATE)  {
             return maxDistanceOverTimeFromRootWA;
@@ -644,7 +663,6 @@ public class ContinuousDiffusionStatistic extends Statistic.Abstract {
     public double[] getTimeAndDistanceFromRoot(MultivariateTraitTree tree, NodeRef node, double timeLow, AbstractMultivariateTraitLikelihood traitLikelihood, String traitName, double[] traitLow, boolean useGreatCircleDistance){
 
         NodeRef nodeOfInterest = node;
-
         double[] timeDistance = new double[]{0,0};
 
         double[] rootTrait = traitLikelihood.getTraitForNode(tree, tree.getRoot(), traitName);
@@ -663,7 +681,7 @@ public class ContinuousDiffusionStatistic extends Statistic.Abstract {
             timeDistance[0] += tree.getNodeHeight(parentNode) - nodeHeight;
 
             if (useGreatCircleDistance){
-                timeDistance[1] += getGreatCircleDistance(nodeTrait,rootTrait) - getGreatCircleDistance(parentTrait,rootTrait);
+                timeDistance[1] += getGreatCircleDistance(nodeTrait, rootTrait) - getGreatCircleDistance(parentTrait, rootTrait);
             }  else {
                 timeDistance[1] += getNativeDistance(nodeTrait,rootTrait) - getNativeDistance(parentTrait,rootTrait);
             }
@@ -760,6 +778,36 @@ public class ContinuousDiffusionStatistic extends Statistic.Abstract {
         }
     }
 
+    private static double getAreaFrom2Dtraits(List<double[]> traits2D, double hpdValue) {
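+        // Draw a kernel-based contour (ContourWithSynder) of the 2D sample at the requested
+        // HPD level, convert each contour path into a Polygon2D via KML coordinates, and sum
+        // the polygon areas.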
+
+        boolean bandwidthlimit = true;
+
+        double totalArea = 0;
+        double[][] y = new double[2][traits2D.size()];
+        for (int a = 0; a < traits2D.size(); a++) {
+            double[] trait = traits2D.get(a);
+            y[0][a] = trait[0];
+            y[1][a] = trait[1];
+//            System.err.println(trait[0]+"\t"+trait[1]);
+        }
+
+        ContourMaker contourMaker = new ContourWithSynder(y[0], y[1], bandwidthlimit);
+
+        ContourPath[] paths = contourMaker.getContourPaths(hpdValue);
+        int pathCounter = 1;
+        for (ContourPath path : paths) {
+            KMLCoordinates coords = new KMLCoordinates(path.getAllX(), path.getAllY());
+            Element testElement = new Element("test");
+            testElement.addContent(coords.toXML());
+            Polygon2D testPolygon = new Polygon2D(testElement);
+            totalArea += testPolygon.calculateArea();
+//            System.err.println("area: "+testPolygon.calculateArea());
+        }
+
+        return totalArea;
+    }
+
 //    private int getStateInt(String state){
 //        int returnInt = -1;
 //        int counter = 0;
@@ -790,11 +838,13 @@ public class ContinuousDiffusionStatistic extends Statistic.Abstract {
 
     enum summaryStatistic {
         TRAIT,
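+        // total area of the HPD contour(s) around the sampled 2D trait values (see getAreaFrom2Dtraits)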
+        TRAIT2DAREA,
         DIFFUSION_TIME,
         DIFFUSION_DISTANCE,
         DIFFUSION_RATE,
         DIFFUSION_COEFFICIENT,
         WAVEFRONT_DISTANCE,
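+        // wavefront distance accumulated branch-by-branch along the tree (maxBranchDistanceFromRoot)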
+        WAVEFRONT_DISTANCE_PHYLO,
         WAVEFRONT_RATE,
     }
 
@@ -873,29 +923,40 @@ public class ContinuousDiffusionStatistic extends Statistic.Abstract {
             } else if (statistic.equals(WAVEFRONT_DISTANCE)) {
                 summaryStat = summaryStatistic.WAVEFRONT_DISTANCE;
                 if (!mode.equals(WEIGHTED_AVERAGE)) {
-                    System.err.println(name+": mode = "+mode+" ignored for "+WAVEFRONT_DISTANCE);
+                    System.err.println(name + ": mode = " + mode + " ignored for " + WAVEFRONT_DISTANCE);
+                }
+            } else if (statistic.equals(WAVEFRONT_DISTANCE_PHYLO)) {
+                summaryStat = summaryStatistic.WAVEFRONT_DISTANCE_PHYLO;
+                if (!mode.equals(WEIGHTED_AVERAGE)) {
+                    System.err.println(name + ": mode = " + mode + " ignored for " + WAVEFRONT_DISTANCE_PHYLO);
                 }
             } else if (statistic.equals(TRAIT)) {
                 summaryStat = summaryStatistic.TRAIT;
                 if (mode.equals(WEIGHTED_AVERAGE)) {
-                    System.err.println(name+": mode = "+mode+" ignored for "+TRAIT+", resorting to "+AVERAGE);
+                    System.err.println(name + ": mode = " + mode + " ignored for " + TRAIT + ", resorting to " + AVERAGE);
                     averageMode = Mode.AVERAGE;
                 }
                 if (upperHeight < Double.MAX_VALUE) {
-                    System.err.println(name+": only "+HEIGHT_LOWER+" or " + HEIGHT_LOWER_SERIE + " are relevant for "+TRAIT);
+                    System.err.println(name + ": only " + HEIGHT_LOWER + " or " + HEIGHT_LOWER_SERIE + " are relevant for " + TRAIT);
                 }
-                dimension = xo.getAttribute(DIMENSION,1);
+                dimension = xo.getAttribute(DIMENSION, 1);
                 if (dimension == 0) {
-                    System.err.println(name+": trait dimensions start from 1. Setting dimension to 1");
+                    System.err.println(name + ": trait dimensions start from 1. Setting dimension to 1");
                     dimension = 1;
                 }
-                if (cumulative){
-                    System.err.println(name+": "+CUMULATIVE+" is ignored for " +TRAIT);
+                if (cumulative) {
+                    System.err.println(name + ": " + CUMULATIVE + " is ignored for " + TRAIT);
+                }
+                if (greatCircleDistances) {
+                    System.err.println(name + ": " + USE_GREATCIRCLEDISTANCES + " is ignored for " + TRAIT);
                 }
-                if (greatCircleDistances){
-                    System.err.println(name+": "+USE_GREATCIRCLEDISTANCES+" is ignored for " +TRAIT);
+            } else if (statistic.equals(TRAIT2DAREA)) {
+                summaryStat = summaryStatistic.TRAIT2DAREA;
+                dimension = xo.getAttribute(DIMENSION, 2);
+                if (dimension != 2) {
+                    System.err.println(name + ": trait dimension (" + dimension + ") is not 2; cannot calculate a 2D area for the traits, zeros will be returned");
                 }
             } else if (statistic.equals(WAVEFRONT_RATE)) {
                 summaryStat = summaryStatistic.WAVEFRONT_RATE;
             } else if (statistic.equals(DIFFUSION_COEFFICIENT)) {
                 summaryStat = summaryStatistic.DIFFUSION_COEFFICIENT;
diff --git a/src/dr/evomodel/continuous/FullyConjugateMultivariateTraitLikelihood.java b/src/dr/evomodel/continuous/FullyConjugateMultivariateTraitLikelihood.java
index c0f72bd..fe0695c 100644
--- a/src/dr/evomodel/continuous/FullyConjugateMultivariateTraitLikelihood.java
+++ b/src/dr/evomodel/continuous/FullyConjugateMultivariateTraitLikelihood.java
@@ -40,6 +40,7 @@ import dr.math.matrixAlgebra.Matrix;
 import dr.math.matrixAlgebra.Vector;
 import dr.xml.Reportable;
 
+import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
@@ -53,32 +54,59 @@ import java.util.Set;
  */
 public class FullyConjugateMultivariateTraitLikelihood extends IntegratedMultivariateTraitLikelihood implements ConjugateWishartStatisticsProvider, Reportable {
 
-    public FullyConjugateMultivariateTraitLikelihood(String traitName,
-                                                     MultivariateTraitTree treeModel,
-                                                     MultivariateDiffusionModel diffusionModel,
-                                                     CompoundParameter traitParameter,
-                                                     Parameter deltaParameter,
-                                                     List<Integer> missingIndices,
-                                                     boolean cacheBranches,
-                                                     boolean scaleByTime,
-                                                     boolean useTreeLength,
-                                                     BranchRateModel rateModel,
-                                                     Model samplingDensity,
-                                                     boolean reportAsMultivariate,
-                                                     double[] rootPriorMean,
-                                                     double rootPriorSampleSize,
-                                                     boolean reciprocalRates) {
-
-        super(traitName, treeModel, diffusionModel, traitParameter, deltaParameter, missingIndices, cacheBranches, scaleByTime,
-                useTreeLength, rateModel, samplingDensity, reportAsMultivariate, reciprocalRates);
-
-        // fully-conjugate multivariate normal with own mean and prior sample size
-        this.rootPriorMean = rootPriorMean;
-        this.rootPriorSampleSize = rootPriorSampleSize;
-
-        priorInformationKnown = false;
-    }
-
+//    public FullyConjugateMultivariateTraitLikelihood(String traitName,
+//                                                     MultivariateTraitTree treeModel,
+//                                                     MultivariateDiffusionModel diffusionModel,
+//                                                     CompoundParameter traitParameter,
+//                                                     Parameter deltaParameter,
+//                                                     List<Integer> missingIndices,
+//                                                     boolean cacheBranches,
+//                                                     boolean scaleByTime,
+//                                                     boolean useTreeLength,
+//                                                     BranchRateModel rateModel,
+//                                                     Model samplingDensity,
+//                                                     boolean reportAsMultivariate,
+//                                                     double[] rootPriorMean,
+//                                                     double rootPriorSampleSize,
+//                                                     boolean reciprocalRates) {
+//
+//        super(traitName, treeModel, diffusionModel, traitParameter, deltaParameter, missingIndices, cacheBranches, scaleByTime,
+//                useTreeLength, rateModel, samplingDensity, reportAsMultivariate, reciprocalRates);
+//
+//        // fully-conjugate multivariate normal with own mean and prior sample size
+//        this.rootPriorMean = rootPriorMean;
+//        this.rootPriorSampleSize = rootPriorSampleSize;
+//
+//        priorInformationKnown = false;
+//    }
+//
+//
+//    public FullyConjugateMultivariateTraitLikelihood(String traitName,
+//                                                     MultivariateTraitTree treeModel,
+//                                                     MultivariateDiffusionModel diffusionModel,
+//                                                     CompoundParameter traitParameter,
+//                                                     Parameter deltaParameter,
+//                                                     List<Integer> missingIndices,
+//                                                     boolean cacheBranches,
+//                                                     boolean scaleByTime,
+//                                                     boolean useTreeLength,
+//                                                     BranchRateModel rateModel,
+//                                                     List<BranchRateModel> driftModels,
+//                                                     Model samplingDensity,
+//                                                     boolean reportAsMultivariate,
+//                                                     double[] rootPriorMean,
+//                                                     double rootPriorSampleSize,
+//                                                     boolean reciprocalRates) {
+//
+//        super(traitName, treeModel, diffusionModel, traitParameter, deltaParameter, missingIndices, cacheBranches, scaleByTime,
+//                useTreeLength, rateModel, driftModels, samplingDensity, reportAsMultivariate, reciprocalRates);
+//
+//        // fully-conjugate multivariate normal with own mean and prior sample size
+//        this.rootPriorMean = rootPriorMean;
+//        this.rootPriorSampleSize = rootPriorSampleSize;
+//
+//        priorInformationKnown = false;
+//    }
 
     public FullyConjugateMultivariateTraitLikelihood(String traitName,
                                                      MultivariateTraitTree treeModel,
@@ -91,32 +119,6 @@ public class FullyConjugateMultivariateTraitLikelihood extends IntegratedMultiva
                                                      boolean useTreeLength,
                                                      BranchRateModel rateModel,
                                                      List<BranchRateModel> driftModels,
-                                                     Model samplingDensity,
-                                                     boolean reportAsMultivariate,
-                                                     double[] rootPriorMean,
-                                                     double rootPriorSampleSize,
-                                                     boolean reciprocalRates) {
-
-        super(traitName, treeModel, diffusionModel, traitParameter, deltaParameter, missingIndices, cacheBranches, scaleByTime,
-                useTreeLength, rateModel, driftModels, samplingDensity, reportAsMultivariate, reciprocalRates);
-
-        // fully-conjugate multivariate normal with own mean and prior sample size
-        this.rootPriorMean = rootPriorMean;
-        this.rootPriorSampleSize = rootPriorSampleSize;
-
-        priorInformationKnown = false;
-    }
-
-    public FullyConjugateMultivariateTraitLikelihood(String traitName,
-                                                     MultivariateTraitTree treeModel,
-                                                     MultivariateDiffusionModel diffusionModel,
-                                                     CompoundParameter traitParameter,
-                                                     Parameter deltaParameter,
-                                                     List<Integer> missingIndices,
-                                                     boolean cacheBranches,
-                                                     boolean scaleByTime,
-                                                     boolean useTreeLength,
-                                                     BranchRateModel rateModel,
                                                      List<BranchRateModel> optimalValues,
                                                      BranchRateModel strengthOfSelection,
                                                      Model samplingDensity,
@@ -126,7 +128,7 @@ public class FullyConjugateMultivariateTraitLikelihood extends IntegratedMultiva
                                                      boolean reciprocalRates) {
 
         super(traitName, treeModel, diffusionModel, traitParameter, deltaParameter, missingIndices, cacheBranches, scaleByTime,
-                useTreeLength, rateModel, optimalValues, strengthOfSelection, samplingDensity, reportAsMultivariate, reciprocalRates);
+                useTreeLength, rateModel, driftModels, optimalValues, strengthOfSelection, samplingDensity, reportAsMultivariate, reciprocalRates);
 
         // fully-conjugate multivariate normal with own mean and prior sample size
         this.rootPriorMean = rootPriorMean;
@@ -579,6 +581,54 @@ public class FullyConjugateMultivariateTraitLikelihood extends IntegratedMultiva
     private double[] ascertainedData = null;
     private static final boolean DEBUG_ASCERTAINMENT = false;
 
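+    // Element-wise min/max/sum summaries over vectors and matrices, used in getReport()
+    // below as quick sanity checks on the tree variance and the data vector.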
+    private double vectorMin(double[] vec) {
+        double min = Double.MAX_VALUE;
+        for (int i = 0; i < vec.length; ++i) {
+            min = Math.min(min, vec[i]);
+        }
+        return min;
+    }
+
+    private double matrixMin(double[][] mat) {
+        double min = Double.MAX_VALUE;
+        for (int i = 0; i < mat.length; ++i) {
+            min = Math.min(min, vectorMin(mat[i]));
+        }
+        return min;
+    }
+
+    private double vectorMax(double[] vec) {
+        double max = -Double.MAX_VALUE;
+        for (int i = 0; i < vec.length; ++i) {
+            max = Math.max(max, vec[i]);
+        }
+        return max;
+    }
+
+    private double matrixMax(double[][] mat) {
+        double max = -Double.MAX_VALUE;
+        for (int i = 0; i < mat.length; ++i) {
+            max = Math.max(max, vectorMax(mat[i]));
+        }
+        return max;
+    }
+
+    private double vectorSum(double[] vec) {
+        double sum = 0.0;
+        for (int i = 0; i < vec.length; ++i) {
+            sum += vec[i];
+        }
+        return sum;
+    }
+
+    private double matrixSum(double[][] mat) {
+        double sum = 0.0;
+        for (int i = 0; i < mat.length; ++i) {
+            sum += vectorSum(mat[i]);
+        }
+        return sum;
+    }
+
     @Override
     public String getReport() {
         StringBuilder sb = new StringBuilder();
@@ -596,19 +646,22 @@ public class FullyConjugateMultivariateTraitLikelihood extends IntegratedMultiva
 
         sb.append("Tree variance:\n");
         sb.append(new Matrix(treeVariance));
+        sb.append(matrixMin(treeVariance)).append("\t").append(matrixMax(treeVariance)).append("\t").append(matrixSum(treeVariance));
         sb.append("\n\n");
         sb.append("Trait variance:\n");
         sb.append(traitVariance);
         sb.append("\n\n");
-        sb.append("Joint variance:\n");
-        sb.append(new Matrix(jointVariance));
-        sb.append("\n\n");
+//        sb.append("Joint variance:\n");
+//        sb.append(new Matrix(jointVariance));
+//        sb.append("\n\n");
 
         double[] data = new double[jointVariance.length];
         System.arraycopy(meanCache, 0, data, 0, jointVariance.length);
 
         sb.append("Data:\n");
-        sb.append(new Vector(data));
+        sb.append(new Vector(data)).append("\n");
+        sb.append(data.length).append("\t").append(vectorMin(data)).append("\t").append(vectorMax(data)).append("\t").append(vectorSum(data)).append("\n");
+        sb.append(treeModel.getNodeTaxon(treeModel.getExternalNode(0)).getId());
         sb.append("\n\n");
 
         MultivariateNormalDistribution mvn = new MultivariateNormalDistribution(new double[data.length], new Matrix(jointVariance).inverse().toComponents());
@@ -648,7 +701,102 @@ public class FullyConjugateMultivariateTraitLikelihood extends IntegratedMultiva
     }
 
 
-    private double[][] computeTreeVariance(boolean includeRoot) {
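+    // Faster tree-variance computation (used by computeTreeVariance2): each tip records the
+    // cumulative rescaled branch length from the root along its path, so the covariance of
+    // two tips is the root-to-MRCA distance read off the shared prefix of their lists.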
+    class NodeToRootDistance {
+        NodeRef node;
+        double distance;
+
+        NodeToRootDistance(NodeRef node, double distance) {
+            this.node = node;
+            this.distance = distance;
+        }
+    }
+
+    class NodeToRootDistanceList extends ArrayList<NodeToRootDistance> {
+
+        NodeToRootDistanceList(NodeToRootDistanceList parentList) {
+            super(parentList);
+        }
+
+        NodeToRootDistanceList() {
+            super();
+        }
+    }
+
+    private void addNodeToList(final NodeRef thisNode, NodeToRootDistanceList parentList, NodeToRootDistanceList[] tipLists) {
+
+        if (!treeModel.isRoot(thisNode)) {
+            double increment = getRescaledBranchLengthForPrecision(thisNode);
+            if (parentList.size() > 0) {
+                increment += parentList.get(parentList.size() - 1).distance;
+            }
+            parentList.add(new NodeToRootDistance(thisNode, increment));
+        }
+
+        if (treeModel.isExternal(thisNode)) {
+            tipLists[thisNode.getNumber()] = parentList;
+        } else { // recurse
+            NodeToRootDistanceList shallowCopy = new NodeToRootDistanceList(parentList);
+            addNodeToList(treeModel.getChild(thisNode, 0), shallowCopy, tipLists);
+            addNodeToList(treeModel.getChild(thisNode, 1), parentList, tipLists);
+        }
+    }
+
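+    // Shared ancestors are the very same NodeToRootDistance objects in both lists (the
+    // recursion only shallow-copies), so reference equality marks where two root-to-tip
+    // paths diverge; the entry just before the divergence holds the root-to-MRCA distance.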
+    private double getTimeBetweenNodeToRootLists(List<NodeToRootDistance> x, List<NodeToRootDistance> y) {
+        if (x.get(0) != y.get(0)) {
+            return 0.0;
+        }
+
+        int index = 1;
+        while (x.get(index) == y.get(index)) {
+            ++index;
+        }
+        return x.get(index - 1).distance;
+    }
+
+    public double[][] computeTreeVariance2(boolean includeRoot) {
+
+        final int tipCount = treeModel.getExternalNodeCount();
+        double[][] variance = new double[tipCount][tipCount];
+
+        NodeToRootDistanceList[] tipToRootDistances = new NodeToRootDistanceList[tipCount];
+
+        // Recurse down tree to generate lists
+        addNodeToList(treeModel.getRoot(), new NodeToRootDistanceList(), tipToRootDistances);
+
+        for (int i = 0; i < tipCount; ++i) {
+            // Fill in diagonal
+            List<NodeToRootDistance> iList = tipToRootDistances[i];
+            double marginalTime = iList.get(iList.size() - 1).distance;
+            variance[i][i] = marginalTime;
+
+            for (int j = i + 1; j < tipCount; ++j) {
+                List<NodeToRootDistance> jList = tipToRootDistances[j];
+
+                double time = getTimeBetweenNodeToRootLists(iList, jList);
+                variance[j][i] = variance[i][j] = time;
+            }
+        }
+
+        variance = removeMissingTipsInTreeVariance(variance); // Automatically prune missing tips
+
+        if (DEBUG) {
+            System.err.println("");
+            System.err.println("New tree (trimmed) conditional variance:\n" + new Matrix(variance));
+        }
+
+        if (includeRoot) {
+            for (int i = 0; i < variance.length; ++i) {
+                for (int j = 0; j < variance[i].length; ++j) {
+                    variance[i][j] += 1.0 / getPriorSampleSize();
+                }
+            }
+        }
+
+        return variance;
+
+    }
+
+    public double[][] computeTreeVariance(boolean includeRoot) {
         final int tipCount = treeModel.getExternalNodeCount();
         double[][] variance = new double[tipCount][tipCount];
 
diff --git a/src/dr/evomodel/continuous/GaussianProcessFromTree.java b/src/dr/evomodel/continuous/GaussianProcessFromTree.java
index 53a2357..ba6f1e3 100644
--- a/src/dr/evomodel/continuous/GaussianProcessFromTree.java
+++ b/src/dr/evomodel/continuous/GaussianProcessFromTree.java
@@ -28,10 +28,12 @@ package dr.evomodel.continuous;
 import dr.evolution.tree.NodeRef;
 import dr.inference.model.Likelihood;
 import dr.inference.model.Parameter;
+import dr.math.KroneckerOperation;
 import dr.math.distributions.GaussianProcessRandomGenerator;
 import dr.math.distributions.MultivariateNormalDistribution;
 import dr.math.matrixAlgebra.CholeskyDecomposition;
 import dr.math.matrixAlgebra.IllegalDimension;
+import dr.math.matrixAlgebra.Matrix;
 import dr.math.matrixAlgebra.SymmetricMatrix;
 
 /**
@@ -46,10 +48,63 @@ public class GaussianProcessFromTree implements GaussianProcessRandomGenerator {
         this.traitModel = traitModel;
     }
 
+    @Override
     public Likelihood getLikelihood() {
         return traitModel;
     }
 
+    @Override
+    public int getDimension() {
+        return traitModel.getTreeModel().getExternalNodeCount() * traitModel.getDimTrait();
+    }
+
+    @Override
+    public double[][] getPrecisionMatrix() {
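+        // Joint precision over all tips and trait dimensions: invert the phylogenetic (tree)
+        // variance and take its Kronecker product with the trait precision; the TODO below
+        // notes that the Kronecker factor order still needs double-checking.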
+        final boolean includeRoot = false; // TODO make an option
+
+        double[][] treeVariance;
+//        long startTime1 = System.nanoTime();
+        treeVariance = traitModel.computeTreeVariance2(includeRoot);
+//        long estimatedTime1 = System.nanoTime() - startTime1;
+
+//        long startTime2 = System.nanoTime();
+//        treeVariance = traitModel.computeTreeVariance(includeRoot);
+//        long estimatedTime2 = System.nanoTime() - startTime2;
+
+        double[][] traitPrecision = traitModel.getDiffusionModel().getPrecisionmatrix();
+
+
+//        for (int i = 0; i < treeVariance2.length; ++i) {
+//            for (int j = 0; j < treeVariance2[i].length; ++j) {
+//                if (treeVariance2[i][j] != treeVariance[i][j]) {
+//                    System.err.println(i + " " + j);
+//                    System.err.println(treeVariance2[i][j] + " " + treeVariance[i][j]);
+//                    System.exit(-1);
+//                }
+//            }
+//        }
+
+//        System.err.println("T1: " + estimatedTime1);
+//        System.err.println("T2: " + estimatedTime2);
+
+//        System.err.println("\t\tSTART prec");
+        Matrix treePrecision = new Matrix(treeVariance).inverse();
+
+//        System.err.println("\t\tSTART kron");
+
+        double[][] jointPrecision = KroneckerOperation.product(treePrecision.toComponents(), traitPrecision); // TODO Double-check order
+
+        return jointPrecision;
+    }
+
+    private static void scale(double[][] matrix, double scale) {
+        for (int i = 0; i < matrix.length; ++i) {
+            for (int j = 0; j < matrix[i].length; ++j) {
+                matrix[i][j] *= scale;
+            }
+        }
+    }
+
     public double getLogLikelihood() { return traitModel.getLogLikelihood(); }
 
     //    boolean firstTime=true;
diff --git a/src/dr/evomodel/continuous/IntegratedMultivariateTraitLikelihood.java b/src/dr/evomodel/continuous/IntegratedMultivariateTraitLikelihood.java
index 5059d69..25ac803 100644
--- a/src/dr/evomodel/continuous/IntegratedMultivariateTraitLikelihood.java
+++ b/src/dr/evomodel/continuous/IntegratedMultivariateTraitLikelihood.java
@@ -42,9 +42,9 @@ import dr.math.matrixAlgebra.SymmetricMatrix;
 import dr.math.matrixAlgebra.Vector;
 import dr.util.Author;
 import dr.util.Citation;
+import dr.util.CommonCitations;
 
-import java.util.Arrays;
-import java.util.List;
+import java.util.*;
 
 /**
  * A multivariate trait likelihood that analytically integrates out the unobserved trait values at all internal
@@ -56,33 +56,33 @@ public abstract class IntegratedMultivariateTraitLikelihood extends AbstractMult
 
     public static final double LOG_SQRT_2_PI = 0.5 * Math.log(2 * Math.PI);
 
-    public IntegratedMultivariateTraitLikelihood(String traitName,
-                                                 MultivariateTraitTree treeModel,
-                                                 MultivariateDiffusionModel diffusionModel,
-                                                 CompoundParameter traitParameter,
-                                                 List<Integer> missingIndices,
-                                                 boolean cacheBranches, boolean scaleByTime, boolean useTreeLength,
-                                                 BranchRateModel rateModel, Model samplingDensity,
-                                                 boolean reportAsMultivariate,
-                                                 boolean reciprocalRates) {
-
-        this(traitName, treeModel, diffusionModel, traitParameter, null, missingIndices, cacheBranches, scaleByTime,
-                useTreeLength, rateModel, samplingDensity, reportAsMultivariate, reciprocalRates);
-    }
-
-    public IntegratedMultivariateTraitLikelihood(String traitName,
-                                                 MultivariateTraitTree treeModel,
-                                                 MultivariateDiffusionModel diffusionModel,
-                                                 CompoundParameter traitParameter,
-                                                 Parameter deltaParameter,
-                                                 List<Integer> missingIndices,
-                                                 boolean cacheBranches, boolean scaleByTime, boolean useTreeLength,
-                                                 BranchRateModel rateModel, Model samplingDensity,
-                                                 boolean reportAsMultivariate,
-                                                 boolean reciprocalRates) {
-        this(traitName, treeModel, diffusionModel, traitParameter, deltaParameter, missingIndices, cacheBranches,
-                scaleByTime, useTreeLength, rateModel, null, samplingDensity, reportAsMultivariate, reciprocalRates);
-    }
+//    public IntegratedMultivariateTraitLikelihood(String traitName,
+//                                                 MultivariateTraitTree treeModel,
+//                                                 MultivariateDiffusionModel diffusionModel,
+//                                                 CompoundParameter traitParameter,
+//                                                 List<Integer> missingIndices,
+//                                                 boolean cacheBranches, boolean scaleByTime, boolean useTreeLength,
+//                                                 BranchRateModel rateModel, Model samplingDensity,
+//                                                 boolean reportAsMultivariate,
+//                                                 boolean reciprocalRates) {
+//
+//        this(traitName, treeModel, diffusionModel, traitParameter, null, missingIndices, cacheBranches, scaleByTime,
+//                useTreeLength, rateModel, samplingDensity, reportAsMultivariate, reciprocalRates);
+//    }
+//
+//    public IntegratedMultivariateTraitLikelihood(String traitName,
+//                                                 MultivariateTraitTree treeModel,
+//                                                 MultivariateDiffusionModel diffusionModel,
+//                                                 CompoundParameter traitParameter,
+//                                                 Parameter deltaParameter,
+//                                                 List<Integer> missingIndices,
+//                                                 boolean cacheBranches, boolean scaleByTime, boolean useTreeLength,
+//                                                 BranchRateModel rateModel, Model samplingDensity,
+//                                                 boolean reportAsMultivariate,
+//                                                 boolean reciprocalRates) {
+//        this(traitName, treeModel, diffusionModel, traitParameter, deltaParameter, missingIndices, cacheBranches,
+//                scaleByTime, useTreeLength, rateModel, null, samplingDensity, reportAsMultivariate, reciprocalRates);
+//    }
 
 
     protected final CacheHelper cacheHelper;
@@ -96,73 +96,28 @@ public abstract class IntegratedMultivariateTraitLikelihood extends AbstractMult
                                                  boolean cacheBranches, boolean scaleByTime, boolean useTreeLength,
                                                  BranchRateModel rateModel,
                                                  List<BranchRateModel> driftModels,
+                                                 List<BranchRateModel> optimalValues,
+                                                 BranchRateModel strengthOfSelection,
                                                  Model samplingDensity,
                                                  boolean reportAsMultivariate,
                                                  boolean reciprocalRates) {
 
         super(traitName, treeModel, diffusionModel, traitParameter, deltaParameter, missingIndices, cacheBranches, scaleByTime,
-                useTreeLength, rateModel, driftModels, samplingDensity, reportAsMultivariate, reciprocalRates);
+                useTreeLength, rateModel, driftModels,
+                optimalValues, strengthOfSelection,
+                samplingDensity, reportAsMultivariate, reciprocalRates);
 
 
         // Delegate caches to helper
         meanCache = new double[dim * treeModel.getNodeCount()];
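+        // Choose the branch-mean cache: drift models use a DriftCacheHelper, optimal values
+        // (with a strength of selection) use an OUCacheHelper, otherwise the plain CacheHelper.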
         if (driftModels != null) {
             cacheHelper = new DriftCacheHelper(dim * treeModel.getNodeCount(), cacheBranches); // new DriftCacheHelper ....
-        } else {
-            cacheHelper = new CacheHelper(dim * treeModel.getNodeCount(), cacheBranches);
-        }
-
-        drawnStates = new double[dim * treeModel.getNodeCount()];
-        upperPrecisionCache = new double[treeModel.getNodeCount()];
-        lowerPrecisionCache = new double[treeModel.getNodeCount()];
-        logRemainderDensityCache = new double[treeModel.getNodeCount()];
-
-        if (cacheBranches) {
-            storedMeanCache = new double[dim * treeModel.getNodeCount()];
-            storedUpperPrecisionCache = new double[treeModel.getNodeCount()];
-            storedLowerPrecisionCache = new double[treeModel.getNodeCount()];
-            storedLogRemainderDensityCache = new double[treeModel.getNodeCount()];
-        }
-
-        // Set up reusable temporary storage
-        Ay = new double[dimTrait];
-        tmpM = new double[dimTrait][dimTrait];
-        tmp2 = new double[dimTrait];
-
-        zeroDimVector = new double[dim];
-
-        missingTraits = new MissingTraits.CompletelyMissing(treeModel, missingIndices, dim);
-        setTipDataValuesForAllNodes();
-
-    }
-
-    public IntegratedMultivariateTraitLikelihood(String traitName,
-                                                 MultivariateTraitTree treeModel,
-                                                 MultivariateDiffusionModel diffusionModel,
-                                                 CompoundParameter traitParameter,
-                                                 Parameter deltaParameter,
-                                                 List<Integer> missingIndices,
-                                                 boolean cacheBranches, boolean scaleByTime, boolean useTreeLength,
-                                                 BranchRateModel rateModel,
-                                                 List<BranchRateModel> optimalValues,
-                                                 BranchRateModel strengthOfSelection,
-                                                 Model samplingDensity,
-                                                 boolean reportAsMultivariate,
-                                                 boolean reciprocalRates) {
-
-        super(traitName, treeModel, diffusionModel, traitParameter, deltaParameter, missingIndices, cacheBranches, scaleByTime,
-                useTreeLength, rateModel, optimalValues, strengthOfSelection, samplingDensity, reportAsMultivariate, reciprocalRates);
-
-        // Delegate caches to helper
-        meanCache = new double[dim * treeModel.getNodeCount()];
-
-        if (optimalValues != null) {
+        } else if (optimalValues != null) {
             cacheHelper = new OUCacheHelper(dim * treeModel.getNodeCount(), cacheBranches);
         } else {
             cacheHelper = new CacheHelper(dim * treeModel.getNodeCount(), cacheBranches);
         }
 
-
         drawnStates = new double[dim * treeModel.getNodeCount()];
         upperPrecisionCache = new double[treeModel.getNodeCount()];
         lowerPrecisionCache = new double[treeModel.getNodeCount()];
@@ -187,6 +142,57 @@ public abstract class IntegratedMultivariateTraitLikelihood extends AbstractMult
 
     }
 
+//    public IntegratedMultivariateTraitLikelihood(String traitName,
+//                                                 MultivariateTraitTree treeModel,
+//                                                 MultivariateDiffusionModel diffusionModel,
+//                                                 CompoundParameter traitParameter,
+//                                                 Parameter deltaParameter,
+//                                                 List<Integer> missingIndices,
+//                                                 boolean cacheBranches, boolean scaleByTime, boolean useTreeLength,
+//                                                 BranchRateModel rateModel,
+//                                                 List<BranchRateModel> optimalValues,
+//                                                 BranchRateModel strengthOfSelection,
+//                                                 Model samplingDensity,
+//                                                 boolean reportAsMultivariate,
+//                                                 boolean reciprocalRates) {
+//
+//        super(traitName, treeModel, diffusionModel, traitParameter, deltaParameter, missingIndices, cacheBranches, scaleByTime,
+//                useTreeLength, rateModel, optimalValues, strengthOfSelection, samplingDensity, reportAsMultivariate, reciprocalRates);
+//
+//        // Delegate caches to helper
+//        meanCache = new double[dim * treeModel.getNodeCount()];
+//
+//        if (optimalValues != null) {
+//            cacheHelper = new OUCacheHelper(dim * treeModel.getNodeCount(), cacheBranches);
+//        } else {
+//            cacheHelper = new CacheHelper(dim * treeModel.getNodeCount(), cacheBranches);
+//        }
+//
+//
+//        drawnStates = new double[dim * treeModel.getNodeCount()];
+//        upperPrecisionCache = new double[treeModel.getNodeCount()];
+//        lowerPrecisionCache = new double[treeModel.getNodeCount()];
+//        logRemainderDensityCache = new double[treeModel.getNodeCount()];
+//
+//        if (cacheBranches) {
+//            storedMeanCache = new double[dim * treeModel.getNodeCount()];
+//            storedUpperPrecisionCache = new double[treeModel.getNodeCount()];
+//            storedLowerPrecisionCache = new double[treeModel.getNodeCount()];
+//            storedLogRemainderDensityCache = new double[treeModel.getNodeCount()];
+//        }
+//
+//        // Set up reusable temporary storage
+//        Ay = new double[dimTrait];
+//        tmpM = new double[dimTrait][dimTrait];
+//        tmp2 = new double[dimTrait];
+//
+//        zeroDimVector = new double[dim];
+//
+//        missingTraits = new MissingTraits.CompletelyMissing(treeModel, missingIndices, dim);
+//        setTipDataValuesForAllNodes();
+//
+//    }
+
 
     private void setTipDataValuesForAllNodes() {
         for (int i = 0; i < treeModel.getExternalNodeCount(); i++) {
@@ -235,33 +241,40 @@ public abstract class IntegratedMultivariateTraitLikelihood extends AbstractMult
         return "\tSample internal node traits: false\n";
     }
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TRAIT_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return super.getDescription() + " (first citation) with efficiently integrated internal traits (second citation)";
+    }
+
     public List<Citation> getCitations() {
-        List<Citation> citations = super.getCitations();
-        citations.add(
-                new Citation(
-                        new Author[]{
-                                new Author("OG", "Pybus"),
-                                new Author("MA", "Suchard"),
-                                new Author("P", "Lemey"),
-                                new Author("F", "Bernadin"),
-                                new Author("A", "Rambaut"),
-                                new Author("FW", "Crawford"),
-                                new Author("RR", "Gray"),
-                                new Author("N", "Arinaminpathy"),
-                                new Author("S", "Stramer"),
-                                new Author("MP", "Busch"),
-                                new Author("E", "Delwart")
-
-                        },
-                        "Unifying the spatial epidemiology and evolution of emerging epidemics",
-                        2012,
-                        "Proceedings of the National Academy of Sciences",
-                        109,
-                        15066, 15071,
-                        Citation.Status.PUBLISHED
-                )
-        );
-        return citations;
+        List<Citation> rtn = super.getCitations();
+        rtn.add(new Citation(
+                new Author[] {
+                        new Author("OG", "Pybus"),
+                        new Author("MA", "Suchard"),
+                        new Author("P", "Lemey"),
+                        new Author("F", "Bernadin"),
+                        new Author("A", "Rambaut"),
+                        new Author("FW", "Crawford"),
+                        new Author("RR", "Gray"),
+                        new Author("N", "Arinaminpathy"),
+                        new Author("S", "Stramer"),
+                        new Author("MP", "Busch"),
+                        new Author("E", "Delwart")
+                },
+                "Unifying the spatial epidemiology and evolution of emerging epidemics",
+                2012,
+                "Proceedings of the National Academy of Sciences",
+                109,
+                15066, 15071,
+                Citation.Status.PUBLISHED
+        ));
+        return rtn;
     }
 
     public double getLogDataLikelihood() {
@@ -609,8 +622,8 @@ public abstract class IntegratedMultivariateTraitLikelihood extends AbstractMult
 
                 for (int i = 0; i < dimTrait; ++i) {
                     System.err.println("\t"
-                                    + cacheHelper.getCorrectedMeanCache()[childOffset0 + 0 * dimTrait + i] + " "
-                                    + cacheHelper.getCorrectedMeanCache()[childOffset1 + 0 * dimTrait + i]
+                            + cacheHelper.getCorrectedMeanCache()[childOffset0 + 0 * dimTrait + i] + " "
+                            + cacheHelper.getCorrectedMeanCache()[childOffset1 + 0 * dimTrait + i]
                     );
                 }
                 System.exit(-1);
diff --git a/src/dr/evomodel/continuous/IntervalLatentLiabilityLikelihood.java b/src/dr/evomodel/continuous/IntervalLatentLiabilityLikelihood.java
index 2f693a1..4626190 100644
--- a/src/dr/evomodel/continuous/IntervalLatentLiabilityLikelihood.java
+++ b/src/dr/evomodel/continuous/IntervalLatentLiabilityLikelihood.java
@@ -35,7 +35,9 @@ import dr.util.CommonCitations;
 import dr.xml.*;
 
 import java.util.ArrayList;
+import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.logging.Logger;
 
 
@@ -282,11 +284,20 @@ public class IntervalLatentLiabilityLikelihood extends AbstractModelLikelihood i
         }
     };
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TRAIT_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Intervaled latent liability model";
+    }
+
+    @Override
     public List<Citation> getCitations() {
         List<Citation> citations = new ArrayList<Citation>();
-        citations.add(
-                CommonCitations.SUCHARD_2012_LATENT
-        );
+        citations.add(CommonCitations.CYBIS_2015_ASSESSING);
         return citations;
     }
 
diff --git a/src/dr/evomodel/continuous/MultinomialLatentLiabilityLikelihood.java b/src/dr/evomodel/continuous/MultinomialLatentLiabilityLikelihood.java
index 4ae0e0b..8b4e5fa 100644
--- a/src/dr/evomodel/continuous/MultinomialLatentLiabilityLikelihood.java
+++ b/src/dr/evomodel/continuous/MultinomialLatentLiabilityLikelihood.java
@@ -36,7 +36,9 @@ import dr.util.CommonCitations;
 import dr.xml.*;
 
 import java.util.ArrayList;
+import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.logging.Logger;
 
 public class MultinomialLatentLiabilityLikelihood extends AbstractModelLikelihood implements LatentTruncation, Citable, SoftThresholdLikelihood {
@@ -308,11 +310,20 @@ public class MultinomialLatentLiabilityLikelihood extends AbstractModelLikelihoo
         }
     };
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TRAIT_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Latent Liability model";
+    }
+
+    @Override
     public List<Citation> getCitations() {
         List<Citation> citations = new ArrayList<Citation>();
-        citations.add(
-                CommonCitations.SUCHARD_2012_LATENT
-        );
+        citations.add(CommonCitations.CYBIS_2015_ASSESSING);
         return citations;
     }
 
diff --git a/src/dr/evomodel/continuous/NonPhylogeneticMultivariateTraitLikelihood.java b/src/dr/evomodel/continuous/NonPhylogeneticMultivariateTraitLikelihood.java
index 8761bb7..fe2dbe5 100644
--- a/src/dr/evomodel/continuous/NonPhylogeneticMultivariateTraitLikelihood.java
+++ b/src/dr/evomodel/continuous/NonPhylogeneticMultivariateTraitLikelihood.java
@@ -70,7 +70,7 @@ public class NonPhylogeneticMultivariateTraitLikelihood extends FullyConjugateMu
                                                      boolean reciprocalRates,
                                                      boolean exchangeableTips) {
         super(traitName, treeModel, diffusionModel, traitParameter, deltaParameter, missingIndices, cacheBranches,
-                scaleByTime, useTreeLength, rateModel, samplingDensity, reportAsMultivariate, rootPriorMean,
+                scaleByTime, useTreeLength, rateModel, null, null, null, samplingDensity, reportAsMultivariate, rootPriorMean,
                 rootPriorSampleSize, reciprocalRates);
         this.exchangeableTips = exchangeableTips;
         this.zeroHeightTip = findZeroHeightTip(treeModel);
@@ -122,21 +122,6 @@ public class NonPhylogeneticMultivariateTraitLikelihood extends FullyConjugateMu
         return treeLength;
     }
         
-    public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(
-                new Citation(
-                        new Author[]{                                                           
-                                new Author("MA", "Suchard"),
-                                new Author("P", "Lemey"),
-                                new Author("A", "Rambaut"),
-                        },
-                        Citation.Status.IN_PREPARATION
-                )
-        );
-        return citations;
-    }
-
     private class SufficientStatistics {
         double sumWeight;
         double productWeight;
diff --git a/src/dr/evomodel/continuous/OrderedLatentLiabilityLikelihood.java b/src/dr/evomodel/continuous/OrderedLatentLiabilityLikelihood.java
index f8978b3..0ee56d6 100644
--- a/src/dr/evomodel/continuous/OrderedLatentLiabilityLikelihood.java
+++ b/src/dr/evomodel/continuous/OrderedLatentLiabilityLikelihood.java
@@ -36,7 +36,9 @@ import dr.util.CommonCitations;
 import dr.xml.*;
 
 import java.util.ArrayList;
+import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.logging.Logger;
 
 
@@ -488,11 +490,20 @@ public class OrderedLatentLiabilityLikelihood extends AbstractModelLikelihood im
         }
     };
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TRAIT_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Latent Liability model";
+    }
+
+    @Override
     public List<Citation> getCitations() {
         List<Citation> citations = new ArrayList<Citation>();
-        citations.add(
-                CommonCitations.SUCHARD_2012_LATENT
-        );
+        citations.add(CommonCitations.CYBIS_2015_ASSESSING);
         return citations;
     }
 
diff --git a/src/dr/evomodel/continuous/SampledMultivariateTraitLikelihood.java b/src/dr/evomodel/continuous/SampledMultivariateTraitLikelihood.java
index 18bb86c..498097b 100644
--- a/src/dr/evomodel/continuous/SampledMultivariateTraitLikelihood.java
+++ b/src/dr/evomodel/continuous/SampledMultivariateTraitLikelihood.java
@@ -52,8 +52,9 @@ public class SampledMultivariateTraitLikelihood extends AbstractMultivariateTrai
                                               BranchRateModel rateModel, Model samplingDensity,
                                               boolean reportAsMultivariate,
                                               boolean reciprocalRates) {
-        super(traitName, treeModel, diffusionModel, traitParameter, missingIndices, cacheBranches, scaleByTime,
-                useTreeLength, rateModel, samplingDensity, reportAsMultivariate, reciprocalRates);
+        super(traitName, treeModel, diffusionModel, traitParameter, null, missingIndices, cacheBranches, scaleByTime,
+                useTreeLength, rateModel,
+                null, null, null, samplingDensity, reportAsMultivariate, reciprocalRates);
     }
 
     protected String extraInfo() {
diff --git a/src/dr/evomodel/continuous/SemiConjugateMultivariateTraitLikelihood.java b/src/dr/evomodel/continuous/SemiConjugateMultivariateTraitLikelihood.java
index 438b333..2d8b066 100644
--- a/src/dr/evomodel/continuous/SemiConjugateMultivariateTraitLikelihood.java
+++ b/src/dr/evomodel/continuous/SemiConjugateMultivariateTraitLikelihood.java
@@ -58,8 +58,8 @@ public class SemiConjugateMultivariateTraitLikelihood extends IntegratedMultivar
                                                     MultivariateNormalDistribution rootPrior,
                                                     boolean reciprocalRates) {
 
-        super(traitName, treeModel, diffusionModel, traitParameter, missingIndices, cacheBranches, scaleByTime,
-                useTreeLength, rateModel, samplingDensity, reportAsMultivariate, reciprocalRates);
+        super(traitName, treeModel, diffusionModel, traitParameter, null, missingIndices, cacheBranches, scaleByTime,
+                useTreeLength, rateModel, null, null, null, samplingDensity, reportAsMultivariate, reciprocalRates);
 
         setRootPrior(rootPrior); // Semi-conjugate multivariate normal with own mean and precision
     }
diff --git a/src/dr/evomodel/continuous/plink/PlinkImporter.java b/src/dr/evomodel/continuous/plink/PlinkImporter.java
index 52fc158..dd2d8a9 100644
--- a/src/dr/evomodel/continuous/plink/PlinkImporter.java
+++ b/src/dr/evomodel/continuous/plink/PlinkImporter.java
@@ -55,17 +55,24 @@ public class PlinkImporter implements Citable {
         parse(reader);
     }
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.DATA_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "PLINK";
+    }
+
     public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(
-                new Citation(
+        return Arrays.asList(new Citation(
                         new Author[]{
                                 new Author("MA", "Suchard"),
                                 new Author("A", "Boyko"),
                         },
                         Citation.Status.IN_PREPARATION)
         );
-        return citations;
     }
 
     private String formatTransformedValue(double value) {
diff --git a/src/dr/evomodel/epidemiology/casetocase/AbstractCase.java b/src/dr/evomodel/epidemiology/casetocase/AbstractCase.java
index 4cbd3e4..ad94bf9 100644
--- a/src/dr/evomodel/epidemiology/casetocase/AbstractCase.java
+++ b/src/dr/evomodel/epidemiology/casetocase/AbstractCase.java
@@ -25,12 +25,13 @@
 
 package dr.evomodel.epidemiology.casetocase;
 
-import dr.app.beagle.tools.Partition;
 import dr.evolution.util.Date;
 import dr.evolution.util.Taxa;
 import dr.inference.model.AbstractModel;
 import dr.inference.model.Parameter;
 
+import java.util.ArrayList;
+
 /**
  * Abstract class for outbreak; best implemented as an inner class in implementations of AbstractOutbreak
  *
@@ -47,7 +48,6 @@ public abstract class AbstractCase extends AbstractModel {
 
     protected String caseID;
     protected Taxa associatedTaxa;
-    protected double examTime;
     protected double endOfInfectiousTime;
     protected boolean wasEverInfected;
     protected Parameter infectionBranchPosition;
@@ -60,15 +60,11 @@ public abstract class AbstractCase extends AbstractModel {
         return associatedTaxa;
     }
 
-    public double getExamTime() {
-        return examTime;
-    }
-
-    public double getCullTime(){
+    public double getEndTime(){
         return endOfInfectiousTime;
     }
 
-    public abstract boolean culledYet(double time);
+    public abstract boolean noninfectiousYet(double time);
 
     public String toString(){
         return caseID;
@@ -84,6 +80,7 @@ public abstract class AbstractCase extends AbstractModel {
         return infectionBranchPosition;
     }
 
+    public abstract ArrayList<Date> getExaminationTimes();
 
     public void setInfectionBranchPosition(double value){
         infectionBranchPosition.setParameterValue(0, value);
diff --git a/src/dr/evomodel/epidemiology/casetocase/AbstractOutbreak.java b/src/dr/evomodel/epidemiology/casetocase/AbstractOutbreak.java
index de4cac2..14ef48b 100644
--- a/src/dr/evomodel/epidemiology/casetocase/AbstractOutbreak.java
+++ b/src/dr/evomodel/epidemiology/casetocase/AbstractOutbreak.java
@@ -37,8 +37,8 @@ import dr.inference.model.Parameter;
 import java.util.*;
 
 /**
- * Abstract class for outbreaks. Implements PatternList for ease of compatibility with AbstractTreeLikelihood, but there
- * is one and only one pattern.
+ * Abstract class for outbreaks. Implements PatternList for ease of compatibility with AbstractTreeLikelihood,
+ * but there is one and only one pattern.
  *
  * User: Matthew Hall
  * Date: 14/04/13
@@ -50,7 +50,7 @@ public abstract class AbstractOutbreak extends AbstractModel implements PatternL
     protected TaxonList taxa;
     private boolean hasLatentPeriods;
     protected final boolean hasGeography;
-    private final String CASE_NAME = "caseID";
+    private final String CASE_NAME = "hostID";
     protected ArrayList<AbstractCase> cases;
     protected int infectedSize = 0;
 
@@ -86,7 +86,6 @@ public abstract class AbstractOutbreak extends AbstractModel implements PatternL
 
     public abstract double getLatentPeriod(AbstractCase aCase);
 
-
     public double getKernelValue(AbstractCase a, AbstractCase b, SpatialKernel kernel){
         if(!hasGeography){
             return 1;
@@ -95,7 +94,6 @@ public abstract class AbstractOutbreak extends AbstractModel implements PatternL
         }
     }
 
-
     // all the kernel values going TO case a (this is symmetric, usually, but potentially might not be)
 
     public double[] getKernelValues(AbstractCase aCase, SpatialKernel kernel){
@@ -156,8 +154,6 @@ public abstract class AbstractOutbreak extends AbstractModel implements PatternL
         return taxa.getTaxonCount();
     }
 
-    // with an exact correspondence between taxa and states, the following five methods are ill-fitting, but here if
-    // needed.
     // @todo if these are never going to be used, get them to throw exceptions
 
     public int[] getPattern(int patternIndex){
diff --git a/src/dr/evomodel/epidemiology/casetocase/BranchMapModel.java b/src/dr/evomodel/epidemiology/casetocase/BranchMapModel.java
index 4673595..e14ec5d 100644
--- a/src/dr/evomodel/epidemiology/casetocase/BranchMapModel.java
+++ b/src/dr/evomodel/epidemiology/casetocase/BranchMapModel.java
@@ -106,7 +106,7 @@ public class BranchMapModel extends AbstractModel {
         // nothing to do
     }
 
-    public int size(){
+    public int size() {
         return map.length;
     }
 
diff --git a/src/dr/evomodel/epidemiology/casetocase/CaseToCaseTransmissionLikelihood.java b/src/dr/evomodel/epidemiology/casetocase/CaseToCaseTransmissionLikelihood.java
index b5aca13..a4a68d2 100644
--- a/src/dr/evomodel/epidemiology/casetocase/CaseToCaseTransmissionLikelihood.java
+++ b/src/dr/evomodel/epidemiology/casetocase/CaseToCaseTransmissionLikelihood.java
@@ -25,6 +25,7 @@
 
 package dr.evomodel.epidemiology.casetocase;
 
+import dr.app.tools.NexusExporter;
 import dr.evomodel.coalescent.DemographicModel;
 import dr.evomodel.epidemiology.casetocase.periodpriors.AbstractPeriodPriorDistribution;
 import dr.inference.distribution.ParametricDistributionModel;
@@ -33,6 +34,7 @@ import dr.inference.loggers.Loggable;
 import dr.inference.model.*;
 import dr.xml.*;
 
+import java.io.PrintStream;
 import java.util.*;
 
 /**
@@ -74,7 +76,7 @@ public class CaseToCaseTransmissionLikelihood extends AbstractModelLikelihood im
     private double treeLogProb;
     private double storedTreeLogProb;
 
-    private ParametricDistributionModel intialInfectionTimePrior;
+    private ParametricDistributionModel initialInfectionTimePrior;
     private HashMap<AbstractCase, Double> indexCasePrior;
 
     private final boolean hasGeography;
@@ -107,7 +109,7 @@ public class CaseToCaseTransmissionLikelihood extends AbstractModelLikelihood im
         hasGeography = spatialKernal!=null;
         this.hasLatentPeriods = treeLikelihood.hasLatentPeriods();
 
-        this.intialInfectionTimePrior = intialInfectionTimePrior;
+        this.initialInfectionTimePrior = intialInfectionTimePrior;
 
 
         HashMap<AbstractCase, Double> weightMap = outbreak.getWeightMap();
@@ -115,13 +117,17 @@ public class CaseToCaseTransmissionLikelihood extends AbstractModelLikelihood im
         double totalWeights = 0;
 
         for(AbstractCase aCase : weightMap.keySet()){
-            totalWeights += weightMap.get(aCase);
+            if(aCase.wasEverInfected) {
+                totalWeights += weightMap.get(aCase);
+            }
         }
 
         indexCasePrior = new HashMap<AbstractCase, Double>();
 
         for(AbstractCase aCase : outbreak.getCases()){
-            indexCasePrior.put(aCase, weightMap.get(aCase)/totalWeights );
+            if(aCase.wasEverInfected) {
+                indexCasePrior.put(aCase, weightMap.get(aCase) / totalWeights);
+            }
         }
 
         sortEvents();
@@ -208,10 +214,6 @@ public class CaseToCaseTransmissionLikelihood extends AbstractModelLikelihood im
 
     public double getLogLikelihood() {
 
-        if(DEBUG){
-            treeLikelihood.debugOutputTree("blah.nex", true);
-        }
-
         if(!likelihoodKnown) {
             if (!treeProbKnown) {
                 treeLikelihood.prepareTimings();
@@ -245,8 +247,8 @@ public class CaseToCaseTransmissionLikelihood extends AbstractModelLikelihood im
                                 if (indexCasePrior != null) {
                                     transLogProb += Math.log(indexCasePrior.get(thisCase));
                                 }
-                                if (intialInfectionTimePrior != null) {
-                                    transLogProb += intialInfectionTimePrior.logPdf(currentEventTime);
+                                if (initialInfectionTimePrior != null) {
+                                    transLogProb += initialInfectionTimePrior.logPdf(currentEventTime);
                                 }
 
                                 if (!hasLatentPeriods) {
@@ -355,6 +357,7 @@ public class CaseToCaseTransmissionLikelihood extends AbstractModelLikelihood im
 
                     transProbKnown = true;
                 } catch (BadPartitionException e) {
+
                     transLogProb = Double.NEGATIVE_INFINITY;
                     transProbKnown = true;
                     logLikelihood = Double.NEGATIVE_INFINITY;
@@ -394,7 +397,7 @@ public class CaseToCaseTransmissionLikelihood extends AbstractModelLikelihood im
                 for (String category : outbreak.getInfectiousCategories()) {
 
                     Double[] infPeriodsInThisCategory = infectiousPeriodsByCategory.get(category)
-                            .toArray(new Double[infectiousPeriodsByCategory.size()]);
+                            .toArray(new Double[infectiousPeriodsByCategory.get(category).size()]);
 
                     AbstractPeriodPriorDistribution hyperprior = outbreak.getInfectiousCategoryPrior(category);
 
@@ -441,23 +444,6 @@ public class CaseToCaseTransmissionLikelihood extends AbstractModelLikelihood im
     }
 
 
-    // Gibbs operator needs this
-
-    public double calculateTempLogLikelihood(AbstractCase[] map){
-
-        // todo probably this should tell PartitionedTreeModel what needs recalculating
-
-        BranchMapModel branchMap = treeLikelihood.getBranchMap();
-
-        AbstractCase[] trueMap = branchMap.getArrayCopy();
-        branchMap.setAll(map, false);
-        double out = getLogLikelihood();
-        branchMap.setAll(trueMap, false);
-
-        return out;
-    }
-
-
     public void makeDirty() {
         likelihoodKnown = false;
         transProbKnown = false;
@@ -510,10 +496,6 @@ public class CaseToCaseTransmissionLikelihood extends AbstractModelLikelihood im
 
         indexCase = out.get(0).getCase();
 
-        if(indexCase == null){
-            System.out.println();
-        }
-
         sortedTreeEvents = out;
 
     }
@@ -600,7 +582,8 @@ public class CaseToCaseTransmissionLikelihood extends AbstractModelLikelihood im
                 new ElementRule(CaseToCaseTreeLikelihood.class, "The tree likelihood"),
                 new ElementRule(SpatialKernel.class, "The spatial kernel", 0, 1),
                 new ElementRule(TRANSMISSION_RATE, Parameter.class, "The transmission rate"),
-                new ElementRule(INITIAL_INFECTION_TIME_PRIOR, ParametricDistributionModel.class, "The prior probability distibution of the first infection", true)
+                new ElementRule(INITIAL_INFECTION_TIME_PRIOR, ParametricDistributionModel.class, "The prior " +
+                        "probability distribution of the first infection", true)
         };
 
     };
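
The weight-normalisation change above restricts the index-case prior to hosts that were ever infected. A standalone sketch of that calculation, with a hypothetical class and string-keyed maps standing in for the AbstractCase weight map:

    import java.util.HashMap;
    import java.util.Map;

    // Hypothetical illustration of the index-case prior normalisation.
    final class IndexCasePriorSketch {

        // weights: per-host index-case weight; everInfected: whether each host was ever infected
        static Map<String, Double> normalise(Map<String, Double> weights,
                                             Map<String, Boolean> everInfected) {
            double total = 0.0;
            for (Map.Entry<String, Double> entry : weights.entrySet()) {
                if (Boolean.TRUE.equals(everInfected.get(entry.getKey()))) {
                    total += entry.getValue();
                }
            }
            Map<String, Double> prior = new HashMap<String, Double>();
            for (Map.Entry<String, Double> entry : weights.entrySet()) {
                if (Boolean.TRUE.equals(everInfected.get(entry.getKey()))) {
                    prior.put(entry.getKey(), entry.getValue() / total);
                }
            }
            return prior;
        }
    }
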
diff --git a/src/dr/evomodel/epidemiology/casetocase/CaseToCaseTreeLikelihood.java b/src/dr/evomodel/epidemiology/casetocase/CaseToCaseTreeLikelihood.java
index 0a0eeb5..c551829 100644
--- a/src/dr/evomodel/epidemiology/casetocase/CaseToCaseTreeLikelihood.java
+++ b/src/dr/evomodel/epidemiology/casetocase/CaseToCaseTreeLikelihood.java
@@ -24,17 +24,9 @@
  */
 
 package dr.evomodel.epidemiology.casetocase;
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.FileReader;
-import java.io.FileWriter;
 import java.io.IOException;
 import java.io.PrintStream;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
+import java.util.*;
 
 import dr.app.tools.NexusExporter;
 import dr.evolution.tree.FlexibleNode;
@@ -53,7 +45,6 @@ import dr.inference.loggers.Loggable;
 import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
-import dr.math.MathUtils;
 import dr.util.Author;
 import dr.util.Citable;
 import dr.util.Citation;
@@ -74,6 +65,8 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
 
     protected static final boolean DEBUG = false;
 
+    protected static double tolerance = 1E-10;
+
     /* The phylogenetic tree. */
 
     protected int noTips;
@@ -81,11 +74,9 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
 
     /* Mapping of outbreak to branches on the tree; old version is stored before operators are applied */
 
-    protected BranchMapModel branchMap;
 
     /* Matches outbreak to external nodes */
 
-    protected HashMap<AbstractCase, Integer> tipMap;
     private double estimatedLastSampleTime;
     protected TreeTraitProvider.Helper treeTraits = new Helper();
 
@@ -104,6 +95,9 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
     private double[] storedLatentPeriods;
     protected boolean[] recalculateCaseFlags;
 
+    protected HashMap<AbstractCase,Treelet> elementsAsTrees;
+    protected HashMap<AbstractCase,Treelet> storedElementsAsTrees;
+
     //because of the way the former works, we need a maximum value of the time from first infection to root node.
 
     protected Parameter maxFirstInfToRoot;
@@ -123,24 +117,26 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
 
     // Basic constructor.
 
-    public CaseToCaseTreeLikelihood(PartitionedTreeModel virusTree, AbstractOutbreak caseData,
+    public CaseToCaseTreeLikelihood(PartitionedTreeModel tree, AbstractOutbreak caseData,
                                     Parameter maxFirstInfToRoot)
             throws TaxonList.MissingTaxonException {
-        this(CASE_TO_CASE_TREE_LIKELIHOOD, virusTree, caseData, maxFirstInfToRoot);
+        this(CASE_TO_CASE_TREE_LIKELIHOOD, tree, caseData, maxFirstInfToRoot);
     }
 
     // Constructor for an instance with a non-default name
 
-    public CaseToCaseTreeLikelihood(String name, PartitionedTreeModel virusTree, AbstractOutbreak caseData,
+    public CaseToCaseTreeLikelihood(String name, PartitionedTreeModel tree, AbstractOutbreak caseData,
                                     Parameter maxFirstInfToRoot) {
-        super(name, caseData, virusTree);
+
+
+        super(name, caseData, tree);
 
 
         if(stateCount!=treeModel.getExternalNodeCount()){
             throw new RuntimeException("There are duplicate tip outbreak.");
         }
 
-        noTips = virusTree.getExternalNodeCount();
+        noTips = tree.getExternalNodeCount();
 
 
         //subclasses should add outbreak as a model if it contains any information that ever changes
@@ -155,26 +151,7 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
 
         //map outbreak to tips
 
-        branchMap = virusTree.getBranchMap();
-
-        addModel(branchMap);
-
-        tipMap = new HashMap<AbstractCase, Integer>();
-
-        //map the outbreak to the external nodes
-        for(int i=0; i<virusTree.getExternalNodeCount(); i++){
-            TreeModel.Node currentExternalNode = (TreeModel.Node)virusTree.getExternalNode(i);
-            Taxon currentTaxon = currentExternalNode.taxon;
-            for(AbstractCase thisCase : outbreak.getCases()){
-                if(thisCase.wasEverInfected()) {
-                    for (Taxon caseTaxon : thisCase.getAssociatedTaxa()) {
-                        if (caseTaxon.equals(currentTaxon)) {
-                            tipMap.put(thisCase, currentExternalNode.getNumber());
-                        }
-                    }
-                }
-            }
-        }
+        addModel(tree.getBranchMap());
 
         hasLatentPeriods = outbreak.hasLatentPeriods();
 
@@ -228,16 +205,7 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
         likelihoodKnown = false;
     }
 
-    protected void prepareTree(String startingNetworkFileName){
-        if(startingNetworkFileName==null){
-            partitionAccordingToRandomTT(true);
-        } else {
-            partitionAccordingToSpecificTT(startingNetworkFileName);
-        }
 
-        prepareTimings();
-        likelihoodKnown = false;
-    }
 
     public AbstractOutbreak getOutbreak(){
         return outbreak;
@@ -251,10 +219,12 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
 
     private double getLatestTaxonTime(){
         double latestTime = Double.NEGATIVE_INFINITY;
-        for(AbstractCase thisCase : outbreak.getCases()){
-            if (thisCase.wasEverInfected() && thisCase.getExamTime() > latestTime) {
-                latestTime = thisCase.getExamTime();
+        for(int i=0; i<treeModel.getExternalNodeCount(); i++){
+            Taxon taxon = treeModel.getNodeTaxon(treeModel.getExternalNode(i));
+            if(taxon.getDate().getTimeValue() > latestTime){
+                latestTime = taxon.getDate().getTimeValue();
             }
+
         }
         return latestTime;
     }
@@ -269,35 +239,93 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
 
 
 
+    protected void explodeTree(){
 
+        for(int i=0; i<outbreak.size(); i++){
+            AbstractCase aCase = outbreak.getCase(i);
+            if(aCase.wasEverInfected() && elementsAsTrees.get(aCase)==null){
 
-    // ************************************************************************************
-    // EXTENDED VERSION METHODS
-    // ************************************************************************************
+                NodeRef partitionRoot = ((PartitionedTreeModel)treeModel).getEarliestNodeInElement(aCase);
 
-    /* check if the given node is tip-linked under the current painting (the tip corresponding to its painting is
-    a descendant of it
-     */
+                double extraHeight;
 
-    public boolean isAncestral(NodeRef node){
-        return isAncestral(node, branchMap);
-    }
+                if(treeModel.isRoot(partitionRoot)){
+                    extraHeight = maxFirstInfToRoot.getParameterValue(0)
+                            * aCase.getInfectionBranchPosition().getParameterValue(0);
+                } else {
+                    extraHeight = treeModel.getBranchLength(partitionRoot)
+                            * aCase.getInfectionBranchPosition().getParameterValue(0);
+                }
+
+                FlexibleNode newRoot = new FlexibleNode();
+
+                FlexibleTree littleTree = new FlexibleTree(newRoot);
+                littleTree.beginTreeEdit();
+
+                if (!treeModel.isExternal(partitionRoot)) {
+                    for (int j = 0; j < treeModel.getChildCount(partitionRoot); j++) {
+                        copyElementToTreelet(littleTree, treeModel.getChild(partitionRoot, j), newRoot, aCase);
+                    }
+                }
 
-    private boolean isAncestral(NodeRef node, BranchMapModel map){
-        NodeRef tip = treeModel.getNode(tipMap.get(map.get(node.getNumber())));
-        if(tip==node){
-            return true;
+                littleTree.endTreeEdit();
+
+                littleTree.resolveTree();
+
+                Treelet treelet = new Treelet(littleTree,
+                        littleTree.getRootHeight() + extraHeight);
+
+                elementsAsTrees.put(aCase, treelet);
+            }
         }
-        NodeRef parent = tip;
-        while(parent!= treeModel.getRoot()){
-            parent = treeModel.getParent(parent);
-            if(parent==node){
-                return true;
+    }
+
+    private void copyElementToTreelet(FlexibleTree littleTree, NodeRef oldNode, NodeRef newParent,
+                                      AbstractCase element){
+        if(element.wasEverInfected()) {
+            if (getBranchMap().get(oldNode.getNumber()) == element) {
+                if (treeModel.isExternal(oldNode)) {
+                    NodeRef newTip = new FlexibleNode(new Taxon(treeModel.getNodeTaxon(oldNode).getId()));
+                    littleTree.addChild(newParent, newTip);
+                    littleTree.setBranchLength(newTip, treeModel.getBranchLength(oldNode));
+                } else {
+                    NodeRef newChild = new FlexibleNode();
+                    littleTree.addChild(newParent, newChild);
+                    littleTree.setBranchLength(newChild, treeModel.getBranchLength(oldNode));
+                    for (int i = 0; i < treeModel.getChildCount(oldNode); i++) {
+                        copyElementToTreelet(littleTree, treeModel.getChild(oldNode, i), newChild, element);
+                    }
+                }
+            } else {
+                // we need a new tip
+                NodeRef transmissionTip = new FlexibleNode(
+                        new Taxon("Transmission_" + getBranchMap().get(oldNode.getNumber()).getName()));
+                double parentTime = getNodeTime(treeModel.getParent(oldNode));
+                double childTime = getInfectionTime(getBranchMap().get(oldNode.getNumber()));
+                littleTree.addChild(newParent, transmissionTip);
+                littleTree.setBranchLength(transmissionTip, childTime - parentTime);
             }
         }
-        return false;
     }
 
+    protected class Treelet extends FlexibleTree {
+
+        private double zeroHeight;
+
+        protected Treelet(FlexibleTree tree, double zeroHeight){
+            super(tree);
+            this.zeroHeight = zeroHeight;
+
+        }
+
+        protected double getZeroHeight(){
+            return zeroHeight;
+        }
+
+        protected void setZeroHeight(double rootBranchLength){
+            this.zeroHeight = rootBranchLength;
+        }
+    }
 
 
 
@@ -307,7 +335,7 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
     public HashSet<AbstractCase> descendantTipPartitions(NodeRef node, HashMap<Integer, HashSet<AbstractCase>> map){
         HashSet<AbstractCase> out = new HashSet<AbstractCase>();
         if(treeModel.isExternal(node)){
-            out.add(branchMap.get(node.getNumber()));
+            out.add(getBranchMap().get(node.getNumber()));
             if(map!=null){
                 map.put(node.getNumber(), out);
             }
@@ -361,7 +389,7 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
 
                     }
                 }
-            } else if (model == branchMap){
+            } else if (model == getBranchMap()){
                 if(object instanceof ArrayList){
 
                     for(int i=0; i<((ArrayList) object).size(); i++){
@@ -375,7 +403,7 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
                         NodeRef parent = treeModel.getParent(node);
 
                         if(parent!=null){
-                            recalculateCase(branchMap.get(parent.getNumber()));
+                            recalculateCase(getBranchMap().get(parent.getNumber()));
                         }
                     }
                 } else {
@@ -461,7 +489,7 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
     // **************************************************************
 
     public final BranchMapModel getBranchMap(){
-        return branchMap;
+        return ((PartitionedTreeModel)treeModel).getBranchMap();
     }
 
     public final PartitionedTreeModel getTreeModel(){
@@ -509,11 +537,11 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
 
     private boolean isAllowed(NodeRef node){
         if(!treeModel.isRoot(node)){
-            AbstractCase childCase = branchMap.get(node.getNumber());
-            AbstractCase parentCase = branchMap.get(treeModel.getParent(node).getNumber());
+            AbstractCase childCase = getBranchMap().get(node.getNumber());
+            AbstractCase parentCase = getBranchMap().get(treeModel.getParent(node).getNumber());
             if(childCase!=parentCase){
                 double infectionTime = infectionTimes[outbreak.getCaseIndex(childCase)];
-                if(infectionTime>parentCase.getCullTime()
+                if(infectionTime>parentCase.getEndTime()
                         || (hasLatentPeriods && infectionTime<infectiousTimes[outbreak.getCaseIndex(parentCase)])){
                     return false;
                 }
@@ -550,7 +578,7 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
             return infectionTimes[outbreak.getCaseIndex(thisCase)];
         } else {
             if(thisCase.wasEverInfected()) {
-                NodeRef child = ((PartitionedTreeModel)treeModel).getEarliestNodeInPartition(thisCase);
+                NodeRef child = ((PartitionedTreeModel)treeModel).getEarliestNodeInElement(thisCase);
                 NodeRef parent = treeModel.getParent(child);
 
                 if (parent != null) {
@@ -564,7 +592,7 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
 
                     return getInfectionTime(min, max, thisCase);
                 } else {
-                    return getRootInfectionTime(branchMap);
+                    return getRootInfectionTime(getBranchMap());
                 }
             } else {
                 return Double.POSITIVE_INFINITY;
@@ -597,7 +625,7 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
 
     public void setInfectionHeight(AbstractCase thisCase, double height){
         if(thisCase.wasEverInfected()) {
-            NodeRef child = ((PartitionedTreeModel)treeModel).getEarliestNodeInPartition(thisCase);
+            NodeRef child = ((PartitionedTreeModel)treeModel).getEarliestNodeInElement(thisCase);
             NodeRef parent = treeModel.getParent(child);
 
             double minHeight = treeModel.getNodeHeight(child);
@@ -654,11 +682,11 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
 
                 if (!hasLatentPeriods) {
                     double infectionTime = getInfectionTime(thisCase);
-                    double cullTime = thisCase.getCullTime();
+                    double cullTime = thisCase.getEndTime();
                     infectiousPeriods[outbreak.getCaseIndex(thisCase)] = cullTime - infectionTime;
                 } else {
                     double infectiousTime = getInfectiousTime(thisCase);
-                    double cullTime = thisCase.getCullTime();
+                    double cullTime = thisCase.getEndTime();
                     infectiousPeriods[outbreak.getCaseIndex(thisCase)] = cullTime - infectiousTime;
                 }
             } else {
@@ -766,7 +794,7 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
 
     public double getInfectedPeriod(AbstractCase thisCase){
         if(thisCase.wasEverInfected) {
-            return thisCase.getCullTime() - getInfectionTime(thisCase);
+            return thisCase.getEndTime() - getInfectionTime(thisCase);
         }
         return 0;
     }
@@ -804,293 +832,19 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
     }
 
     protected double getRootInfectionTime(){
-        AbstractCase rootCase = branchMap.get(treeModel.getRoot().getNumber());
+        AbstractCase rootCase = getBranchMap().get(treeModel.getRoot().getNumber());
         return getInfectionTime(rootCase);
     }
 
-
-
-    /* Populates the branch map for external nodes */
-
-    private AbstractCase[] prepareExternalNodeMap(AbstractCase[] map){
-        for(int i=0; i< treeModel.getExternalNodeCount(); i++){
-            TreeModel.Node currentExternalNode = (TreeModel.Node) treeModel.getExternalNode(i);
-            Taxon currentTaxon = currentExternalNode.taxon;
-            for(AbstractCase thisCase : outbreak.getCases()){
-                if(thisCase.wasEverInfected()) {
-                    for (Taxon caseTaxon : thisCase.getAssociatedTaxa()) {
-                        if (caseTaxon.equals(currentTaxon)) {
-                            map[currentExternalNode.getNumber()] = thisCase;
-                        }
-                    }
-                }
-            }
-        }
-        return map;
-    }
-
-/*  The CSV file should have a header, and then lines matching each case to its infector*/
-
-    private void partitionAccordingToSpecificTT(String networkFileName){
-        System.out.println("Using specified starting transmission tree.");
-        try{
-            BufferedReader reader = new BufferedReader (new FileReader(networkFileName));
-            HashMap<AbstractCase, AbstractCase> specificParentMap = new HashMap<AbstractCase, AbstractCase>();
-            // skip header line
-            reader.readLine();
-            String currentLine = reader.readLine();
-            while(currentLine!=null){
-                currentLine = currentLine.replace("\"", "");
-                String[] splitLine = currentLine.split("\\,");
-                if(!splitLine[1].equals("Start")){
-                    specificParentMap.put(outbreak.getCase(splitLine[0]), outbreak.getCase(splitLine[1]));
-                } else {
-                    specificParentMap.put(outbreak.getCase(splitLine[0]), null);
-                }
-                currentLine = reader.readLine();
-            }
-            reader.close();
-            partitionAccordingToSpecificTT(specificParentMap);
-        } catch(IOException e){
-            throw new RuntimeException("Cannot read file: " + networkFileName );
-        }
-    }
-
-    private void partitionAccordingToSpecificTT(HashMap<AbstractCase, AbstractCase> map){
-        branchMap.setAll(prepareExternalNodeMap(new AbstractCase[treeModel.getNodeCount()]), true);
-
-        AbstractCase firstCase=null;
-        for(AbstractCase aCase : outbreak.getCases()){
-            if(aCase.wasEverInfected() && map.get(aCase)==null){
-                firstCase = aCase;
-            }
-        }
-        if(firstCase==null){
-            throw new RuntimeException("Given starting network is not compatible with the starting tree");
-        }
-        NodeRef root = treeModel.getRoot();
-        specificallyPartitionUpwards(root, firstCase, map);
-        if(!((PartitionedTreeModel)treeModel).checkPartitions()){
-            throw new RuntimeException("Given starting network is not compatible with the starting tree");
-        }
-
-    }
-
-    private void specificallyPartitionUpwards(NodeRef node, AbstractCase thisCase,
-                                              HashMap<AbstractCase, AbstractCase> map){
-        if(treeModel.isExternal(node)){
-            return;
-        }
-        branchMap.set(node.getNumber(), thisCase, true);
-        if(isAncestral(node)){
-            for(int i=0; i<treeModel.getChildCount(node); i++){
-                specificallyPartitionUpwards(treeModel.getChild(node, i), thisCase, map);
-            }
-        } else {
-            branchMap.set(node.getNumber(), null, true);
-            HashSet<AbstractCase> children = new HashSet<AbstractCase>();
-            for(AbstractCase aCase : outbreak.getCases()){
-                if(map.get(aCase)==thisCase){
-                    children.add(aCase);
-                }
-            }
-            HashSet<AbstractCase> relevantChildren = new HashSet<AbstractCase>(children);
-            for(AbstractCase child: children){
-                int tipNo = tipMap.get(child);
-                NodeRef currentNode = treeModel.getExternalNode(tipNo);
-                while(currentNode!=node && currentNode!=null){
-                    currentNode = treeModel.getParent(currentNode);
-                }
-                if(currentNode==null){
-                    relevantChildren.remove(child);
-                }
-            }
-            if(relevantChildren.size()==1){
-                //no creep
-                AbstractCase child = relevantChildren.iterator().next();
-                branchMap.set(node.getNumber(), child, true);
-            } else {
-                branchMap.set(node.getNumber(), thisCase, true);
-            }
-            for(int i=0; i<treeModel.getChildCount(node); i++){
-                specificallyPartitionUpwards(treeModel.getChild(node, i), branchMap.get(node.getNumber()), map);
-            }
-        }
-
-    }
-
-
-    /* Assigns a phylogenetic tree node and its children to a partition according to a specified map of child to parent
-    outbreak. This only works on the non-extended version right now, watch it. */
-
-    private AbstractCase specificallyAssignNode(TreeModel.Node node, AbstractCase[] map,
-                                                HashMap<AbstractCase, AbstractCase> parents){
-        if(node.isExternal()){
-            return map[node.getNumber()];
-        } else {
-            AbstractCase[] childPaintings = new AbstractCase[2];
-            for(int i=0; i<node.getChildCount(); i++){
-                childPaintings[i] = specificallyAssignNode(node.getChild(i), map, parents);
-            }
-            if(parents.get(childPaintings[1])==childPaintings[0]){
-                map[node.getNumber()]=childPaintings[0];
-            } else if(parents.get(childPaintings[0])==childPaintings[1]){
-                map[node.getNumber()]=childPaintings[1];
-            } else {
-                throw new RuntimeException("This network does not appear to be compatible with the tree");
-            }
-            return map[node.getNumber()];
-        }
-    }
-
-    public void writeNetworkToFile(String fileName){
-        try{
-            BufferedWriter writer = new BufferedWriter(new FileWriter(fileName));
-            writer.write("Case,Parent");
-            writer.newLine();
-            for(int i=0; i< treeModel.getExternalNodeCount(); i++){
-                TreeModel.Node extNode = (TreeModel.Node) treeModel.getExternalNode(i);
-                String tipName = extNode.taxon.toString();
-                String infector;
-                try{
-                    infector = ((PartitionedTreeModel)treeModel).getInfector(extNode).getName();
-                } catch(NullPointerException e){
-                    infector = "Start";
-                }
-                writer.write(tipName + "," + infector);
-                writer.newLine();
-            }
-            writer.close();
-        } catch(IOException e) {
-            System.out.println("Failed to write to file");
-        }
-
-    }
-
-    /*Given a new tree with no labels, associates each of the terminal branches with the relevant case and then
-    * generates a random partition of the rest of the tree to start off with. If checkNonZero is true in
-    * randomlyAssignNode then the network will be checked to prohibit links with zero (or rounded to zero)
-    * likelihood first. This always uses a non-extended partition. */
-
-    private void partitionAccordingToRandomTT(boolean checkNonZero){
-        boolean gotOne = false;
-        int tries = 1;
-        System.out.println("Generating a random starting partition of the tree (checking nonzero likelihood for all " +
-                "branches and repeating up to 100 times until a start with nonzero likelihood is found)");
-        System.out.print("Attempt: ");
-        while(!gotOne){
-
-            likelihoodKnown = false;
-            boolean failed = false;
-            System.out.print(tries + "...");
-            branchMap.setAll(prepareExternalNodeMap(new AbstractCase[treeModel.getNodeCount()]), true);
-            //Warning - if the BadPartitionException in randomlyAssignNode might be caused by a bug rather than both
-            //likelihoods rounding to zero, you want to stop catching this to investigate.
-
-            try{
-                partitionAccordingToRandomTT(branchMap, checkNonZero);
-            } catch(BadPartitionException e){
-                failed = true;
-            }
-
-            makeDirty();
-
-            infectionTimes = getInfectionTimes(true);
-            if(hasLatentPeriods){
-                infectiousTimes = getInfectiousTimes(true);
-            }
-
-            infectiousPeriods = getInfectiousPeriods(true);
-            if(hasLatentPeriods){
-                latentPeriods = getLatentPeriods(true);
-            }
-
-            if(!failed && calculateLogLikelihood()!=Double.NEGATIVE_INFINITY){
-                gotOne = true;
-                System.out.println("found.");
-            }
-            tries++;
-            if(tries==101){
-                System.out.println("giving " +
-                        "up.");
-                throw new RuntimeException("Failed to find a starting transmission tree with nonzero likelihood");
-            }
-        }
-    }
-
-
-
-    /* Partitions a phylogenetic tree randomly; if checkNonZero is true, make sure all branch likelihoods are nonzero
-    in the process (this sometimes still results in a zero likelihood for the whole tree, but is much less likely to).
-    */
-
-    private BranchMapModel partitionAccordingToRandomTT(BranchMapModel map, boolean checkNonZero){
-        makeDirty();
-        TreeModel.Node root = (TreeModel.Node) treeModel.getRoot();
-        randomlyAssignNode(root, map, checkNonZero);
-
-        return map;
+    public void outputTreeToFile(String fileName, boolean includeTransmissionNodes){
+        outputTreeToFile(getBranchMap(), fileName, includeTransmissionNodes);
     }
 
-    private AbstractCase randomlyAssignNode(TreeModel.Node node, BranchMapModel map, boolean checkNonZero){
-        //this makes a non-extended partition. This is OK, but if it keeps giving zero likelihoods then you could do
-        //something else
 
-        if(node.isExternal()){
-            return map.get(node.getNumber());
-        } else {
-
-            AbstractCase[] choices = new AbstractCase[2];
-            for(int i=0; i<node.getChildCount(); i++){
-                if((map.get(node.getChild(i).getNumber())==null)){
-                    choices[i] = randomlyAssignNode(node.getChild(i), map, checkNonZero);
-                } else {
-                    choices[i] = map.get(node.getChild(i).getNumber());
-                }
-            }
-            int randomSelection = MathUtils.nextInt(2);
-            int decision;
-            if(checkNonZero){
-                Boolean[] branchLogLs = new Boolean[2];
-                for(int i=0; i<2; i++){
-                    double nodeTime = getNodeTime(node);
-                    double branchLength = getNodeTime(treeModel.getChild(node, 1-i)) - getNodeTime(node);
-                    AbstractCase infector = choices[i];
-                    AbstractCase infectee = choices[1-i];
-
-                    branchLogLs[i] = !infector.culledYet(nodeTime
-                            + infectee.getInfectionBranchPosition().getParameterValue(0)*branchLength);
-                }
-                if(!branchLogLs[0] && !branchLogLs[1]){
-                    throw new BadPartitionException("Both branch possibilities have zero likelihood: "
-                            +node.toString()+", outbreak " + choices[0].getName() + " and " + choices[1].getName() + ".");
-                } else if(!branchLogLs[0] || !branchLogLs[1]){
-                    if(!branchLogLs[0]){
-                        decision = 1;
-                    } else {
-                        decision = 0;
-                    }
-                } else {
-                    decision = randomSelection;
-                }
-            } else {
-                decision = randomSelection;
-            }
-            AbstractCase winner = choices[decision];
-            map.getArray()[node.getNumber()]=winner;
-            return winner;
-        }
-    }
-
-    public void debugOutputTree(String fileName, boolean rewire){
-        debugOutputTree(branchMap, fileName, rewire);
-    }
-
-
-    public void debugOutputTree(BranchMapModel map, String fileName, boolean rewire){
+    public void outputTreeToFile(BranchMapModel map, String fileName, boolean includeTransmissionNodes){
         try{
             FlexibleTree treeCopy;
-            if(!rewire){
+            if(!includeTransmissionNodes){
                 treeCopy = new FlexibleTree(treeModel);
                 for(int j=0; j<treeCopy.getNodeCount(); j++){
                     FlexibleNode node = (FlexibleNode)treeCopy.getNode(j);
@@ -1099,14 +853,14 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
                     node.setAttribute(PARTITIONS_KEY, map.get(node.getNumber()));
                 }
             } else {
-                treeCopy = rewireTree(treeModel);
+                treeCopy = addTransmissionNodes(treeModel);
             }
             NexusExporter testTreesOut = new NexusExporter(new PrintStream(fileName));
             testTreesOut.exportTree(treeCopy);
         } catch (IOException ignored) {System.out.println("IOException");}
     }
 
-    public FlexibleTree rewireTree(Tree tree){
+    public FlexibleTree addTransmissionNodes(Tree tree){
         prepareTimings();
 
         FlexibleTree outTree = new FlexibleTree(tree, true);
@@ -1115,12 +869,12 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
             FlexibleNode node = (FlexibleNode)outTree.getNode(j);
             node.setAttribute("Number", node.getNumber());
             node.setAttribute("Time", heightToTime(node.getHeight()));
-            node.setAttribute(PARTITIONS_KEY, branchMap.get(node.getNumber()));
+            node.setAttribute(PARTITIONS_KEY, getBranchMap().get(node.getNumber()));
         }
 
         for(AbstractCase aCase : outbreak.getCases()){
             if(aCase.wasEverInfected()) {
-                NodeRef originalNode = ((PartitionedTreeModel)treeModel).getEarliestNodeInPartition(aCase);
+                NodeRef originalNode = ((PartitionedTreeModel)treeModel).getEarliestNodeInElement(aCase);
 
                 int infectionNodeNo = originalNode.getNumber();
                 if (!treeModel.isRoot(originalNode)) {
@@ -1152,7 +906,7 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
                     FlexibleNode infectionNode = new FlexibleNode();
                     infectionNode.setHeight(heightToInstallRoot);
                     infectionNode.setAttribute("Time", heightToTime(heightToInstallRoot));
-                    infectionNode.setAttribute(PARTITIONS_KEY, "The_Ether");
+                    infectionNode.setAttribute(PARTITIONS_KEY, "Origin");
                     outTree.addChild(infectionNode, newNode);
                     newNode.setLength(heightToInstallRoot - getHeight(originalNode));
                     outTree.setRoot(infectionNode);
@@ -1364,13 +1118,23 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
 
     }
 
-    public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(new Citation(new Author[]{new Author("M", "Hall"), new Author("A", "Rambaut")},
-                Citation.Status.IN_PREPARATION));
-        return citations;
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TREE_PRIORS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Case to Case Transmission Tree model";
     }
 
+    public List<Citation> getCitations() {
+        return Arrays.asList(new Citation(
+                new Author[]{new Author("M", "Hall"), new Author("M", "Woolhouse"), new Author("A", "Rambaut")},
+                "Epidemic Reconstruction in a Phylogenetics Framework: Transmission Trees as Partitions of the Node Set",
+                "PLOS Comput Biol", Citation.Status.IN_PRESS));
+    }
+
     // **************************************************************
     // TreeTraitProvider IMPLEMENTATION
     // **************************************************************
@@ -1392,7 +1156,7 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
                     throw new RuntimeException("Can only reconstruct states on treeModel given to constructor or a " +
                             "partitioned tree derived from it");
                 } else {
-                    return branchMap.get(oldNode.getNumber()).toString();
+                    return getBranchMap().get(oldNode.getNumber()).toString();
                 }
             } catch(NullPointerException e){
                 if(tree.isRoot(node)){
@@ -1400,15 +1164,11 @@ public abstract class CaseToCaseTreeLikelihood extends AbstractTreeLikelihood im
                 } else {
                     NodeRef parent = tree.getParent(node);
                     int originalParentNumber = (Integer)tree.getNodeAttribute(parent,"Number");
-                    return branchMap.get(originalParentNumber).toString();
+                    return getBranchMap().get(originalParentNumber).toString();
                 }
             }
         } else {
-            if (!likelihoodKnown) {
-                calculateLogLikelihood();
-                likelihoodKnown = true;
-            }
-            return branchMap.get(node.getNumber()).toString();
+            return getBranchMap().get(node.getNumber()).toString();
         }
     }
 
diff --git a/src/dr/evomodel/epidemiology/casetocase/CategoryOutbreak.java b/src/dr/evomodel/epidemiology/casetocase/CategoryOutbreak.java
index 1718396..6aa6b0a 100644
--- a/src/dr/evomodel/epidemiology/casetocase/CategoryOutbreak.java
+++ b/src/dr/evomodel/epidemiology/casetocase/CategoryOutbreak.java
@@ -25,6 +25,7 @@
 
 package dr.evomodel.epidemiology.casetocase;
 
+import dr.evolution.util.Date;
 import dr.evolution.util.Taxa;
 import dr.evolution.util.Taxon;
 import dr.evomodel.epidemiology.casetocase.periodpriors.AbstractPeriodPriorDistribution;
@@ -82,17 +83,17 @@ public class CategoryOutbreak extends AbstractOutbreak {
     }
 
 
-    private void addCase(String caseID, double examTime, double cullTime, Parameter coords,
+    private void addCase(String caseID, double endTime, Parameter coords,
                          Parameter infectionPosition, Taxa associatedTaxa, double indexPriorWeight,
                          String infectiousCategory, String latentCategory){
         CategoryCase thisCase;
 
         if(latentCategory==null){
-            thisCase =  new CategoryCase(caseID, examTime, cullTime, coords, infectionPosition, associatedTaxa,
+            thisCase =  new CategoryCase(caseID, endTime, coords, infectionPosition, associatedTaxa,
                     indexPriorWeight, infectiousCategory);
         } else {
             thisCase =
-                    new CategoryCase(caseID, examTime, cullTime, coords, infectionPosition, associatedTaxa,
+                    new CategoryCase(caseID, endTime, coords, infectionPosition, associatedTaxa,
                             indexPriorWeight, infectiousCategory, latentCategory);
             latentCategories.add(latentCategory);
         }
@@ -105,8 +106,7 @@ public class CategoryOutbreak extends AbstractOutbreak {
     }
 
     private void addNoninfectedCase(String caseID, Parameter coords){
-        CategoryCase thisCase = new CategoryCase(caseID, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, coords,
-                null, null, 0.0, null);
+        CategoryCase thisCase = new CategoryCase(caseID, Double.POSITIVE_INFINITY, coords, null, null, 0.0, null);
         thisCase.setEverInfected(false);
 
         cases.add(thisCase);
@@ -215,57 +215,59 @@ public class CategoryOutbreak extends AbstractOutbreak {
         private String latentCategory;
         private Parameter coords;
         private double indexPriorWeight;
+        private final ArrayList<Date> examinationTimes;
 
-
-        private CategoryCase(String name, String caseID, double examTime, double cullTime, Parameter coords,
+        private CategoryCase(String name, String caseID, double endTime, Parameter coords,
                              Parameter infectionBranchPosition, Taxa associatedTaxa, double indexPriorWeight,
-                             String infectiousCategory){
+                             String infectiousCategory, String latentCategory){
             super(name);
-            wasEverInfected = true;
+
+            wasEverInfected = associatedTaxa != null;
+
             this.caseID = caseID;
             this.infectiousCategory = infectiousCategory;
             this.infectionBranchPosition = infectionBranchPosition;
             if(infectionBranchPosition!=null) {
                 addVariable(infectionBranchPosition);
             }
-            this.examTime = examTime;
-            endOfInfectiousTime = cullTime;
+            endOfInfectiousTime = endTime;
             this.associatedTaxa = associatedTaxa;
             this.coords = coords;
             this.indexPriorWeight = indexPriorWeight;
-            latentCategory = null;
+            this.latentCategory = latentCategory;
+
+            examinationTimes = new ArrayList<Date>();
+
+            if(wasEverInfected) {
+                for (Taxon taxon : associatedTaxa) {
+                    examinationTimes.add(taxon.getDate());
+                }
+            }
+
         }
 
-        private CategoryCase(String name, String caseID, double examTime, double cullTime, Parameter coords,
+        private CategoryCase(String name, String caseID, double endTime, Parameter coords,
                              Parameter infectionBranchPosition, Taxa associatedTaxa,
                              String infectiousCategory){
-            this(name, caseID, examTime, cullTime, coords, infectionBranchPosition, associatedTaxa, 1.0,
-                    infectiousCategory);
+            this(name, caseID, endTime, coords, infectionBranchPosition, associatedTaxa, 1.0,
+                    infectiousCategory, null);
 
         }
 
 
-        private CategoryCase(String name, String caseID, double examTime, double cullTime, Parameter coords,
-                             Parameter infectionBranchPosition, Taxa associatedTaxa, double indexPriorWeight,
-                             String infectiousCategory, String latentCategory){
-            this(name, caseID, examTime, cullTime, coords, infectionBranchPosition, associatedTaxa, indexPriorWeight,
-                    infectiousCategory);
-            this.latentCategory = latentCategory;
-        }
 
-
-        private CategoryCase(String caseID, double examTime, double cullTime, Parameter coords,
+        private CategoryCase(String caseID, double endTime, Parameter coords,
                              Parameter infectionBranchPosition, Taxa associatedTaxa, double indexPriorWeight,
                              String infectiousCategory){
-            this(CATEGORY_CASE, caseID, examTime, cullTime, coords, infectionBranchPosition, associatedTaxa,
-                    indexPriorWeight, infectiousCategory);
+            this(CATEGORY_CASE, caseID, endTime, coords, infectionBranchPosition, associatedTaxa,
+                    indexPriorWeight, infectiousCategory, null);
         }
 
 
-        private CategoryCase(String caseID, double examTime, double cullTime, Parameter coords,
+        private CategoryCase(String caseID, double endTime, Parameter coords,
                              Parameter infectionBranchPosition, Taxa associatedTaxa, double indexPriorWeight,
                              String infectiousCategory, String latentCategory){
-            this(CATEGORY_CASE, caseID, examTime, cullTime, coords, infectionBranchPosition, associatedTaxa,
+            this(CATEGORY_CASE, caseID, endTime, coords, infectionBranchPosition, associatedTaxa,
                     indexPriorWeight, infectiousCategory, latentCategory);
         }
 
@@ -281,12 +283,12 @@ public class CategoryOutbreak extends AbstractOutbreak {
 
         public double getIndexPriorWeight() { return indexPriorWeight;}
 
-        public boolean culledYet(double time) {
+        public boolean noninfectiousYet(double time) {
             return time>endOfInfectiousTime;
         }
 
-        public boolean examinedYet(double time) {
-            return time>examTime;
+        public ArrayList<Date> getExaminationTimes(){
+            return examinationTimes;
         }
 
         protected void handleModelChangedEvent(Model model, Object object, int index) {
@@ -326,9 +328,8 @@ public class CategoryOutbreak extends AbstractOutbreak {
 
         //for the cases
 
-        public static final String CASE_ID = "caseID";
-        public static final String CULL_TIME = "cullTime";
-        public static final String EXAMINATION_TIME = "examTime";
+        public static final String CASE_ID = "hostID";
+        public static final String END_TIME = "endTime";
         public static final String COORDINATES = "spatialCoordinates";
         public static final String INFECTION_TIME_BRANCH_POSITION = "infectionTimeBranchPosition";
         public static final String LATENT_CATEGORY = "latentCategory";
@@ -434,16 +435,14 @@ public class CategoryOutbreak extends AbstractOutbreak {
             if(wasEverInfected) {
 
                 if(!xo.hasAttribute(INFECTIOUS_CATEGORY)
-                        || !xo.hasAttribute(CULL_TIME)
-                        || !xo.hasAttribute(EXAMINATION_TIME)
+                        || !xo.hasAttribute(END_TIME)
                         || !xo.hasChildNamed(INFECTION_TIME_BRANCH_POSITION)){
                     throw new XMLParseException("Case " + farmID + " wasEverInfected but lacks infection-related data");
                 }
 
 
                 String infectiousCategory = (String) xo.getAttribute(INFECTIOUS_CATEGORY);
-                final double cullTime = Double.parseDouble((String) xo.getAttribute(CULL_TIME));
-                final double examTime = Double.parseDouble((String) xo.getAttribute(EXAMINATION_TIME));
+                final double endTime = Double.parseDouble((String) xo.getAttribute(END_TIME));
                 String latentCategory = null;
                 if (xo.hasAttribute(LATENT_CATEGORY)) {
                     latentCategory = (String) xo.getAttribute(LATENT_CATEGORY);
@@ -466,7 +465,7 @@ public class CategoryOutbreak extends AbstractOutbreak {
                         taxa.addTaxon((Taxon) xo.getChild(i));
                     }
                 }
-                outbreak.addCase(farmID, examTime, cullTime, coords, ibp, taxa, indexPriorWeight, infectiousCategory, latentCategory);
+                outbreak.addCase(farmID, endTime, coords, ibp, taxa, indexPriorWeight, infectiousCategory, latentCategory);
             } else {
                 outbreak.addNoninfectedCase(farmID, coords);
 
@@ -493,9 +492,8 @@ public class CategoryOutbreak extends AbstractOutbreak {
 
         private final XMLSyntaxRule[] caseRules = {
                 AttributeRule.newBooleanRule(WAS_EVER_INFECTED),
-                new StringAttributeRule(CASE_ID, "The unique identifier for this farm"),
-                new StringAttributeRule(CULL_TIME, "The time this farm was culled", true),
-                new StringAttributeRule(EXAMINATION_TIME, "The date this farm was examined", true),
+                new StringAttributeRule(CASE_ID, "The unique identifier for this host"),
+                new StringAttributeRule(END_TIME, "The time at which this host ceased to be infectious", true),
                 new ElementRule(Taxon.class, 0, Integer.MAX_VALUE),
                 new ElementRule(INFECTION_TIME_BRANCH_POSITION, Parameter.class, "The exact position on the branch" +
                         " along which the infection of this case occurs that it actually does occur", true),
@@ -512,10 +510,10 @@ public class CategoryOutbreak extends AbstractOutbreak {
                 new ElementRule(ProductStatistic.class, 0,2),
                 new ElementRule(CategoryCase.CATEGORY_CASE, caseRules, 1, Integer.MAX_VALUE),
                 new ElementRule(Taxa.class),
-                new ElementRule(INFECTIOUS_PERIOD_PRIOR, AbstractPeriodPriorDistribution.class, "blah", 1,
-                        Integer.MAX_VALUE),
-                new ElementRule(LATENT_PERIODS, Parameter.class, "blah", 0,
-                        Integer.MAX_VALUE),
+                new ElementRule(INFECTIOUS_PERIOD_PRIOR, AbstractPeriodPriorDistribution.class, "A prior " +
+                        "distribution for the length of infectious periods", 1, Integer.MAX_VALUE),
+                new ElementRule(LATENT_PERIODS, Parameter.class, "A prior distribution for the length of latent" +
+                        " periods", 0, Integer.MAX_VALUE),
                 AttributeRule.newBooleanRule(HAS_GEOGRAPHY, true),
                 new ElementRule(DISTANCE_MATRIX, Parameter.class, "A matrix of distances between the cases in this " +
                         "outbreak", true)
diff --git a/src/dr/evomodel/epidemiology/casetocase/PartitionedTreeLogger.java b/src/dr/evomodel/epidemiology/casetocase/PartitionedTreeLogger.java
index b088644..d706349 100644
--- a/src/dr/evomodel/epidemiology/casetocase/PartitionedTreeLogger.java
+++ b/src/dr/evomodel/epidemiology/casetocase/PartitionedTreeLogger.java
@@ -27,10 +27,7 @@ package dr.evomodel.epidemiology.casetocase;
 
 import dr.evolution.tree.*;
 import dr.evomodel.tree.TreeLogger;
-import dr.evomodelxml.tree.TreeLoggerParser;
 import dr.inference.loggers.LogFormatter;
-import dr.xml.AbstractXMLObjectParser;
-import dr.xml.XMLObjectParser;
 
 import java.text.NumberFormat;
 
@@ -67,7 +64,7 @@ public class PartitionedTreeLogger extends TreeLogger {
         if(!doIt)
             return;
 
-        setTree(c2cTL.rewireTree(originalTree));
+        setTree(c2cTL.addTransmissionNodes(originalTree));
 
         super.log(state);
 
diff --git a/src/dr/evomodel/epidemiology/casetocase/PartitionedTreeModel.java b/src/dr/evomodel/epidemiology/casetocase/PartitionedTreeModel.java
index fccd55d..eb88fe6 100644
--- a/src/dr/evomodel/epidemiology/casetocase/PartitionedTreeModel.java
+++ b/src/dr/evomodel/epidemiology/casetocase/PartitionedTreeModel.java
@@ -25,15 +25,16 @@
 
 package dr.evomodel.epidemiology.casetocase;
 
-import dr.app.beauti.util.TreeUtils;
 import dr.evolution.tree.NodeRef;
 import dr.evolution.tree.Tree;
+import dr.evolution.util.Taxon;
 import dr.evomodel.tree.TreeModel;
-import dr.evomodelxml.tree.TreeModelParser;
 import dr.inference.model.*;
-import dr.xml.XMLObject;
-import dr.xml.XMLParseException;
+import dr.math.MathUtils;
 
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.io.IOException;
 import java.util.*;
 
 /**
@@ -43,26 +44,35 @@ import java.util.*;
  */
 public class PartitionedTreeModel extends TreeModel {
 
+    private final AbstractOutbreak outbreak;
     private BranchMapModel branchMap;
+    private final int elementCount;
+
     public final static String PARTITIONED_TREE_MODEL = "partitionedTreeModel";
     Set<NodeRef> partitionsQueue = new HashSet<NodeRef>();
 
-    public PartitionedTreeModel(String id, Tree tree, BranchMapModel branchMap){
+    public PartitionedTreeModel(String id, Tree tree, AbstractOutbreak outbreak){
         super(id, tree);
-        this.branchMap = branchMap;
+        this.outbreak = outbreak;
+        elementCount = outbreak.infectedSize();
+        branchMap = new BranchMapModel(this);
+        partitionAccordingToRandomTT(false);
     }
 
-    public PartitionedTreeModel(String id, Tree tree){
+    public PartitionedTreeModel(String id, Tree tree, AbstractOutbreak outbreak, String startingTTFileName){
         super(id, tree);
+        this.outbreak = outbreak;
+        elementCount = outbreak.infectedSize();
         branchMap = new BranchMapModel(this);
+        partitionAccordingToSpecificTT(startingTTFileName);
     }
 
-    public PartitionedTreeModel(TreeModel treeModel, BranchMapModel branchMap){
-        this(PARTITIONED_TREE_MODEL, treeModel, branchMap);
+    public PartitionedTreeModel(TreeModel treeModel, AbstractOutbreak outbreak){
+        this(PARTITIONED_TREE_MODEL, treeModel, outbreak);
     }
 
-    public PartitionedTreeModel(TreeModel treeModel){
-        this(PARTITIONED_TREE_MODEL, treeModel);
+    public PartitionedTreeModel(TreeModel treeModel, AbstractOutbreak outbreak, String startingTTFileName){
+        this(PARTITIONED_TREE_MODEL, treeModel, outbreak, startingTTFileName);
     }
 
     public void partitionsChangingAlert(HashSet<AbstractCase> casesToRecalculate){
@@ -127,11 +137,11 @@ public class PartitionedTreeModel extends TreeModel {
 
             final NodeRef node = getNodeOfParameter((Parameter) variable);
 
-            partitionsChangingAlert(adjacentPartitions(node));
+            partitionsChangingAlert(adjacentElements(node));
         }
     }
 
-    public HashSet<AbstractCase> adjacentPartitions(NodeRef node){
+    public HashSet<AbstractCase> adjacentElements(NodeRef node){
         HashSet<AbstractCase> changedCases = new HashSet<AbstractCase>();
         ArrayList<NodeRef> affectedNodes = new ArrayList<NodeRef>();
 
@@ -156,8 +166,8 @@ public class PartitionedTreeModel extends TreeModel {
         }
 
         for(NodeRef node : partitionsQueue){
-            AbstractCase nodePartition = branchMap.get(node.getNumber());
-            partitionChangingAlert(nodePartition);
+            AbstractCase nodeElement = branchMap.get(node.getNumber());
+            partitionChangingAlert(nodeElement);
             NodeRef parent = getParent(node);
             if(parent!=null && branchMap.get(node.getNumber())!=branchMap.get(parent.getNumber())){
                 partitionChangingAlert(branchMap.get(parent.getNumber()));
@@ -187,7 +197,7 @@ public class PartitionedTreeModel extends TreeModel {
 
     public void setNodeHeight(NodeRef n, double height) {
 
-        partitionsChangingAlert(adjacentPartitions(n));
+        partitionsChangingAlert(adjacentElements(n));
 
         super.setNodeHeight(n, height);
     }
@@ -225,26 +235,41 @@ public class PartitionedTreeModel extends TreeModel {
             }
 
         }
+
+        // @todo wasteful - something accessible should keep a list of cases
+
+        for(int i=0; i<getExternalNodeCount(); i++){
+            AbstractCase aCase = branchMap.get(i);
+
+
+            NodeRef tipMRCA = caseMRCA(aCase);
+
+            if(branchMap.get(tipMRCA.getNumber())!=aCase){
+                throw new BadPartitionException("Node partition disconnected");
+            }
+
+
+        }
+
+
         return !foundProblem;
     }
 
     //Return a set of nodes that are not descendants of (or equal to) the current node and are in the same partition as
-    // it. If flagForRecalc is true, then this also sets the flags for likelihood recalculation for all these nodes
-    // to true
-
+    // it.
 
-    public HashSet<Integer> samePartitionDownTree(NodeRef node){
 
+    public HashSet<Integer> samePartitionElementUpTree(NodeRef node){
         HashSet<Integer> out = new HashSet<Integer>();
-        AbstractCase painting = branchMap.get(node.getNumber());
+        AbstractCase elementCase = branchMap.get(node.getNumber());
         NodeRef currentNode = node;
         NodeRef parentNode = getParent(node);
-        while(parentNode!=null && branchMap.get(parentNode.getNumber())==painting){
+        while(parentNode!=null && branchMap.get(parentNode.getNumber())==elementCase){
             out.add(parentNode.getNumber());
-            if(countChildrenInSamePartition(parentNode)==2){
+            if(countChildrenInSameElement(parentNode)==2){
                 NodeRef otherChild = sibling(this, currentNode);
                 out.add(otherChild.getNumber());
-                out.addAll(samePartitionUpTree(otherChild));
+                out.addAll(samePartitionElementDownTree(otherChild));
             }
             currentNode = parentNode;
             parentNode = getParent(currentNode);
@@ -255,15 +280,13 @@ public class PartitionedTreeModel extends TreeModel {
     //Return a set of nodes that are descendants (and not equal to) the current node and are in the same partition as
     // it.
 
-
-
-    public HashSet<Integer> samePartitionUpTree(NodeRef node){
+    public HashSet<Integer> samePartitionElementDownTree(NodeRef node){
         HashSet<Integer> out = new HashSet<Integer>();
-        AbstractCase painting = branchMap.get(node.getNumber());
+        AbstractCase elementCase = branchMap.get(node.getNumber());
         for(int i=0; i< getChildCount(node); i++){
-            if(branchMap.get(getChild(node,i).getNumber())==painting){
+            if(branchMap.get(getChild(node,i).getNumber())==elementCase){
                 out.add(getChild(node,i).getNumber());
-                out.addAll(samePartitionUpTree(getChild(node, i)));
+                out.addAll(samePartitionElementDownTree(getChild(node, i)));
             }
         }
         return out;
@@ -273,12 +296,12 @@ public class PartitionedTreeModel extends TreeModel {
     public Integer[] samePartitionElement(NodeRef node){
         HashSet<Integer> out = new HashSet<Integer>();
         out.add(node.getNumber());
-        out.addAll(samePartitionDownTree(node));
-        out.addAll(samePartitionUpTree(node));
+        out.addAll(samePartitionElementUpTree(node));
+        out.addAll(samePartitionElementDownTree(node));
         return out.toArray(new Integer[out.size()]);
     }
 
-    private int[] allTipsForThisCase(AbstractCase thisCase){
+    public int[] allTipsForThisCase(AbstractCase thisCase){
         ArrayList<Integer> listOfRefs = new ArrayList<Integer>();
 
         for(int i=0; i<getExternalNodeCount(); i++){
@@ -297,21 +320,20 @@ public class PartitionedTreeModel extends TreeModel {
     }
 
 
-    public NodeRef getEarliestNodeInPartition(AbstractCase thisCase){
+    public NodeRef getEarliestNodeInElement(AbstractCase thisCase){
         if(thisCase.wasEverInfected()) {
 
-            int[] tips = allTipsForThisCase(thisCase);
-
-            NodeRef tipMRCA = Tree.Utils.getCommonAncestor(this, tips);
+            NodeRef tipMRCA = caseMRCA(thisCase);
 
             if(branchMap.get(tipMRCA.getNumber())!=thisCase){
-                throw new BadPartitionException("Node partition disconnected");
+                throw new BadPartitionException("Node partition element disconnected");
             }
 
             NodeRef child = tipMRCA;
             NodeRef parent = getParent(child);
-            boolean transmissionFound = false;
+            boolean transmissionFound = parent == null;
             while (!transmissionFound) {
+
                 if (branchMap.get(child.getNumber()) != branchMap.get(parent.getNumber())) {
                     transmissionFound = true;
                 } else {
@@ -321,6 +343,7 @@ public class PartitionedTreeModel extends TreeModel {
                         transmissionFound = true;
                     }
                 }
+
             }
             return child;
         }
@@ -338,21 +361,15 @@ public class PartitionedTreeModel extends TreeModel {
         return out;
     }
 
-
-
-
     /* Return the case that infected this case */
 
-    /* Return the case which was the infector in the infection event represented by this node */
-
     public AbstractCase getInfector(AbstractCase thisCase){
         if(thisCase.wasEverInfected()) {
-            int[] tips = allTipsForThisCase(thisCase);
 
-            NodeRef tipMRCA = Tree.Utils.getCommonAncestor(this, tips);
+            NodeRef tipMRCA = caseMRCA(thisCase);
 
             if(branchMap.get(tipMRCA.getNumber())!=thisCase){
-                throw new BadPartitionException("Node partition disconnected");
+                throw new BadPartitionException("Node partition element disconnected");
             }
 
             NodeRef currentNode = tipMRCA;
@@ -378,7 +395,7 @@ public class PartitionedTreeModel extends TreeModel {
 
     public HashSet<AbstractCase> getInfectees(AbstractCase thisCase){
         if(thisCase.wasEverInfected()) {
-            return getInfecteesInClade(getEarliestNodeInPartition(thisCase));
+            return getInfecteesInClade(getEarliestNodeInElement(thisCase));
         }
         return new HashSet<AbstractCase>();
     }
@@ -418,18 +435,23 @@ public class PartitionedTreeModel extends TreeModel {
     }
 
 
-    /* Return the partition of the parent of this node */
+    /* Return the partition element of the parent of this node */
 
     public AbstractCase getParentCase(NodeRef node){
         return branchMap.get(getParent(node).getNumber());
     }
 
 
+    public int getElementCount(){
+        return elementCount;
+    }
+
+
     //Counts the children of the current node which are in the same partition element as itself
 
 
 
-    public int countChildrenInSamePartition(NodeRef node){
+    public int countChildrenInSameElement(NodeRef node){
         if(isExternal(node)){
             return -1;
         } else {
@@ -460,4 +482,375 @@ public class PartitionedTreeModel extends TreeModel {
         return null;
     }
 
+
+    public NodeRef caseMRCA(AbstractCase aCase, boolean checkConnectedness){
+        int[] caseTips = allTipsForThisCase(aCase);
+        NodeRef mrca =  Tree.Utils.getCommonAncestor(this, caseTips);
+
+        if(checkConnectedness) {
+            if (branchMap.get(mrca.getNumber()) != aCase) {
+                throw new BadPartitionException("A partition element is disconnected");
+            }
+        }
+
+        return mrca;
+    }
+
+    public NodeRef caseMRCA(AbstractCase aCase){
+        return caseMRCA(aCase, true);
+    }
+
+    private HashSet<NodeRef> getDescendantTips(NodeRef node){
+        HashSet<NodeRef> out = new HashSet<NodeRef>();
+        if(isExternal(node)){
+            out.add(node);
+            return out;
+        } else {
+            out.addAll(getDescendantTips(getChild(node, 0)));
+            out.addAll(getDescendantTips(getChild(node, 1)));
+        }
+        return out;
+    }
+
+    public boolean isAncestral(NodeRef node){
+        AbstractCase currentCase = branchMap.get(node.getNumber());
+
+        for(NodeRef tip : getDescendantTips(node)){
+            if(branchMap.get(tip.getNumber())==currentCase){
+                return true;
+            }
+        }
+
+        return false;
+    }
+
+    public boolean isRootBlockedBy(AbstractCase aCase, AbstractCase potentialBlocker){
+        return directDescendant(caseMRCA(aCase), caseMRCA(potentialBlocker));
+    }
+
+    public boolean isRootBlocked(AbstractCase aCase){
+        for(AbstractCase anotherCase : outbreak.getCases()){
+            if(anotherCase.wasEverInfected && anotherCase!=aCase){
+                if(isRootBlockedBy(aCase, anotherCase)){
+                    return true;
+                }
+            }
+        }
+        return false;
+    }
+
+    private HashSet<NodeRef> getTipsInThisPartitionElement(AbstractCase aCase){
+        HashSet<NodeRef> out = new HashSet<NodeRef>();
+        // todo check that external nodes come first
+
+        for(int i=0; i<getExternalNodeCount(); i++){
+            if(branchMap.get(i)==aCase){
+                out.add(getExternalNode(i));
+            }
+        }
+
+        return out;
+    }
+
+    private boolean directDescendant(NodeRef node, NodeRef possibleAncestor){
+        NodeRef currentNode = node;
+
+        while(currentNode!=null){
+            if(currentNode==possibleAncestor){
+                return true;
+            }
+            currentNode = getParent(currentNode);
+        }
+        return false;
+    }
+
+    private boolean directRelationship(NodeRef node1, NodeRef node2){
+        return directDescendant(node1, node2) || directDescendant(node2, node1);
+    }
+
+    /* Populates the branch map for external nodes */
+
+    private AbstractCase[] prepareExternalNodeMap(AbstractCase[] map){
+        for(int i=0; i< getExternalNodeCount(); i++){
+            TreeModel.Node currentExternalNode = (TreeModel.Node) getExternalNode(i);
+            Taxon currentTaxon = currentExternalNode.taxon;
+            for(AbstractCase thisCase : outbreak.getCases()){
+                if(thisCase.wasEverInfected()) {
+                    for (Taxon caseTaxon : thisCase.getAssociatedTaxa()) {
+                        if (caseTaxon.equals(currentTaxon)) {
+                            map[currentExternalNode.getNumber()] = thisCase;
+                        }
+                    }
+                }
+            }
+        }
+        return map;
+    }
+
+    /* The CSV file should have a header line, followed by one line per case matching that case to its infector */
+
+    private void partitionAccordingToSpecificTT(String networkFileName){
+        System.out.println("Using specified starting transmission tree.");
+        try{
+            BufferedReader reader = new BufferedReader (new FileReader(networkFileName));
+            HashMap<AbstractCase, AbstractCase> specificParentMap = new HashMap<AbstractCase, AbstractCase>();
+            // skip header line
+            reader.readLine();
+            String currentLine = reader.readLine();
+            while(currentLine!=null){
+                currentLine = currentLine.replace("\"", "");
+                String[] splitLine = currentLine.split("\\,");
+                if(!splitLine[1].equals("Start")){
+                    specificParentMap.put(outbreak.getCase(splitLine[0]), outbreak.getCase(splitLine[1]));
+                } else {
+                    specificParentMap.put(outbreak.getCase(splitLine[0]), null);
+                }
+                currentLine = reader.readLine();
+            }
+            reader.close();
+            partitionAccordingToSpecificTT(specificParentMap);
+        } catch(IOException e){
+            throw new RuntimeException("Cannot read file: " + networkFileName );
+        }
+    }
+
+
+    private void partitionAccordingToSpecificTT(HashMap<AbstractCase, AbstractCase> map){
+        branchMap.setAll(prepareExternalNodeMap(new AbstractCase[getNodeCount()]), true);
+
+        //various sanity checks
+
+        for(AbstractCase aCase : map.keySet()){
+            if(!aCase.wasEverInfected){
+                throw new RuntimeException("This starting transmission tree involves never-infected cases");
+            }
+        }
+
+        AbstractCase firstCase=null;
+        int indexCaseCount = 0;
+
+        for(AbstractCase aCase : outbreak.getCases()){
+            if(aCase.wasEverInfected()) {
+                if (map.get(aCase) == null) {
+                    firstCase = aCase;
+                    indexCaseCount++;
+                }
+            }
+        }
+        if(indexCaseCount==0){
+            throw new RuntimeException("Given starting transmission tree appears to have a cycle");
+        }
+        if(indexCaseCount>1){
+            throw new RuntimeException("Given starting transmission tree appears not to be connected");
+        }
+
+
+        NodeRef root = getRoot();
+        specificallyPartitionDownwards(root, firstCase, map);
+        if(!checkPartitions()){
+            throw new RuntimeException("Given starting transmission tree is not compatible with the starting tree");
+        }
+
+    }
+
+    private void specificallyPartitionDownwards(NodeRef node, AbstractCase thisCase,
+                                                HashMap<AbstractCase, AbstractCase> map){
+        if(isExternal(node)){
+            return;
+        }
+        branchMap.set(node.getNumber(), thisCase, true);
+        if(isAncestral(node)){
+            for(int i=0; i<getChildCount(node); i++){
+                specificallyPartitionDownwards(getChild(node, i), thisCase, map);
+            }
+        } else {
+            branchMap.set(node.getNumber(), null, true);
+            HashSet<AbstractCase> children = new HashSet<AbstractCase>();
+            for(AbstractCase aCase : outbreak.getCases()){
+                if(map.get(aCase)==thisCase){
+                    children.add(aCase);
+                }
+            }
+            HashSet<AbstractCase> relevantChildren = new HashSet<AbstractCase>(children);
+            for(AbstractCase child: children){
+
+                NodeRef caseMRCA = caseMRCA(child);
+
+                //either ALL of this case's tips must be descendants of this node, or none of them; otherwise the trees are not compatible.
+
+                if(directDescendant(node, caseMRCA)){
+                    throw new RuntimeException("Starting transmission tree is incompatible with starting phylogeny");
+                }
+
+                if(caseMRCA==node){
+                    //if this child's MRCA is this very node, that child alone can end the infection branch here
+                    relevantChildren = new HashSet<AbstractCase>();
+                    relevantChildren.add(child);
+                    break;
+                }
+
+                NodeRef currentNode = caseMRCA;
+                while(currentNode!=node && currentNode!=null){
+                    currentNode = getParent(currentNode);
+                }
+                if(currentNode==null){
+                    relevantChildren.remove(child);
+                }
+            }
+            if(relevantChildren.size()==1){
+                //this ends an infection branch
+                AbstractCase child = relevantChildren.iterator().next();
+                branchMap.set(node.getNumber(), child, true);
+            } else {
+
+                //this can't end an infection branch
+                branchMap.set(node.getNumber(), thisCase, true);
+            }
+            for(int i=0; i<getChildCount(node); i++){
+                specificallyPartitionDownwards(getChild(node, i), branchMap.get(node.getNumber()), map);
+            }
+        }
+
+    }
+
+
+    /*
+     todo - The trouble with initialising this without the likelihood class is that lots of starting trees might
+     todo - fail. Need to think about how best to deal with this.
+
+     Generally allowCreep is a bad idea, since it tends to place infections after tip times, and tip times
+     are frequently the times at which cases cease to be infectious. It might be useful for some pathogens, however.
+    */
+
+
+    private void partitionAccordingToRandomTT(boolean allowCreep){
+
+        System.out.println("Generating a random starting partition of the tree");
+
+        branchMap.setAll(prepareExternalNodeMap(new AbstractCase[getNodeCount()]), true);
+
+        NodeRef root = getRoot();
+        randomlyAssignNode(root, allowCreep);
+
+    }
+
+
+    private AbstractCase randomlyAssignNode(NodeRef node, boolean allowCreep){
+
+        if(isExternal(node)){
+            return branchMap.get(node.getNumber());
+        } else {
+
+            //If this node is a descendant of a case's MRCA and an ancestor of one of that case's tips, it must be
+            //assigned that case. If this holds for two different cases then the tree is incompatible with the tip partition
+
+            ArrayList<AbstractCase> forcedByTopology = new ArrayList<AbstractCase>();
+
+            for(AbstractCase aCase : outbreak.getCases()){
+                if(aCase.wasEverInfected) {
+                    NodeRef caseMRCA = caseMRCA(aCase, false);
+                    HashSet<NodeRef> caseTips = getTipsInThisPartitionElement(aCase);
+
+                    for (NodeRef caseTip : caseTips) {
+                        if (directDescendant(node, caseMRCA) && directDescendant(caseTip, node)) {
+                            if(!forcedByTopology.contains(aCase)) {
+                                forcedByTopology.add(aCase);
+                            }
+                        }
+                    }
+                }
+            }
+
+            if(forcedByTopology.size()>1){
+                throw new RuntimeException("Starting phylogeny is incompatible with this tip partition");
+            } else if(forcedByTopology.size()==1){
+                branchMap.set(node.getNumber(), forcedByTopology.get(0), true);
+
+                for (int i = 0; i < getChildCount(node); i++) {
+                    if(!isExternal(getChild(node, i))){
+                        randomlyAssignNode(getChild(node, i), allowCreep);
+                    }
+                }
+
+                return forcedByTopology.get(0);
+            } else {
+                //not mandated by the topology
+                //three choices - case of child 1, case of child 2, or (unless this is the root) case of parent
+
+                AbstractCase[] choices = new AbstractCase[2];
+
+
+
+                for (int i = 0; i < getChildCount(node); i++) {
+                    if(!isExternal(getChild(node, i))){
+                        choices[i] = randomlyAssignNode(getChild(node, i), allowCreep);
+                    } else {
+                        choices[i] = branchMap.get(getChild(node,i).getNumber());
+                    }
+                }
+                //if both choices are null and we're at the root, try again
+
+                while(isRoot(node) && choices[0]==null && choices[1]==null){
+                    for (int i = 0; i < getChildCount(node); i++) {
+                        if(!isExternal(getChild(node, i))){
+                            choices[i] = randomlyAssignNode(getChild(node, i), allowCreep);
+                        } else {
+                            choices[i] = branchMap.get(getChild(node,i).getNumber());
+                        }
+                    }
+                }
+
+                int randomSelection;
+                if (isRoot(node)) {
+                    //must make a choice at this point
+                    randomSelection = MathUtils.nextInt(2);
+                    //they can't both be null
+                    if(choices[randomSelection]==null){
+                        randomSelection = 1-randomSelection;
+                    }
+                    AbstractCase winner = choices[randomSelection];
+                    fillDownTree(node, winner);
+                    return winner;
+
+                } else {
+                    randomSelection = MathUtils.nextInt(allowCreep ? 3 : 2);
+                }
+                if (randomSelection != 2) {
+                    AbstractCase winner = choices[randomSelection];
+                    AbstractCase loser = choices[1-randomSelection];
+
+                    // check that this isn't going to cause a timings problem
+
+                    if(getNodeHeight(getChild(node, randomSelection)) >
+                            loser.getInfectionBranchPosition().getParameterValue(0)
+                                    *getBranchLength(getChild(node, 1-randomSelection))
+                                    + getNodeHeight(getChild(node, 1-randomSelection))) {
+                        winner = loser;
+                    }
+
+                    if(winner!=null) {
+                        fillDownTree(node, winner);
+                    } else {
+                        branchMap.set(node.getNumber(), null, true);
+                    }
+
+                    return winner;
+
+                } else {
+                    //parent partition element will creep to here, but we don't know what that is yet
+                    return null;
+                }
+            }
+        }
+    }
+
+    private void fillDownTree(NodeRef node, AbstractCase aCase){
+        if(branchMap.get(node.getNumber())==null){
+            branchMap.set(node.getNumber(), aCase, true);
+            for(int i=0; i<2; i++){
+                fillDownTree(getChild(node, i), aCase);
+            }
+        }
+    }
+
 }
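
As an illustration of the input expected by the new partitionAccordingToSpecificTT(String) method above: the format is inferred from the parsing code only (a header line that is skipped, one comma-separated case/infector pair per line, double quotes stripped, and the literal value "Start" marking the index case), and the case names below are purely hypothetical.

    Case,Infector
    "FarmB","FarmA"
    "FarmC","FarmA"
    "FarmA","Start"
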
diff --git a/src/dr/evomodel/epidemiology/casetocase/PartitionedTreeModelParser.java b/src/dr/evomodel/epidemiology/casetocase/PartitionedTreeModelParser.java
index 2585997..93b7ded 100644
--- a/src/dr/evomodel/epidemiology/casetocase/PartitionedTreeModelParser.java
+++ b/src/dr/evomodel/epidemiology/casetocase/PartitionedTreeModelParser.java
@@ -69,6 +69,9 @@ public class PartitionedTreeModelParser extends AbstractXMLObjectParser {
     public static final String TAXON = "taxon";
     public static final String NAME = "name";
 
+    public static final String OUTBREAK = "outbreak";
+    public static final String STARTING_TT_FILE = "startingTransmissionTreeFile";
+
     public PartitionedTreeModelParser() {
         rules = new XMLSyntaxRule[]{
                 new ElementRule(Tree.class),
@@ -113,7 +116,9 @@ public class PartitionedTreeModelParser extends AbstractXMLObjectParser {
                         new XMLSyntaxRule[]{
                                 new ElementRule(TaxonList.class, "A set of taxa for which leaf heights are required"),
                                 new ElementRule(Parameter.class, "A compound parameter containing the leaf heights")
-                        }, true)
+                        }, true),
+                new ElementRule(OUTBREAK, AbstractOutbreak.class, "The case data"),
+                AttributeRule.newStringRule(STARTING_TT_FILE, true)
         };
     }
 
@@ -127,7 +132,15 @@ public class PartitionedTreeModelParser extends AbstractXMLObjectParser {
     public Object parseXMLObject(XMLObject xo) throws XMLParseException {
 
         Tree tree = (Tree) xo.getChild(Tree.class);
-        TreeModel treeModel = new PartitionedTreeModel(xo.getId(), tree);
+
+        AbstractOutbreak outbreak = (AbstractOutbreak)xo.getElementFirstChild(OUTBREAK);
+        PartitionedTreeModel treeModel;
+
+        if(xo.hasAttribute(STARTING_TT_FILE)){
+            treeModel = new PartitionedTreeModel(xo.getId(), tree, outbreak, xo.getStringAttribute(STARTING_TT_FILE));
+        } else {
+            treeModel = new PartitionedTreeModel(xo.getId(), tree, outbreak);
+        }
 
         Logger.getLogger("dr.evomodel").info("Creating the partitioned tree model, '" + xo.getId() + "'");
 
@@ -260,7 +273,9 @@ public class PartitionedTreeModelParser extends AbstractXMLObjectParser {
                     ParameterParser.replaceParameter(cxo, parameter);
 
                 } else {
-                    throw new XMLParseException("illegal child element in " + getParserName() + ": " + cxo.getName());
+                    if(!cxo.getName().equals(OUTBREAK)) {
+                        throw new XMLParseException("illegal child element in " + getParserName() + ": " + cxo.getName());
+                    }
                 }
 
             } else if (xo.getChild(i) instanceof Tree) {
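
To summarise the parser change above: the outbreak child element is now required, and the optional startingTransmissionTreeFile attribute selects between the two new PartitionedTreeModel constructors. The following is only an illustrative sketch of that selection, not code from this commit; the class and method names are invented, and the id, tree, outbreak and file name are assumed to have been read from the XML already.

import dr.evolution.tree.Tree;
import dr.evomodel.epidemiology.casetocase.AbstractOutbreak;
import dr.evomodel.epidemiology.casetocase.PartitionedTreeModel;

// Hypothetical helper mirroring the constructor choice made in parseXMLObject() above.
public class PartitionedTreeModelFactorySketch {

    public static PartitionedTreeModel create(String id, Tree tree, AbstractOutbreak outbreak,
                                              String startingTTFileName) {
        if (startingTTFileName != null) {
            // a starting transmission tree file was supplied: partition the tree to match it
            return new PartitionedTreeModel(id, tree, outbreak, startingTTFileName);
        }
        // otherwise a random starting partition compatible with the tree is generated
        return new PartitionedTreeModel(id, tree, outbreak);
    }
}
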
diff --git a/src/dr/evomodel/epidemiology/casetocase/WithinCaseCoalescent.java b/src/dr/evomodel/epidemiology/casetocase/WithinCaseCoalescent.java
index 47657e2..1dbba8f 100644
--- a/src/dr/evomodel/epidemiology/casetocase/WithinCaseCoalescent.java
+++ b/src/dr/evomodel/epidemiology/casetocase/WithinCaseCoalescent.java
@@ -31,11 +31,8 @@ import dr.evolution.tree.FlexibleNode;
 import dr.evolution.tree.FlexibleTree;
 import dr.evolution.tree.NodeRef;
 import dr.evolution.tree.Tree;
-import dr.evolution.util.Taxon;
-import dr.evolution.util.TaxonList;
-import dr.evolution.util.Units;
+import dr.evolution.util.*;
 import dr.evomodel.coalescent.DemographicModel;
-import dr.evomodel.epidemiology.casetocase.periodpriors.AbstractPeriodPriorDistribution;
 import dr.evomodel.tree.TreeModel;
 import dr.inference.loggers.LogColumn;
 import dr.inference.model.Model;
@@ -64,18 +61,17 @@ public class WithinCaseCoalescent extends CaseToCaseTreeLikelihood {
     private double[] partitionTreeLogLikelihoods;
     private double[] storedPartitionTreeLogLikelihoods;
     private boolean[] recalculateCoalescentFlags;
-    private HashMap<AbstractCase,Treelet> partitionsAsTrees;
-    private HashMap<AbstractCase,Treelet> storedPartitionsAsTrees;
     private DemographicModel demoModel;
     private Mode mode;
 
     private double coalescencesLogLikelihood;
     private double storedCoalescencesLogLikelihood;
 
+    private boolean pleaseReExplode = true;
 
-    public WithinCaseCoalescent(PartitionedTreeModel virusTree, AbstractOutbreak caseData,
-                                String startingNetworkFileName, Parameter maxFirstInfToRoot, DemographicModel demoModel,
-                                Mode mode)
+
+    public WithinCaseCoalescent(PartitionedTreeModel virusTree, AbstractOutbreak caseData, Parameter maxFirstInfToRoot,
+                                DemographicModel demoModel, Mode mode)
             throws TaxonList.MissingTaxonException {
 
         super(WITHIN_CASE_COALESCENT, virusTree, caseData, maxFirstInfToRoot);
@@ -86,18 +82,17 @@ public class WithinCaseCoalescent extends CaseToCaseTreeLikelihood {
         partitionTreeLogLikelihoods = new double[outbreak.getCases().size()];
         storedPartitionTreeLogLikelihoods = new double[outbreak.getCases().size()];
         recalculateCoalescentFlags = new boolean[outbreak.getCases().size()];
+        Arrays.fill(recalculateCoalescentFlags, true);
 
-        partitionsAsTrees = new HashMap<AbstractCase, Treelet>();
+        elementsAsTrees = new HashMap<AbstractCase, Treelet>();
         for(AbstractCase aCase: outbreak.getCases()){
             if(aCase.wasEverInfected()){
-                partitionsAsTrees.put(aCase, null);
+                elementsAsTrees.put(aCase, null);
             }
         }
 
-        storedPartitionsAsTrees = new HashMap<AbstractCase, Treelet>();
 
-
-        prepareTree(startingNetworkFileName);
+        storedElementsAsTrees = new HashMap<AbstractCase, Treelet>();
 
     }
 
@@ -105,13 +100,14 @@ public class WithinCaseCoalescent extends CaseToCaseTreeLikelihood {
 
         //checkPartitions();
 
-        double logL = 0;
+        if(pleaseReExplode){
+            explodeTree();
+        }
 
-        explodeTree();
+        double logL = 0;
 
         coalescencesLogLikelihood = 0;
 
-
         for(AbstractCase aCase : outbreak.getCases()){
 
             int number = outbreak.getCaseIndex(aCase);
@@ -120,20 +116,16 @@ public class WithinCaseCoalescent extends CaseToCaseTreeLikelihood {
 
                 // and then the little tree calculations
 
-                HashSet<AbstractCase> children = ((PartitionedTreeModel)treeModel).getInfectees(aCase);
-
                 if (recalculateCoalescentFlags[number]) {
-                    Treelet treelet = partitionsAsTrees.get(aCase);
 
-                    if (children.size() != 0) {
+                    Treelet treelet = elementsAsTrees.get(aCase);
+
+                    if (treelet.getExternalNodeCount() > 1) {
                         SpecifiedZeroCoalescent coalescent = new SpecifiedZeroCoalescent(treelet, demoModel,
                                 treelet.getZeroHeight(), mode == Mode.TRUNCATE);
                         partitionTreeLogLikelihoods[number] = coalescent.calculateLogLikelihood();
                         coalescencesLogLikelihood += partitionTreeLogLikelihoods[number];
-                        if (DEBUG && partitionTreeLogLikelihoods[number] == Double.POSITIVE_INFINITY) {
-                            debugOutputTree("infCoalescent.nex", false);
-                            debugTreelet(treelet, aCase + "_partition.nex");
-                        }
+
                     } else {
                         partitionTreeLogLikelihoods[number] = 0.0;
                     }
@@ -150,17 +142,12 @@ public class WithinCaseCoalescent extends CaseToCaseTreeLikelihood {
 
         likelihoodKnown = true;
 
-        if(DEBUG){
-            debugOutputTree("outstandard.nex", false);
-            debugOutputTree("outfancy.nex", true);
-        }
-
         return logL;
     }
 
     public void storeState(){
         super.storeState();
-        storedPartitionsAsTrees = new HashMap<AbstractCase, Treelet>(partitionsAsTrees);
+        storedElementsAsTrees = new HashMap<AbstractCase, Treelet>(elementsAsTrees);
         storedPartitionTreeLogLikelihoods = Arrays.copyOf(partitionTreeLogLikelihoods,
                 partitionTreeLogLikelihoods.length);
 
@@ -171,7 +158,7 @@ public class WithinCaseCoalescent extends CaseToCaseTreeLikelihood {
 
     public void restoreState(){
         super.restoreState();
-        partitionsAsTrees = storedPartitionsAsTrees;
+        elementsAsTrees = storedElementsAsTrees;
         partitionTreeLogLikelihoods = storedPartitionTreeLogLikelihoods;
 
 
@@ -192,7 +179,7 @@ public class WithinCaseCoalescent extends CaseToCaseTreeLikelihood {
                     recalculateCaseWCC(aCase);
                 }
             }
-        } else if(model == branchMap){
+        } else if(model == getBranchMap()){
             if(object instanceof ArrayList){
 
                 for(int i=0; i<((ArrayList) object).size(); i++){
@@ -206,7 +193,7 @@ public class WithinCaseCoalescent extends CaseToCaseTreeLikelihood {
                     NodeRef parent = treeModel.getParent(node);
 
                     if(parent!=null){
-                        recalculateCaseWCC(branchMap.get(parent.getNumber()));
+                        recalculateCaseWCC(getBranchMap().get(parent.getNumber()));
                     }
                 }
             } else {
@@ -230,7 +217,8 @@ public class WithinCaseCoalescent extends CaseToCaseTreeLikelihood {
     }
 
     protected void recalculateCaseWCC(int index){
-        partitionsAsTrees.put(outbreak.getCase(index), null);
+        elementsAsTrees.put(outbreak.getCase(index), null);
+        pleaseReExplode = true;
         recalculateCoalescentFlags[index] = true;
     }
 
@@ -246,58 +234,16 @@ public class WithinCaseCoalescent extends CaseToCaseTreeLikelihood {
         Arrays.fill(recalculateCoalescentFlags, true);
         for(AbstractCase aCase : outbreak.getCases()){
             if(aCase.wasEverInfected()) {
-                partitionsAsTrees.put(aCase, null);
+                elementsAsTrees.put(aCase, null);
             }
         }
+        pleaseReExplode = true;
     }
 
     // Tears the tree into small pieces. Indexes correspond to indexes in the outbreak.
 
-    private void explodeTree(){
-        if(DEBUG){
-            debugOutputTree("test.nex", false);
-        }
-        for(int i=0; i<outbreak.size(); i++){
-            AbstractCase aCase = outbreak.getCase(i);
-            if(aCase.wasEverInfected() && partitionsAsTrees.get(aCase)==null){
-
-                NodeRef partitionRoot = ((PartitionedTreeModel)treeModel).getEarliestNodeInPartition(aCase);
-
-                double extraHeight;
-
-                if(treeModel.isRoot(partitionRoot)){
-                    extraHeight = maxFirstInfToRoot.getParameterValue(0) * aCase.getInfectionBranchPosition().getParameterValue(0);
-                } else {
-                    extraHeight = treeModel.getBranchLength(partitionRoot) * aCase.getInfectionBranchPosition().getParameterValue(0);
-                }
-
-                FlexibleNode newRoot = new FlexibleNode();
-
-                FlexibleTree littleTree = new FlexibleTree(newRoot);
-                littleTree.beginTreeEdit();
-
-                if (!treeModel.isExternal(partitionRoot)) {
-                    for (int j = 0; j < treeModel.getChildCount(partitionRoot); j++) {
-                        copyPartitionToTreelet(littleTree, treeModel.getChild(partitionRoot, j), newRoot, aCase);
-                    }
-                }
-
-                littleTree.endTreeEdit();
-
-                littleTree.resolveTree();
-
-                Treelet treelet = new Treelet(littleTree,
-                        littleTree.getRootHeight() + extraHeight);
-
-                partitionsAsTrees.put(aCase, treelet);
-
-
-            }
-        }
-    }
-
     public ArrayList<AbstractCase> postOrderTransmissionTreeTraversal(){
-        return traverseTransmissionTree(branchMap.get(treeModel.getRoot().getNumber()));
+        return traverseTransmissionTree(getBranchMap().get(treeModel.getRoot().getNumber()));
     }
 
     private ArrayList<AbstractCase> traverseTransmissionTree(AbstractCase aCase){
@@ -314,54 +260,7 @@ public class WithinCaseCoalescent extends CaseToCaseTreeLikelihood {
         return out;
     }
 
-    private void copyPartitionToTreelet(FlexibleTree littleTree, NodeRef oldNode, NodeRef newParent,
-                                        AbstractCase partition){
-        if(partition.wasEverInfected()) {
-            if (branchMap.get(oldNode.getNumber()) == partition) {
-                if (treeModel.isExternal(oldNode)) {
-                    NodeRef newTip = new FlexibleNode(new Taxon(treeModel.getNodeTaxon(oldNode).getId()));
-                    littleTree.addChild(newParent, newTip);
-                    littleTree.setBranchLength(newTip, treeModel.getBranchLength(oldNode));
-                } else {
-                    NodeRef newChild = new FlexibleNode();
-                    littleTree.addChild(newParent, newChild);
-                    littleTree.setBranchLength(newChild, treeModel.getBranchLength(oldNode));
-                    for (int i = 0; i < treeModel.getChildCount(oldNode); i++) {
-                        copyPartitionToTreelet(littleTree, treeModel.getChild(oldNode, i), newChild, partition);
-                    }
-                }
-            } else {
-                // we need a new tip
-                NodeRef transmissionTip = new FlexibleNode(
-                        new Taxon("Transmission_" + branchMap.get(oldNode.getNumber()).getName()));
-                double parentTime = getNodeTime(treeModel.getParent(oldNode));
-                double childTime = getInfectionTime(branchMap.get(oldNode.getNumber()));
-                littleTree.addChild(newParent, transmissionTip);
-                littleTree.setBranchLength(transmissionTip, childTime - parentTime);
-            }
-        }
-    }
 
-    private class Treelet extends FlexibleTree {
-
-        private double zeroHeight;
-
-        private Treelet(FlexibleTree tree, double zeroHeight){
-            super(tree);
-            this.zeroHeight = zeroHeight;
-
-        }
-
-        private double getZeroHeight(){
-            return zeroHeight;
-        }
-
-
-
-        private void setZeroHeight(double rootBranchLength){
-            this.zeroHeight = zeroHeight;
-        }
-    }
 
     private Treelet transformTreelet(Treelet treelet){
 
@@ -454,7 +353,7 @@ public class WithinCaseCoalescent extends CaseToCaseTreeLikelihood {
                 final double intervalArea = demographicFunction.getIntegral(startTime, finishTime);
                 final double normalisationArea = demographicFunction.getIntegral(startTime, 0);
 
-                if (intervalArea == 0 && duration != 0) {
+                if (intervalArea == 0 && duration > tolerance) {
                     return Double.NEGATIVE_INFINITY;
                 }
 
@@ -616,12 +515,6 @@ public class WithinCaseCoalescent extends CaseToCaseTreeLikelihood {
 
             PartitionedTreeModel virusTree = (PartitionedTreeModel) xo.getChild(TreeModel.class);
 
-            String startingNetworkFileName=null;
-
-            if(xo.hasChildNamed(STARTING_NETWORK)){
-                startingNetworkFileName = (String) xo.getElementFirstChild(STARTING_NETWORK);
-            }
-
             AbstractOutbreak caseSet = (AbstractOutbreak) xo.getChild(AbstractOutbreak.class);
 
             CaseToCaseTreeLikelihood likelihood;
@@ -633,8 +526,7 @@ public class WithinCaseCoalescent extends CaseToCaseTreeLikelihood {
             Mode mode = xo.hasAttribute(TRUNCATE) & xo.getBooleanAttribute(TRUNCATE) ? Mode.TRUNCATE : Mode.NORMAL;
 
             try {
-                likelihood = new WithinCaseCoalescent(virusTree, caseSet, startingNetworkFileName,
-                        earliestFirstInfection, demoModel, mode);
+                likelihood = new WithinCaseCoalescent(virusTree, caseSet, earliestFirstInfection, demoModel, mode);
             } catch (TaxonList.MissingTaxonException e) {
                 throw new XMLParseException(e.toString());
             }
diff --git a/src/dr/evomodel/epidemiology/casetocase/operators/InfectionBranchMovementOperator.java b/src/dr/evomodel/epidemiology/casetocase/operators/InfectionBranchMovementOperator.java
index f2a8315..0d96ba6 100644
--- a/src/dr/evomodel/epidemiology/casetocase/operators/InfectionBranchMovementOperator.java
+++ b/src/dr/evomodel/epidemiology/casetocase/operators/InfectionBranchMovementOperator.java
@@ -28,6 +28,7 @@ package dr.evomodel.epidemiology.casetocase.operators;
 import dr.evolution.tree.NodeRef;
 import dr.evomodel.epidemiology.casetocase.*;
 import dr.inference.operators.MCMCOperator;
+import dr.inference.operators.OperatorFailedException;
 import dr.inference.operators.SimpleMCMCOperator;
 import dr.math.MathUtils;
 import dr.xml.*;
@@ -62,54 +63,64 @@ public class InfectionBranchMovementOperator extends SimpleMCMCOperator{
     /*  Switch the partition of a randomly selected internal node from the painting of one of its children to the
     * painting of the other, and adjust the rest of the tree to ensure the result still obeys partition rules.*/
 
-    public double doOperation(){
-
-        if(DEBUG){
-            c2cLikelihood.debugOutputTree("before.nex", false);
-        }
+    public double doOperation() throws OperatorFailedException{
 
         PartitionedTreeModel tree = c2cLikelihood.getTreeModel();
         BranchMapModel branchMap = c2cLikelihood.getBranchMap();
-        int externalNodeCount = tree.getExternalNodeCount();
+
         // find a case whose infection event we are going to move about
-        int nodeToSwitch = MathUtils.nextInt(externalNodeCount);
+        int caseIndexToAdjust = MathUtils.nextInt(c2cLikelihood.getOutbreak().size());
+
+        AbstractCase aCase = c2cLikelihood.getOutbreak().getCase(caseIndexToAdjust);
+
         // if the infection event is the seed of the epidemic, we need to try again
-        while(branchMap.get(tree.getRoot().getNumber())==branchMap.get(tree.getExternalNode(nodeToSwitch).getNumber())){
-            nodeToSwitch = MathUtils.nextInt(externalNodeCount);
+        while(branchMap.get(tree.getRoot().getNumber()) == aCase || !aCase.wasEverInfected()){
+            caseIndexToAdjust = MathUtils.nextInt(c2cLikelihood.getOutbreak().size());
+            aCase = c2cLikelihood.getOutbreak().getCase(caseIndexToAdjust);
         }
+
         // find the child node of the transmission branch
-        NodeRef node = tree.getExternalNode(nodeToSwitch);
-        while(branchMap.get(node.getNumber())==branchMap.get(tree.getParent(node).getNumber())){
-            node = tree.getParent(node);
-        }
-        double hr = adjustTree(tree, node, branchMap);
+        NodeRef node = tree.getEarliestNodeInElement(aCase);
 
-        if(DEBUG){
-            c2cLikelihood.debugOutputTree("after.nex", false);
-        }
+        double hr = adjustTree(tree, node);
 
         return hr;
     }
 
 
-    private double adjustTree(PartitionedTreeModel tree, NodeRef node, BranchMapModel map){
-        // are we going up or down? If we're not extended then all moves are down. External nodes have to move down.
+    private double adjustTree(PartitionedTreeModel tree, NodeRef node)
+            throws OperatorFailedException{
         double out;
 
+        BranchMapModel map = tree.getBranchMap();
+
+        AbstractCase infectedCase = map.get(node.getNumber());
+        AbstractCase infectorCase = map.get(tree.getParent(node).getNumber());
+
+        NodeRef infectedMRCA = tree.caseMRCA(infectedCase);
 
+        boolean downIsPossible = node != infectedMRCA;
+        boolean upIsPossible = !(tree.isRootBlockedBy(infectedCase, infectorCase)
+                && tree.isAncestral(tree.getParent(node)));
 
-        if(tree.isExternal(node) || MathUtils.nextBoolean()){
-            out = moveDown(tree, node, map);
+        if(upIsPossible && downIsPossible){
+            out = MathUtils.nextBoolean() ? moveUp(tree, node) : moveDown(tree, node);
+        } else if(upIsPossible){
+            out = moveUp(tree, node);
+        } else if(downIsPossible){
+            out = moveDown(tree, node);
         } else {
-            out = moveUp(tree, node, map);
+            throw new OperatorFailedException("Chosen infection event cannot be adjusted in this tree");
         }
+
         if(DEBUG){
             c2cLikelihood.getTreeModel().checkPartitions();
         }
         return out;
     }
 
-    private double moveDown(PartitionedTreeModel tree, NodeRef node, BranchMapModel map){
+    private double moveUp(PartitionedTreeModel tree, NodeRef node){
+        BranchMapModel map = tree.getBranchMap();
 
         AbstractCase infectedCase = map.get(node.getNumber());
 
@@ -119,8 +130,6 @@ public class InfectionBranchMovementOperator extends SimpleMCMCOperator{
 
         double hr = 0;
 
-        assert map.get(parent.getNumber()) == map.get(node.getNumber()) : "Partition problem";
-
         NodeRef sibling = node;
         for(int i=0; i<tree.getChildCount(parent); i++){
             if(tree.getChild(parent, i)!=node){
@@ -130,7 +139,10 @@ public class InfectionBranchMovementOperator extends SimpleMCMCOperator{
 
         AbstractCase infectorCase = map.get(parent.getNumber());
 
-        if(c2cLikelihood.isAncestral(parent)){
+        NodeRef infectedMRCA = tree.caseMRCA(infectedCase);
+        NodeRef infectorMRCA = tree.caseMRCA(infectorCase);
+
+        if(c2cLikelihood.getTreeModel().isAncestral(parent)){
 
             if(resampleInfectionTimes){
                 infectorCase.setInfectionBranchPosition(MathUtils.nextDouble());
@@ -138,28 +150,39 @@ public class InfectionBranchMovementOperator extends SimpleMCMCOperator{
 
             NodeRef grandparent = tree.getParent(parent);
             if(grandparent!=null && map.get(grandparent.getNumber())==map.get(parent.getNumber())){
-                for(Integer ancestor: c2cLikelihood.getTreeModel().samePartitionDownTree(parent)){
+                for(Integer ancestor: c2cLikelihood.getTreeModel().samePartitionElementUpTree(parent)){
                     newMap[ancestor] = map.get(node.getNumber());
                 }
                 newMap[grandparent.getNumber()]=map.get(node.getNumber());
             }
 
-            hr += tree.isExternal(sibling) ? Math.log(2) : 0;
-            hr += tree.isExternal(node) ? Math.log(0.5) : 0;
+            hr += node == infectedMRCA ? Math.log(0.5) : 0;
 
         } else {
             if(map.get(sibling.getNumber())==map.get(parent.getNumber())){
-                for(Integer descendant: c2cLikelihood.getTreeModel().samePartitionUpTree(sibling)){
+                for(Integer descendant: c2cLikelihood.getTreeModel().samePartitionElementDownTree(sibling)){
                     newMap[descendant]=map.get(node.getNumber());
                 }
                 newMap[sibling.getNumber()]=map.get(node.getNumber());
             }
 
-            hr += tree.isExternal(node) ? Math.log(0.5) : 0;
+            hr += node == infectedMRCA ? Math.log(0.5) : 0;
         }
         newMap[parent.getNumber()]=map.get(node.getNumber());
         map.setAll(newMap, false);
 
+        //HR adjustments for reverse moves
+        if(c2cLikelihood.getTreeModel().isAncestral(parent)){
+            hr += sibling == infectorMRCA ? Math.log(2) : 0;
+        } else {
+            NodeRef grandparent = tree.getParent(parent);
+
+            hr += tree.isRootBlockedBy(infectedCase, infectorCase)
+                    && tree.isAncestral(grandparent) ? Math.log(2) : 0;
+
+        }
+
+
         if(resampleInfectionTimes){
             infectedCase.setInfectionBranchPosition(MathUtils.nextDouble());
         }
@@ -167,9 +190,11 @@ public class InfectionBranchMovementOperator extends SimpleMCMCOperator{
         return hr;
     }
 
-    private double moveUp(PartitionedTreeModel tree, NodeRef node, BranchMapModel map){
+    private double moveDown(PartitionedTreeModel tree, NodeRef node){
+        BranchMapModel map = tree.getBranchMap();
 
         AbstractCase infectedCase = map.get(node.getNumber());
+        AbstractCase infectorCase = map.get(tree.getParent(node).getNumber());
 
         AbstractCase[] newMap = map.getArrayCopy();
 
@@ -177,23 +202,27 @@ public class InfectionBranchMovementOperator extends SimpleMCMCOperator{
 
         NodeRef parent = tree.getParent(node);
 
-        assert map.get(parent.getNumber()) == map.get(node.getNumber()) : "Partition problem";
+        NodeRef infectedMRCA = tree.caseMRCA(infectedCase);
+
         // check if either child is not ancestral (at most one is not, and if so it must have been in the same
         // partition as both the other child and 'node')
         for(int i=0; i<tree.getChildCount(node); i++){
             NodeRef child = tree.getChild(node, i);
-            if(!c2cLikelihood.isAncestral(child)){
-                assert map.get(child.getNumber()) == map.get(node.getNumber()) : "Partition problem";
-                for(Integer descendant: c2cLikelihood.getTreeModel().samePartitionUpTree(child)){
+            if(!c2cLikelihood.getTreeModel().isAncestral(child)){
+                for(Integer descendant: c2cLikelihood.getTreeModel().samePartitionElementDownTree(child)){
                     newMap[descendant]=map.get(parent.getNumber());
                 }
                 newMap[child.getNumber()]=map.get(parent.getNumber());
-            } else if(tree.isExternal(child) && map.get(child.getNumber())==map.get(node.getNumber())){
-                // we're moving a transmission event onto a terminal branch and need to adjust the HR accordingly
+            } else if(child == infectedMRCA && map.get(child.getNumber())==map.get(node.getNumber())){
+                // we're moving a transmission event as far down as it can go and need to adjust the HR accordingly
                 out += Math.log(2);
             }
         }
 
+        //if the up move was not possible, the down move was chosen with certainty; adjust the HR accordingly
+        out += tree.isRootBlockedBy(infectedCase, infectorCase)
+                && tree.isAncestral(parent) ? Math.log(0.5) : 0;
+
         if(resampleInfectionTimes){
             infectedCase.setInfectionBranchPosition(MathUtils.nextDouble());
         }
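
A note on the Math.log(2) and Math.log(0.5) terms accumulated in moveUp and moveDown above: these appear to be the usual Hastings corrections for proposals whose forward and reverse moves choose among different numbers of options. The helper below is a generic sketch of that bookkeeping; the class and method are invented for illustration and are not taken from this commit.

// Generic sketch of the Hastings correction pattern used above (illustrative only).
public final class HastingsChoiceCorrection {

    // If the forward move was picked uniformly from nForward options and the reverse
    // move would be picked uniformly from nReverse options, the log Hastings ratio
    // gains log(q(reverse)/q(forward)) = log(nForward/nReverse).
    public static double logRatio(int nForward, int nReverse) {
        return Math.log((double) nForward / nReverse);
    }

    // e.g. a forced forward move with a two-way reverse gives logRatio(1, 2) = log(0.5),
    // and a two-way forward with a forced reverse gives logRatio(2, 1) = log(2).
}
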
diff --git a/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionSubtreeSlideA.java b/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionSubtreeSlideA.java
index 9ad00a0..9eaeded 100644
--- a/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionSubtreeSlideA.java
+++ b/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionSubtreeSlideA.java
@@ -28,18 +28,15 @@ package dr.evomodel.epidemiology.casetocase.operators;
 import dr.evolution.tree.NodeRef;
 import dr.evolution.tree.Tree;
 import dr.evomodel.epidemiology.casetocase.AbstractCase;
-import dr.evomodel.epidemiology.casetocase.AbstractOutbreak;
 import dr.evomodel.epidemiology.casetocase.BranchMapModel;
 import dr.evomodel.epidemiology.casetocase.CaseToCaseTreeLikelihood;
 import dr.evomodel.operators.AbstractTreeOperator;
 import dr.evomodel.tree.TreeModel;
-import dr.inference.model.Parameter;
 import dr.inference.operators.*;
 import dr.math.MathUtils;
 import dr.xml.*;
 
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
 
 /**
@@ -98,7 +95,7 @@ public class TransmissionSubtreeSlideA extends AbstractTreeOperator implements C
     public double doOperation() throws OperatorFailedException {
 
         if(DEBUG){
-            c2cLikelihood.debugOutputTree("beforeTSSA.nex", false);
+            c2cLikelihood.outputTreeToFile("beforeTSSA.nex", false);
         }
 
         BranchMapModel branchMap = c2cLikelihood.getBranchMap();
@@ -113,7 +110,7 @@ public class TransmissionSubtreeSlideA extends AbstractTreeOperator implements C
 
         i = eligibleNodes.get(MathUtils.nextInt(eligibleNodes.size()));
 
-        int eligibleNodeCount = eligibleNodes.size();
+        double eligibleNodeCount = eligibleNodes.size();
 
 
         final NodeRef iP = tree.getParent(i);
@@ -348,10 +345,10 @@ public class TransmissionSubtreeSlideA extends AbstractTreeOperator implements C
 
         if (DEBUG){
             c2cLikelihood.getTreeModel().checkPartitions();
-            c2cLikelihood.debugOutputTree("afterTSSA.nex", false);
+            c2cLikelihood.outputTreeToFile("afterTSSA.nex", false);
         }
 
-        int reverseEligibleNodeCount = getEligibleNodes(tree, branchMap).size();
+        double reverseEligibleNodeCount = getEligibleNodes(tree, branchMap).size();
 
         logq += Math.log(eligibleNodeCount/reverseEligibleNodeCount);
 
@@ -392,18 +389,14 @@ public class TransmissionSubtreeSlideA extends AbstractTreeOperator implements C
 
     private int intersectingEdges(Tree tree, NodeRef node, double height, BranchMapModel branchMap,
                                   AbstractCase partition, List<NodeRef> directChildren) {
-
         final NodeRef parent = tree.getParent(node);
-
         if (tree.getNodeHeight(parent) < height || branchMap.get(parent.getNumber())!=partition) return 0;
-
         if (tree.getNodeHeight(node) < height) {
             if (directChildren != null){
                 directChildren.add(node);
             }
             return 1;
         }
-
         int count = 0;
         for (int i = 0; i < tree.getChildCount(node); i++) {
             count += intersectingEdges(tree, tree.getChild(node, i), height, branchMap, partition, directChildren);
diff --git a/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionSubtreeSlideB.java b/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionSubtreeSlideB.java
index 17f07b3..ffeb177 100644
--- a/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionSubtreeSlideB.java
+++ b/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionSubtreeSlideB.java
@@ -28,18 +28,15 @@ package dr.evomodel.epidemiology.casetocase.operators;
 import dr.evolution.tree.NodeRef;
 import dr.evolution.tree.Tree;
 import dr.evomodel.epidemiology.casetocase.AbstractCase;
-import dr.evomodel.epidemiology.casetocase.AbstractOutbreak;
 import dr.evomodel.epidemiology.casetocase.BranchMapModel;
 import dr.evomodel.epidemiology.casetocase.CaseToCaseTreeLikelihood;
 import dr.evomodel.operators.AbstractTreeOperator;
 import dr.evomodel.tree.TreeModel;
-import dr.inference.model.Parameter;
 import dr.inference.operators.*;
 import dr.math.MathUtils;
 import dr.xml.*;
 
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
 
 /**
@@ -98,7 +95,7 @@ public class TransmissionSubtreeSlideB extends AbstractTreeOperator implements C
 
 
         if(DEBUG){
-            c2cLikelihood.debugOutputTree("beforeTSSB.nex", false);
+            c2cLikelihood.outputTreeToFile("beforeTSSB.nex", false);
         }
 
 
@@ -212,7 +209,7 @@ public class TransmissionSubtreeSlideB extends AbstractTreeOperator implements C
                 // Randomly assign iP the partition of either its parent or the child that is not i, and adjust q
                 // appropriately
 
-                if(branchMap.get(PiP.getNumber())!=branchMap.get(CiP.getNumber())){
+                if(PiPCase != CiPCase){
                     logq += Math.log(0.5);
                 }
 
@@ -334,7 +331,7 @@ public class TransmissionSubtreeSlideB extends AbstractTreeOperator implements C
                 // Randomly assign iP the partition of either its parent or the child that is not i, and adjust q
                 // appropriately
 
-                if(PiP!=null && branchMap.get(PiP.getNumber())!=branchMap.get(CiP.getNumber())){
+                if(PiP!=null && PiPCase != CiPCase){
                     logq += Math.log(0.5);
                 }
 
@@ -400,7 +397,7 @@ public class TransmissionSubtreeSlideB extends AbstractTreeOperator implements C
 
         if (DEBUG) {
             c2cLikelihood.getTreeModel().checkPartitions();
-            c2cLikelihood.debugOutputTree("afterTSSB.nex", false);
+            c2cLikelihood.outputTreeToFile("afterTSSB.nex", false);
         }
 
 
diff --git a/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionWilsonBaldingA.java b/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionWilsonBaldingA.java
index d2ce726..c252b96 100644
--- a/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionWilsonBaldingA.java
+++ b/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionWilsonBaldingA.java
@@ -29,6 +29,7 @@ import dr.evolution.tree.NodeRef;
 import dr.evomodel.epidemiology.casetocase.AbstractCase;
 import dr.evomodel.epidemiology.casetocase.BranchMapModel;
 import dr.evomodel.epidemiology.casetocase.CaseToCaseTreeLikelihood;
+import dr.evomodel.epidemiology.casetocase.PartitionedTreeModel;
 import dr.evomodel.operators.AbstractTreeOperator;
 import dr.evomodel.tree.TreeModel;
 import dr.inference.operators.MCMCOperator;
@@ -76,7 +77,7 @@ public class TransmissionWilsonBaldingA extends AbstractTreeOperator {
     }
 
     public void proposeTree() throws OperatorFailedException {
-        TreeModel tree = c2cLikelihood.getTreeModel();
+        PartitionedTreeModel tree = c2cLikelihood.getTreeModel();
         BranchMapModel branchMap = c2cLikelihood.getBranchMap();
         NodeRef i;
         double oldMinAge, newMinAge, newRange, oldRange, newAge, q;
@@ -87,17 +88,19 @@ public class TransmissionWilsonBaldingA extends AbstractTreeOperator {
 
         i = eligibleNodes.get(MathUtils.nextInt(eligibleNodes.size()));
 
-        int eligibleNodeCount = eligibleNodes.size();
+        double eligibleNodeCount = eligibleNodes.size();
 
         final NodeRef iP = tree.getParent(i);
-        Integer[] samePaintings = c2cLikelihood.getTreeModel().samePartitionElement(iP);
+
+        Integer[] sameElements = tree.samePartitionElement(iP);
+
         HashSet<Integer> possibleDestinations = new HashSet<Integer>();
         // we can insert the node above OR BELOW any node in the same partition
-        for (Integer samePainting : samePaintings) {
-            possibleDestinations.add(samePainting);
-            if (!tree.isExternal(tree.getNode(samePainting))) {
-                possibleDestinations.add(tree.getChild(tree.getNode(samePainting), 0).getNumber());
-                possibleDestinations.add(tree.getChild(tree.getNode(samePainting), 1).getNumber());
+        for (Integer sameElement : sameElements) {
+            possibleDestinations.add(sameElement);
+            if (!tree.isExternal(tree.getNode(sameElement))) {
+                possibleDestinations.add(tree.getChild(tree.getNode(sameElement), 0).getNumber());
+                possibleDestinations.add(tree.getChild(tree.getNode(sameElement), 1).getNumber());
             }
         }
         Integer[] pd = possibleDestinations.toArray(new Integer[possibleDestinations.size()]);
@@ -210,7 +213,7 @@ public class TransmissionWilsonBaldingA extends AbstractTreeOperator {
         }
         logq = Math.log(q);
 
-        int reverseEligibleNodeCount = getEligibleNodes(tree, branchMap).size();
+        double reverseEligibleNodeCount = getEligibleNodes(tree, branchMap).size();
 
         logq += Math.log(eligibleNodeCount/reverseEligibleNodeCount);
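The widening of eligibleNodeCount and reverseEligibleNodeCount from int to double above is not cosmetic: the two counts are divided before being passed to Math.log, and with int operands that division would truncate. A tiny illustration in plain Java, with no BEAST types involved:

final class EligibleNodeRatioSketch {
    public static void main(String[] args) {
        int forwardInt = 7, reverseInt = 3;
        double forward = 7.0, reverse = 3.0;
        System.out.println(Math.log(forwardInt / reverseInt)); // log(2)   = 0.6931..., truncated
        System.out.println(Math.log(forward / reverse));       // log(7/3) = 0.8473..., intended
    }
}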
 
diff --git a/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionWilsonBaldingB.java b/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionWilsonBaldingB.java
index 2d7b165..7283ba7 100644
--- a/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionWilsonBaldingB.java
+++ b/src/dr/evomodel/epidemiology/casetocase/operators/TransmissionWilsonBaldingB.java
@@ -27,19 +27,15 @@ package dr.evomodel.epidemiology.casetocase.operators;
 
 import dr.evolution.tree.NodeRef;
 import dr.evomodel.epidemiology.casetocase.AbstractCase;
-import dr.evomodel.epidemiology.casetocase.AbstractOutbreak;
 import dr.evomodel.epidemiology.casetocase.BranchMapModel;
 import dr.evomodel.epidemiology.casetocase.CaseToCaseTreeLikelihood;
 import dr.evomodel.operators.AbstractTreeOperator;
 import dr.evomodel.tree.TreeModel;
-import dr.inference.model.Parameter;
 import dr.inference.operators.MCMCOperator;
 import dr.inference.operators.OperatorFailedException;
 import dr.math.MathUtils;
 import dr.xml.*;
 
-import java.util.HashMap;
-
 /**
  * Implements the Wilson-Balding branch swapping move if it moves an entire subtree of the transmission tree.
  *
@@ -68,7 +64,7 @@ public class TransmissionWilsonBaldingB extends AbstractTreeOperator {
     public double doOperation() throws OperatorFailedException {
 
         if(DEBUG){
-            c2cLikelihood.debugOutputTree("BeforeTWWB.nex", false);
+            c2cLikelihood.outputTreeToFile("BeforeTWWB.nex", false);
         }
 
         proposeTree();
@@ -80,7 +76,7 @@ public class TransmissionWilsonBaldingB extends AbstractTreeOperator {
         }
 
         if(DEBUG){
-            c2cLikelihood.debugOutputTree("AfterTWWB.nex", false);
+            c2cLikelihood.outputTreeToFile("AfterTWWB.nex", false);
         }
 
         return logq;
@@ -153,7 +149,7 @@ public class TransmissionWilsonBaldingB extends AbstractTreeOperator {
         oldRange = tree.getNodeHeight(PiP) - oldMinAge;
         q = newRange / Math.abs(oldRange);
 
-        // need to account for the random repainting of iP
+        // need to account for the random reassignment of iP
 
         if(branchMap.get(PiP.getNumber())!=branchMap.get(CiP.getNumber())){
             q *= 0.5;
diff --git a/src/dr/evomodel/operators/LatentFactorHamiltonianMC.java b/src/dr/evomodel/operators/LatentFactorHamiltonianMC.java
index 448a385..b2d84bb 100644
--- a/src/dr/evomodel/operators/LatentFactorHamiltonianMC.java
+++ b/src/dr/evomodel/operators/LatentFactorHamiltonianMC.java
@@ -1,13 +1,12 @@
 package dr.evomodel.operators;
 
 import dr.evomodel.continuous.FullyConjugateMultivariateTraitLikelihood;
-import dr.inference.model.LatentFactorModel;
-import dr.inference.model.MatrixParameter;
-import dr.inference.model.Parameter;
+import dr.inference.model.*;
 import dr.inference.operators.AbstractHamiltonianMCOperator;
 import dr.inference.operators.CoercionMode;
 import dr.inference.operators.OperatorFailedException;
 import dr.math.MathUtils;
+import java.util.Random;
 
 /**
  * Created by max on 12/2/15.
@@ -15,30 +14,32 @@ import dr.math.MathUtils;
 public class LatentFactorHamiltonianMC extends AbstractHamiltonianMCOperator{
     private LatentFactorModel lfm;
     private FullyConjugateMultivariateTraitLikelihood tree;
-    private MatrixParameter factors;
-    private MatrixParameter loadings;
-    private MatrixParameter Precision;
+    private MatrixParameterInterface factors;
+    private MatrixParameterInterface loadings;
+    private MatrixParameterInterface Precision;
     private int nfac;
     private int ntaxa;
     private int ntraits;
     private double stepSize;
     private int nSteps;
     private boolean diffusionSN=true;
+    private Random random;
 
 
     public LatentFactorHamiltonianMC(LatentFactorModel lfm, FullyConjugateMultivariateTraitLikelihood tree, double weight, CoercionMode mode, double stepSize, int nSteps, double momentumSd){
         super(mode, momentumSd);
         setWeight(weight);
-        this.lfm=lfm;
-        this.tree=tree;
-        this.factors=lfm.getFactors();
-        this.loadings=lfm.getLoadings();
-        this.Precision=lfm.getColumnPrecision();
-        nfac=lfm.getFactorDimension();
-        ntaxa=lfm.getFactors().getColumnDimension();
-        ntraits=Precision.getRowDimension();
-        this.stepSize=stepSize;
-        this.nSteps=nSteps;
+        this.lfm = lfm;
+        this.tree = tree;
+        this.factors = lfm.getFactors();
+        this.loadings = lfm.getLoadings();
+        this.Precision = lfm.getColumnPrecision();
+        nfac = lfm.getFactorDimension();
+        ntaxa = lfm.getFactors().getColumnDimension();
+        ntraits = Precision.getRowDimension();
+        this.stepSize = stepSize;
+        this.nSteps = nSteps;
+        random = new Random(555);
     }
 
 
@@ -69,32 +70,32 @@ public class LatentFactorHamiltonianMC extends AbstractHamiltonianMCOperator{
     }
 
     private double[] getMatrix(int element, double[] residual){
-        double answer[]=new double[this.nfac];
-        for (int i = 0; i <this.nfac ; i++) {
+        double answer[] = new double[this.nfac];
+        for (int i = 0; i < this.nfac ; i++) {
             for (int j = 0; j < ntraits; j++) {
-                answer[i] +=loadings.getParameterValue(i,j)*Precision.getParameterValue(j,j)*
-                        residual[j*ntaxa+element];
+                answer[i] += loadings.getParameterValue(j, i) * Precision.getParameterValue(j, j) *
+                        residual[j * ntaxa + element];
             }
         }
         return answer;
     }
 
     private double[] getGradient(int randel, double[] mean, double[][] prec, double precfactor){
-        double[] residual=lfm.getResidual();
-        double[] derivative=getMatrix(randel, residual);
+        double[] residual = lfm.getResidual();
+        double[] derivative = getMatrix(randel, residual);
 
         if(diffusionSN){
             for (int i = 0; i <mean.length ; i++) {
-                derivative[i]-=(factors.getParameterValue(i, randel)-mean[i])*precfactor;
+                derivative[i] -= (factors.getParameterValue(i, randel) - mean[i])*precfactor;
             }
         }
         else{
             for (int i = 0; i <mean.length ; i++) {
-                double sumi=0;
+                double sumi = 0;
                 for (int j = 0; j <mean.length ; j++) {
-                    sumi+=prec[i][j]*(factors.getParameterValue(j, randel)-mean[j]);
+                    sumi += prec[i][j]*(factors.getParameterValue(j, randel) - mean[j]);
                 }
-                derivative[i]-=sumi;
+                derivative[i] -= sumi;
             }
         }
         return derivative;
@@ -103,58 +104,75 @@ public class LatentFactorHamiltonianMC extends AbstractHamiltonianMCOperator{
     @Override
     public double doOperation() throws OperatorFailedException {
         int randel = MathUtils.nextInt(ntaxa);
+//        System.out.println(randel);
 
 
 
-        double[] mean=tree.getConditionalMean(randel);
-        double precfactor=0;
-        double[][] prec=null;
+        double[] mean = tree.getConditionalMean(randel);
+        double precfactor = 0;
+        double[][] prec = null;
+        double rand = random.nextDouble();
+//        System.out.println(rand);
+        double functionalStepSize = stepSize * rand;
+
         if(diffusionSN){
-            precfactor=tree.getPrecisionFactor(randel);
+            precfactor = tree.getPrecisionFactor(randel);
         }
         else {
             prec = tree.getConditionalPrecision(randel);
         }
 
-        double[] derivative=getGradient(randel, mean, prec, precfactor);
+        double[] derivative = getGradient(randel, mean, prec, precfactor);
         drawMomentum(lfm.getFactorDimension());
 
         double prop=0;
-        for (int i = 0; i <momentum.length ; i++) {
-            prop+=momentum[i]*momentum[i]/(2*getMomentumSd()*getMomentumSd());
+        for (int i = 0; i < momentum.length ; i++) {
+            prop += momentum[i]*momentum[i] / (2 * getMomentumSd()*getMomentumSd());
         }
 
         for (int i = 0; i <lfm.getFactorDimension() ; i++) {
-            momentum[i] = momentum[i] - stepSize / 2 * derivative[i];
+            momentum[i] = momentum[i] - functionalStepSize / 2 * derivative[i];
         }
 
         for (int i = 0; i <nSteps ; i++) {
             for (int j = 0; j <lfm.getFactorDimension() ; j++) {
-                factors.setParameterValueQuietly(j, randel, factors.getParameterValue(j,randel)+stepSize*momentum[j]);
+                factors.setParameterValueQuietly(j, randel, factors.getParameterValue(j, randel) + functionalStepSize * momentum[j] / (getMomentumSd() * getMomentumSd()));
             }
 //            System.out.println("randel");
 //            System.out.println(randel);
-            ((Parameter.Default) factors.getParameter(randel)).fireParameterChangedEvent(0, null);
+            if(factors instanceof FastMatrixParameter) {
+                for (int j = 0; j <factors.getParameter(randel).getDimension() ; j++) {
+                    factors.fireParameterChangedEvent(randel * factors.getRowDimension() + j, null);
+                }
+//                factors.fireParameterChangedEvent();
+            }
+            else{
+                for (int j = 0; j <factors.getParameter(randel).getDimension() ; j++) {
+                    factors.getParameter(randel).fireParameterChangedEvent(j, null);
+                }
+//                factors.getParameter(randel).fireParameterChangedEvent();
+            }
 
+//            factors.fireParameterChangedEvent();
 
             if(i!=nSteps){
-                derivative=getGradient(randel,mean,prec, precfactor);
+                derivative=getGradient(randel, mean, prec, precfactor);
 
-                for (int j = 0; j <lfm.getFactorDimension() ; j++) {
-                    momentum[j] = momentum[j] - stepSize * derivative[j];
+                for (int j = 0; j < lfm.getFactorDimension() ; j++) {
+                    momentum[j] = momentum[j] - functionalStepSize * derivative[j];
                 }
             }
         }
 
-        derivative=getGradient(randel,mean,prec, precfactor);
+        derivative = getGradient(randel, mean, prec, precfactor);
         for (int i = 0; i <lfm.getFactorDimension() ; i++) {
 
-            momentum[i] = momentum[i] - stepSize / 2 * derivative[i];
+            momentum[i] = momentum[i] - functionalStepSize / 2 * derivative[i];
         }
 
         double res=0;
         for (int i = 0; i <momentum.length ; i++) {
-            res+=momentum[i]*momentum[i]/(2*getMomentumSd()*getMomentumSd());
+            res += momentum[i] * momentum[i] / (2 * getMomentumSd() * getMomentumSd());
         }
         return prop-res;
     }
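doOperation() above is a leapfrog integrator: draw a fresh momentum, take a half momentum step against the gradient, alternate full position and momentum steps for nSteps, close with a half momentum step, and return the kinetic-energy difference prop - res for the Metropolis acceptance. The sketch below shows the same scheme against a hypothetical one-dimensional standard-normal target with unit momentum variance, so the division by getMomentumSd()^2 and the random step-size jitter used above are omitted.

import java.util.Random;

final class LeapfrogSketch {

    // gradient of the potential U(q) = q^2 / 2 for a standard-normal target
    static double gradU(double q) {
        return q;
    }

    public static void main(String[] args) {
        Random random = new Random(555);
        double q = 1.5;                      // position (one "factor" value)
        double p = random.nextGaussian();    // momentum drawn fresh for the proposal
        double stepSize = 0.1;
        int nSteps = 20;

        double prop = p * p / 2.0;           // kinetic energy before integration
        p -= stepSize / 2.0 * gradU(q);      // half step for momentum
        for (int i = 0; i < nSteps; i++) {
            q += stepSize * p;               // full step for position
            if (i != nSteps - 1) {
                p -= stepSize * gradU(q);    // full step for momentum
            }
        }
        p -= stepSize / 2.0 * gradU(q);      // closing half step
        double res = p * p / 2.0;            // kinetic energy after integration

        System.out.println("log acceptance contribution (prop - res) = " + (prop - res));
    }
}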
diff --git a/src/dr/evomodel/operators/LoadingsHamiltonianMC.java b/src/dr/evomodel/operators/LoadingsHamiltonianMC.java
new file mode 100644
index 0000000..2c66f9b
--- /dev/null
+++ b/src/dr/evomodel/operators/LoadingsHamiltonianMC.java
@@ -0,0 +1,155 @@
+package dr.evomodel.operators;
+
+import dr.evomodel.continuous.FullyConjugateMultivariateTraitLikelihood;
+import dr.inference.distribution.MomentDistributionModel;
+import dr.inference.model.*;
+import dr.inference.operators.AbstractHamiltonianMCOperator;
+import dr.inference.operators.CoercionMode;
+import dr.inference.operators.OperatorFailedException;
+import jebl.math.Random;
+
+/**
+ * Created by max on 1/11/16.
+ */
+public class LoadingsHamiltonianMC extends AbstractHamiltonianMCOperator {
+    private LatentFactorModel lfm;
+    private MomentDistributionModel prior;
+    private MatrixParameterInterface factors;
+    private MatrixParameterInterface loadings;
+    private MatrixParameterInterface Precision;
+    private int nfac;
+    private int ntaxa;
+    private int ntraits;
+    private double stepSize;
+    private int nSteps;
+
+
+    public LoadingsHamiltonianMC(LatentFactorModel lfm, MomentDistributionModel prior, double weight, CoercionMode mode, double stepSize, int nSteps, double momentumSd, MatrixParameterInterface loadings){
+        super(mode , momentumSd);
+        setWeight(weight);
+        this.lfm = lfm;
+        this.prior = prior;
+        this.factors = lfm.getFactors();
+        this.loadings = loadings;
+        this.Precision = lfm.getColumnPrecision();
+        nfac = lfm.getFactorDimension();
+        ntaxa = lfm.getFactors().getColumnDimension();
+        ntraits = Precision.getRowDimension();
+        this.stepSize = stepSize;
+        this.nSteps = nSteps;
+    }
+
+    @Override
+    public double getCoercableParameter() {
+        return 0;
+    }
+
+    @Override
+    public void setCoercableParameter(double value) {
+
+    }
+
+    @Override
+    public double getRawParameter() {
+        return 0;
+    }
+
+    @Override
+    public String getPerformanceSuggestion() {
+        return null;
+    }
+
+    @Override
+    public String getOperatorName() {
+        return "LoadingsHamiltonianMC";
+    }
+
+    @Override
+    public double doOperation() throws OperatorFailedException {
+
+
+        double[][] derivative = getGradient();
+        drawMomentum(lfm.getFactorDimension() * ntraits);
+        double functionalStepSize = stepSize;
+
+        double prop=0;
+        for (int i = 0; i < momentum.length ; i++) {
+            prop += momentum[i] * momentum[i] / (2 * getMomentumSd() * getMomentumSd());
+        }
+
+        for (int i = 0; i < lfm.getFactorDimension() ; i++) {
+            for (int j = 0; j < ntraits ; j++) {
+                momentum[i * ntraits + j] = momentum[i * ntraits + j] - functionalStepSize / 2 * derivative[j][i];
+            }
+
+        }
+
+        for (int i = 0; i <nSteps ; i++) {
+            for (int j = 0; j <lfm.getFactorDimension() ; j++) {
+                for (int k = 0; k <ntraits ; k++) {
+                    loadings.setParameterValueQuietly(k, j, loadings.getParameterValue(k, j) + functionalStepSize * momentum[j * ntraits + k]);
+                }
+
+            }
+            loadings.fireParameterChangedEvent(-1, Parameter.ChangeType.ALL_VALUES_CHANGED);
+
+
+            if(i != nSteps){
+                derivative = getGradient();
+
+                for (int j = 0; j < lfm.getFactorDimension() ; j++) {
+                    for (int k = 0; k <ntraits ; k++) {
+                        momentum[j * ntraits + k] = momentum[j * ntraits + k] - functionalStepSize * derivative[k][j];
+                    }
+
+                }
+            }
+        }
+
+        derivative=getGradient();
+        for (int i = 0; i < lfm.getFactorDimension() ; i++) {
+            for (int j = 0; j < ntraits ; j++) {
+                momentum[i * ntraits + j] = momentum[i * ntraits + j ] - functionalStepSize / 2 * derivative[j][i];
+            }
+
+        }
+        double res=0;
+        for (int i = 0; i <momentum.length ; i++) {
+            res+=momentum[i] * momentum[i] / (2 * getMomentumSd() * getMomentumSd());
+        }
+        return prop - res;
+    }
+
+
+
+    private double[][] getLFMDerivative(){
+        double[] residual=lfm.getResidual();
+        double[][] answer= new double[ntraits][lfm.getFactorDimension()];
+        for (int i = 0; i < ntaxa; i++) {
+            for (int j = 0; j < ntraits; j++) {
+                for (int k = 0; k < lfm.getFactorDimension() ; k++) {
+                    answer[j][k] -= residual[i * ntaxa + j] * factors.getParameterValue(k , i);
+                }
+            }
+
+        }
+        for (int i = 0; i < ntraits ; i++) {
+            for (int j = 0; j < lfm.getFactorDimension() ; j++) {
+                answer[i][j] *= Precision.getParameterValue(i , i);
+            }
+
+        }
+        return answer;
+    }
+
+    private double[][] getGradient(){
+        double[][] answer = getLFMDerivative();
+        for (int i = 0; i < loadings.getRowDimension(); i++) {
+            for (int j = 0; j < loadings.getColumnDimension(); j++) {
+                answer[i][j] += 2 / loadings.getParameterValue(i, j) + (loadings.getParameterValue(i, j) - prior.getMean()[0]) / prior.getScaleMatrix()[0][0];
+            }
+
+        }
+        return answer;
+    }
+
+}
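The new LoadingsHamiltonianMC above stores its momentum as one flat vector while the gradient comes back as a [trait][factor] matrix, pairing momentum[i * ntraits + j] with derivative[j][i]. The illustrative snippet below (marker values only, no BEAST types) shows the resulting factor-major, trait-minor layout.

import java.util.Arrays;

final class MomentumIndexingSketch {
    public static void main(String[] args) {
        int nfac = 2, ntraits = 3;

        double[][] derivative = new double[ntraits][nfac];
        for (int j = 0; j < ntraits; j++) {
            for (int i = 0; i < nfac; i++) {
                derivative[j][i] = 10 * j + i;          // distinct marker values
            }
        }

        double[] momentum = new double[nfac * ntraits];
        for (int i = 0; i < nfac; i++) {                // i: factor index
            for (int j = 0; j < ntraits; j++) {         // j: trait index
                momentum[i * ntraits + j] = derivative[j][i];
            }
        }

        // prints [0.0, 10.0, 20.0, 1.0, 11.0, 21.0]: factor-major, trait-minor
        System.out.println(Arrays.toString(momentum));
    }
}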
diff --git a/src/dr/evomodel/operators/SubtreeLeapOperator.java b/src/dr/evomodel/operators/SubtreeLeapOperator.java
index 0c5b311..f6bb04d 100644
--- a/src/dr/evomodel/operators/SubtreeLeapOperator.java
+++ b/src/dr/evomodel/operators/SubtreeLeapOperator.java
@@ -53,6 +53,7 @@ import java.util.Map;
 public class SubtreeLeapOperator extends AbstractTreeOperator implements CoercableMCMCOperator {
 
     private double size = 1.0;
+    private double accP = 0.234;
 
     private final TreeModel tree;
     private final CoercionMode mode;
@@ -65,10 +66,11 @@ public class SubtreeLeapOperator extends AbstractTreeOperator implements Coercab
      * @param size   scaling on a unit Gaussian to draw the patristic distance from
      * @param mode   coercion mode
      */
-    public SubtreeLeapOperator(TreeModel tree, double weight, double size, CoercionMode mode) {
+    public SubtreeLeapOperator(TreeModel tree, double weight, double size, double accP, CoercionMode mode) {
         this.tree = tree;
         setWeight(weight);
         this.size = size;
+        this.accP = accP;
         this.mode = mode;
     }
 
@@ -296,7 +298,7 @@ public class SubtreeLeapOperator extends AbstractTreeOperator implements Coercab
     }
 
     public double getTargetAcceptanceProbability() {
-        return 0.234;
+        return accP;
     }
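The hunks above thread a new accP argument through the SubtreeLeapOperator constructor so getTargetAcceptanceProbability() reports a configurable target instead of the hard-coded 0.234. The snippet below is not the BEAST coercion machinery, just a Robbins-Monro style illustration of what a different target does to the adaptive tuning of the operator's size.

final class TargetAcceptanceSketch {
    public static void main(String[] args) {
        double size = 1.0;      // the operator's tunable "size" parameter
        double accP = 0.1;      // user-chosen target acceptance probability
        double[] observedAcceptance = {0.40, 0.30, 0.20, 0.15, 0.12};
        for (double acc : observedAcceptance) {
            // accepting more often than the target -> take bolder leaps
            size *= Math.exp(0.1 * (acc - accP));
            System.out.printf("acceptance %.2f -> size %.3f%n", acc, size);
        }
    }
}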
 
 
diff --git a/src/dr/evomodel/speciation/AlloppSpeciesNetworkModel.java b/src/dr/evomodel/speciation/AlloppSpeciesNetworkModel.java
index bcd5256..a022e6c 100644
--- a/src/dr/evomodel/speciation/AlloppSpeciesNetworkModel.java
+++ b/src/dr/evomodel/speciation/AlloppSpeciesNetworkModel.java
@@ -44,6 +44,7 @@ import dr.math.MathUtils;
 import dr.util.Author;
 import dr.util.Citable;
 import dr.util.Citation;
+import dr.util.CommonCitations;
 import jebl.util.FixedBitSet;
 import java.util.*;
 import java.util.logging.Logger;
@@ -277,39 +278,33 @@ public class AlloppSpeciesNetworkModel extends AbstractModel implements
     /***********************************************************************************/
 
 
-    // Citable implementation
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.SPECIES_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Allopolyploid Species Networks";
+    }
+
     @Override
     public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(new Citation(
-                new Author[]{
-                        new Author("Graham", "Jones"),
-                        new Author("Serik", "Sagitov"),
-                        new Author("Bengt", "Oxelman")
-                },
-                "Statistical Inference of Allopolyploid Species Networks in the Presence of Incomplete Lineage Sorting",
-                2013,
-                "Systematic Biology",
-                62,
-                467,
-                478,
-                Citation.Status.PUBLISHED
-        ));
-
-        citations.add(new Citation(
-                new Author[]{
-                        new Author("Graham", "Jones")
-                },
-                "Bayesian phylogenetic analysis for diploid and allotetraploid species networks",
-                2013,
-                "",
-                -1,
-                -1,
-                -1,
-                Citation.Status.IN_PREPARATION
-        ));
-
-        return citations;
+        return Collections.singletonList(
+                new Citation(
+                        new Author[]{
+                                new Author("Graham", "Jones"),
+                                new Author("Serik", "Sagitov"),
+                                new Author("Bengt", "Oxelman")
+                        },
+                        "Statistical Inference of Allopolyploid Species Networks in the Presence of Incomplete Lineage Sorting",
+                        2013,
+                        "Systematic Biology",
+                        62,
+                        467,
+                        478,
+                        Citation.Status.PUBLISHED
+                ));
     }
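The citation changes above, repeated for most of the models and substitution matrices below, follow one pattern: the mutable build-a-list-and-add code is replaced by Collections.singletonList, and each model now also supplies a Citation.Category and a one-line description. A stripped-down sketch of the pattern follows; Citation and Citable here are hypothetical stand-ins, not the dr.util types.

import java.util.Collections;
import java.util.List;

final class CitableRefactorSketch {

    static final class Citation {
        final String authors, title;
        final int year;
        Citation(String authors, String title, int year) {
            this.authors = authors;
            this.title = title;
            this.year = year;
        }
        public String toString() { return authors + " (" + year + ") " + title; }
    }

    interface Citable {
        String getCategory();
        String getDescription();
        List<Citation> getCitations();
    }

    static final class SpeciesNetworkModel implements Citable {
        public String getCategory() { return "SPECIES_MODELS"; }
        public String getDescription() { return "Allopolyploid Species Networks"; }
        public List<Citation> getCitations() {
            // immutable single-entry list instead of new ArrayList + add + return
            return Collections.singletonList(new Citation(
                    "Jones, Sagitov and Oxelman",
                    "Statistical Inference of Allopolyploid Species Networks in the"
                            + " Presence of Incomplete Lineage Sorting", 2013));
        }
    }

    public static void main(String[] args) {
        Citable model = new SpeciesNetworkModel();
        System.out.println(model.getDescription() + " -> " + model.getCitations());
    }
}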
 
 
diff --git a/src/dr/evomodel/speciation/BirthDeathCollapseModel.java b/src/dr/evomodel/speciation/BirthDeathCollapseModel.java
index 8dd5bdd..eb205a7 100644
--- a/src/dr/evomodel/speciation/BirthDeathCollapseModel.java
+++ b/src/dr/evomodel/speciation/BirthDeathCollapseModel.java
@@ -59,9 +59,7 @@ import dr.util.Author;
 import dr.util.Citable;
 import dr.util.Citation;
 
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Set;
+import java.util.*;
 import java.util.logging.Logger;
 
 
@@ -175,23 +173,30 @@ public class BirthDeathCollapseModel extends SpeciationModel implements Citable
     }
 
     @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.SPECIES_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "DISSECT species delimitation model";
+    }
+
+    @Override
     public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-
-        citations.add(new Citation(
-                new Author[]{
-                        new Author("Graham", "Jones"),
-                        new Author("Bengt", "Oxelman")
-                },
-                "DISSECT: an assignment-free Bayesian discovery method for species delimitation under the multispecies coalescent",
-                2014,
-                "BIORXIV/2014/003178",
-                -1,
-                -1,
-                -1,
-                Citation.Status.IN_SUBMISSION
-        ));
-
-        return citations;
+        return Collections.singletonList(
+                new Citation(
+                        new Author[]{
+                                new Author("Graham", "Jones"),
+                                new Author("Bengt", "Oxelman")
+                        },
+                        "DISSECT: an assignment-free Bayesian discovery method for species delimitation under the multispecies coalescent",
+                        2014,
+                        "BIORXIV/2014/003178",
+                        -1,
+                        -1,
+                        -1,
+                        Citation.Status.IN_SUBMISSION
+                ));
     }
 }
diff --git a/src/dr/evomodel/speciation/BirthDeathGernhard08Model.java b/src/dr/evomodel/speciation/BirthDeathGernhard08Model.java
index d82c5fc..c806580 100644
--- a/src/dr/evomodel/speciation/BirthDeathGernhard08Model.java
+++ b/src/dr/evomodel/speciation/BirthDeathGernhard08Model.java
@@ -29,6 +29,14 @@ import dr.evolution.tree.NodeRef;
 import dr.evolution.tree.Tree;
 import dr.evomodelxml.speciation.BirthDeathModelParser;
 import dr.inference.model.Parameter;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+import dr.util.CommonCitations;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
 
 import static org.apache.commons.math.special.Gamma.logGamma;
 
@@ -49,7 +57,7 @@ import static org.apache.commons.math.special.Gamma.logGamma;
  * @author Joseph Heled
  *         Date: 24/02/2008
  */
-public class BirthDeathGernhard08Model extends UltrametricSpeciationModel {
+public class BirthDeathGernhard08Model extends UltrametricSpeciationModel implements Citable {
 
     public enum TreeType {
         UNSCALED,     // no coefficient 
@@ -71,7 +79,7 @@ public class BirthDeathGernhard08Model extends UltrametricSpeciationModel {
      *    lambda - mu
      */
     private Parameter birthDiffRateParameter;
-   
+
     private Parameter sampleProbability;
 
     private TreeType type;
@@ -106,8 +114,8 @@ public class BirthDeathGernhard08Model extends UltrametricSpeciationModel {
 
         this.relativeDeathRateParameter = relativeDeathRateParameter;
         if( relativeDeathRateParameter != null ) {
-          addVariable(relativeDeathRateParameter);
-          relativeDeathRateParameter.addBounds(new Parameter.DefaultBounds(1.0, 0.0, 1));
+            addVariable(relativeDeathRateParameter);
+            relativeDeathRateParameter.addBounds(new Parameter.DefaultBounds(1.0, 0.0, 1));
         }
 
         this.sampleProbability = sampleProbability;
@@ -132,8 +140,8 @@ public class BirthDeathGernhard08Model extends UltrametricSpeciationModel {
 
     @Override
     public double getMarginal(Tree tree, CalibrationPoints calibration) {
-       // Yule only
-       return calibration.getCorrection(tree, getR());
+        // Yule only
+        return calibration.getCorrection(tree, getR());
     }
 
     public double getR() {
@@ -198,9 +206,9 @@ public class BirthDeathGernhard08Model extends UltrametricSpeciationModel {
                 final double ca = 1 - a;
                 final double emrh = Math.exp(-mrh);
                 if( emrh != 1.0 ) {
-                  l = (tree.getTaxonCount() - 2) * Math.log(r * ca * (1 + ca /(emrh - 1)));
+                    l = (tree.getTaxonCount() - 2) * Math.log(r * ca * (1 + ca /(emrh - 1)));
                 } else {  // use exp(x)-1 = x for x near 0
-                  l = (tree.getTaxonCount() - 2) * Math.log(ca * (r + ca/height));
+                    l = (tree.getTaxonCount() - 2) * Math.log(ca * (r + ca/height));
                 }
             }
             return l;
@@ -210,4 +218,30 @@ public class BirthDeathGernhard08Model extends UltrametricSpeciationModel {
     public boolean includeExternalNodesInLikelihoodCalculation() {
         return false;
     }
-}
\ No newline at end of file
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TREE_PRIORS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Gernhard 2008 Birth Death Tree Model";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(new Citation(
+                new Author[]{
+                        new Author("T", "Gernhard"),
+                },
+                "The conditioned reconstructed process",
+                2008,
+                "Journal of Theoretical Biology",
+                253,
+                769, 778,
+                "10.1016/j.jtbi.2008.04.005"
+        ));
+    }
+}
+
diff --git a/src/dr/evomodel/speciation/BirthDeathSerialSamplingModel.java b/src/dr/evomodel/speciation/BirthDeathSerialSamplingModel.java
index d0a45d2..409a5c1 100644
--- a/src/dr/evomodel/speciation/BirthDeathSerialSamplingModel.java
+++ b/src/dr/evomodel/speciation/BirthDeathSerialSamplingModel.java
@@ -30,7 +30,12 @@ import dr.evolution.tree.Tree;
 import dr.evolution.util.Taxon;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
 
+import java.util.Collections;
+import java.util.List;
 import java.util.Set;
 
 /**
@@ -38,7 +43,7 @@ import java.util.Set;
  *
  * @author Alexei Drummond
  */
-public class BirthDeathSerialSamplingModel extends MaskableSpeciationModel {
+public class BirthDeathSerialSamplingModel extends MaskableSpeciationModel implements Citable {
 
     // R0
     Variable<Double> R0;
@@ -374,4 +379,29 @@ public class BirthDeathSerialSamplingModel extends MaskableSpeciationModel {
 
     // if a mask exists then use the mask's parameters instead (except for origin and finalTimeInterval)
     BirthDeathSerialSamplingModel mask = null;
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TREE_PRIORS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Gernhard 2008 Birth Death Tree Model";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(new Citation(
+                new Author[]{
+                        new Author("T", "Gernhard"),
+                },
+                "The conditioned reconstructed process",
+                2008,
+                "Journal of Theoretical Biology",
+                253,
+                769, 778,
+                "10.1016/j.jtbi.2008.04.005"
+        ));
+    }
 }
\ No newline at end of file
diff --git a/src/dr/evomodel/speciation/MulSpeciesTreeModel.java b/src/dr/evomodel/speciation/MulSpeciesTreeModel.java
index 988f15f..25578a9 100644
--- a/src/dr/evomodel/speciation/MulSpeciesTreeModel.java
+++ b/src/dr/evomodel/speciation/MulSpeciesTreeModel.java
@@ -53,7 +53,7 @@ import java.util.logging.Logger;
 
 /**
  * Multiply labelled species tree which includes demographic function per branch.
- * 
+ *
  * @author Joseph Heled, Graham Jones
  *         Date: 20/12/2011
  */
@@ -93,11 +93,11 @@ public class MulSpeciesTreeModel extends AbstractModel implements MutableTree, C
     private final boolean bmp;
     private final boolean nonConstRootPopulation;
     private final boolean constantPopulation;
-    
+
     // grj
-	public final static boolean DBUGTUNE = false;
+    public final static boolean DBUGTUNE = false;
+
 
-    
 
     private class NodeProperties {
         private final int speciesIndex;
@@ -112,8 +112,8 @@ public class MulSpeciesTreeModel extends AbstractModel implements MutableTree, C
     }
 
     public MulSpeciesTreeModel(MulSpeciesBindings species, Parameter sppSplitPopulations,
-                            Parameter coalPointsPops, Parameter coalPointsIndicator, Tree startTree,
-                            boolean bmp, boolean nonConstRootPopulation, boolean constantPopulation) {
+                               Parameter coalPointsPops, Parameter coalPointsIndicator, Tree startTree,
+                               boolean bmp, boolean nonConstRootPopulation, boolean constantPopulation) {
         super(MulSpeciesTreeModelParser.MUL_SPECIES_TREE);
 
         this.mulspb = species;
@@ -202,97 +202,97 @@ public class MulSpeciesTreeModel extends AbstractModel implements MutableTree, C
                 }
             }
         }
-        
+
         Logger.getLogger("dr.evomodel.speciation.allopolyploid").info("\tConstructing a multiply labelled tree, please cite:\n"
                 + Citable.Utils.getCitationString(this));
 
     }
 
-    
-    
+
+
     // grj
     private String nodeAsText(NodeRef node, int indentlen) {
-		StringBuilder s = new StringBuilder();
-		Formatter formatter = new Formatter(s, Locale.US);
-		if (spTree.isExternal(node)) {
-			formatter.format("%s ", spTree.getNodeTaxon(node));
-		} else {
-			formatter.format("%s ", "+");
-		}
-		while (s.length() < 20-indentlen) {
-			formatter.format("%s", " "); 
-		}
-		formatter.format("%s ", AlloppMisc.nonnegIn8Chars(spTree.getNodeHeight(node)));
+        StringBuilder s = new StringBuilder();
+        Formatter formatter = new Formatter(s, Locale.US);
+        if (spTree.isExternal(node)) {
+            formatter.format("%s ", spTree.getNodeTaxon(node));
+        } else {
+            formatter.format("%s ", "+");
+        }
+        while (s.length() < 20-indentlen) {
+            formatter.format("%s", " ");
+        }
+        formatter.format("%s ", AlloppMisc.nonnegIn8Chars(spTree.getNodeHeight(node)));
         // it would be nice to display popsizes and nlineages like allopp mul tree
-		return s.toString();
-	}
-    
-    
-  
+        return s.toString();
+    }
+
+
+
+    // grj
+    private String subtreeAsText(NodeRef node, String s, Stack<Integer> x, int depth, String b) {
+        Integer[] y = x.toArray(new Integer[x.size()]);
+        StringBuffer indent = new StringBuffer();
+        for (int i = 0; i < depth; i++) {
+            indent.append("  ");
+        }
+        for (int i = 0; i < y.length; i++) {
+            indent.replace(2*y[i], 2*y[i]+1, "|");
+        }
+        if (b.length() > 0) {
+            indent.replace(indent.length()-b.length(), indent.length(), b);
+        }
+        s += indent;
+        s += nodeAsText(node, indent.length());
+        s += System.getProperty("line.separator");
+        String subs = "";
+        if (!spTree.isExternal(node)) {
+            x.push(depth);
+            subs += subtreeAsText(spTree.getChild(node, 0), "", x, depth+1, "-");
+            x.pop();
+            subs += subtreeAsText(spTree.getChild(node, 1), "", x, depth+1, "`-");
+        }
+        return s + subs;
+    }
+
+
+
     // grj
-	private String subtreeAsText(NodeRef node, String s, Stack<Integer> x, int depth, String b) {
-		Integer[] y = x.toArray(new Integer[x.size()]);
-		StringBuffer indent = new StringBuffer();
-		for (int i = 0; i < depth; i++) {
-			indent.append("  ");
-		}
-		for (int i = 0; i < y.length; i++) {
-			indent.replace(2*y[i], 2*y[i]+1, "|");
-		}
-		if (b.length() > 0) {
-			indent.replace(indent.length()-b.length(), indent.length(), b);
-		}
-		s += indent;
-		s += nodeAsText(node, indent.length());
-		s += System.getProperty("line.separator");
-		String subs = "";
-		if (!spTree.isExternal(node)) {
-			x.push(depth);
-			subs += subtreeAsText(spTree.getChild(node, 0), "", x, depth+1, "-");
-			x.pop();
-			subs += subtreeAsText(spTree.getChild(node, 1), "", x, depth+1, "`-");
-		}
-		return s + subs;
-	}
-    
-    
-    
-	// grj
-	public String asText() {
-		String header = "topology             height" + System.getProperty("line.separator");
-
-		String s = "";
-		Stack<Integer> x = new Stack<Integer>();
-		return header + subtreeAsText(spTree.getRoot(), s, x, 0, "");
-	}
-    
-    
-	public String toString() {
-		int ngt = mulspb.numberOfGeneTrees();
-		String nl = System.getProperty("line.separator");
-		String s = nl + asText() + nl;
-		for (int g = 0; g < ngt; g++) {
-			s += "Gene tree " + g + nl;
-			s += mulspb.genetreeAsText(g) + nl;
-			s += mulspb.seqassignsAsText(g) + nl;
-		}
-		s += nl;
-		return s;
-	}
-    
-
-	
-	// grj
-	public LogColumn[] getColumns() {
-		LogColumn[] columns = new LogColumn[1];
-		columns[0] = new LogColumn.Default("    MUL-tree and gene trees", this);
-		return columns;
-	}
-    
-    
-	
-	
-    
+    public String asText() {
+        String header = "topology             height" + System.getProperty("line.separator");
+
+        String s = "";
+        Stack<Integer> x = new Stack<Integer>();
+        return header + subtreeAsText(spTree.getRoot(), s, x, 0, "");
+    }
+
+
+    public String toString() {
+        int ngt = mulspb.numberOfGeneTrees();
+        String nl = System.getProperty("line.separator");
+        String s = nl + asText() + nl;
+        for (int g = 0; g < ngt; g++) {
+            s += "Gene tree " + g + nl;
+            s += mulspb.genetreeAsText(g) + nl;
+            s += mulspb.seqassignsAsText(g) + nl;
+        }
+        s += nl;
+        return s;
+    }
+
+
+
+    // grj
+    public LogColumn[] getColumns() {
+        LogColumn[] columns = new LogColumn[1];
+        columns[0] = new LogColumn.Default("    MUL-tree and gene trees", this);
+        return columns;
+    }
+
+
+
+
+
     public boolean constPopulation() {
         return constantPopulation;
     }
@@ -306,8 +306,8 @@ public class MulSpeciesTreeModel extends AbstractModel implements MutableTree, C
         }
         return isSubtreeCompatible(getRoot(), geneTreeInfo.getCoalInfo(), 0) >= 0;
     }
-    
-    
+
+
 
     // Not very efficient, should do something better, based on traversing the cList once
     private int isSubtreeCompatible(NodeRef node, MulSpeciesBindings.CoalInfo[] cList, int loc) {
@@ -1538,16 +1538,24 @@ public class MulSpeciesTreeModel extends AbstractModel implements MutableTree, C
         return new Parameter.Default(dim, value);
     }
 
-	
-	public List<Citation> getCitations() {
-		List<Citation> citations = new ArrayList<Citation>();
-		citations.add(new Citation(
-				new Author[]{
-						new Author("GR", "Jones")
-				},
-				Citation.Status.IN_PREPARATION
-		));
-		return citations;
-	}
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.SPECIES_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Multiply labelled species tree";
+    }
+
+    public List<Citation> getCitations() {
+        return Arrays.asList(
+                new Citation(
+                        new Author[]{
+                                new Author("GR", "Jones")
+                        },
+                        Citation.Status.IN_PREPARATION
+                ));
+    }
 
 }
diff --git a/src/dr/evomodel/speciation/PopsIOSpeciesTreeModel.java b/src/dr/evomodel/speciation/PopsIOSpeciesTreeModel.java
index 247301f..b193549 100644
--- a/src/dr/evomodel/speciation/PopsIOSpeciesTreeModel.java
+++ b/src/dr/evomodel/speciation/PopsIOSpeciesTreeModel.java
@@ -333,11 +333,19 @@ public class PopsIOSpeciesTreeModel extends AbstractModel implements SlidableTre
                 + Citable.Utils.getCitationString(this));
     }
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.SPECIES_MODELS;
+    }
 
+    @Override
+    public String getDescription() {
+        return "Multi-species coalescent model";
+    }
 
     public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(new Citation(
+        return Arrays.asList(
+                new Citation(
                 new Author[]{
                         new Author("Graham", "Jones")
                 },
@@ -345,7 +353,6 @@ public class PopsIOSpeciesTreeModel extends AbstractModel implements SlidableTre
                 "??",  // journal
                 Citation.Status.IN_PREPARATION
         ));
-        return citations;
     }
 
 
diff --git a/src/dr/evomodel/speciation/SpeciesTreeModel.java b/src/dr/evomodel/speciation/SpeciesTreeModel.java
index 5fd7c0d..22392ad 100644
--- a/src/dr/evomodel/speciation/SpeciesTreeModel.java
+++ b/src/dr/evomodel/speciation/SpeciesTreeModel.java
@@ -40,6 +40,9 @@ import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
 import dr.inference.operators.OperatorFailedException;
 import dr.inference.operators.Scalable;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
 import dr.util.HeapSort;
 import jebl.util.FixedBitSet;
 
@@ -51,7 +54,8 @@ import java.util.*;
  * @author Joseph Heled
  *         Date: 24/05/2008
  */
-public class SpeciesTreeModel extends AbstractModel implements MutableTree, TreeTraitProvider, TreeLogger.LogUpon, Scalable {
+public class SpeciesTreeModel extends AbstractModel implements
+        MutableTree, TreeTraitProvider, TreeLogger.LogUpon, Scalable, Citable {
     private final SimpleTree spTree;
     private final SpeciesBindings species;
     private final Map<NodeRef, NodeProperties> props = new HashMap<NodeRef, NodeProperties>();
@@ -907,7 +911,7 @@ public class SpeciesTreeModel extends AbstractModel implements MutableTree, Tree
             final double treeHeight = tree.getRootHeight();
             if (treeHeight <= 0) {
                 tree.setRootHeight(1.0);
-                Utils.correctHeightsForTips(tree);
+                MutableTree.Utils.correctHeightsForTips(tree);
                 SimpleTree.Utils.scaleNodeHeights(tree, rootHeight / tree.getRootHeight());
             }
 
@@ -1420,4 +1424,29 @@ public class SpeciesTreeModel extends AbstractModel implements MutableTree, Tree
         return new Parameter.Default(dim, value);
     }
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.SPECIES_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "StarBEAST multi-locus species tree inference";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(new Citation(
+                new Author[]{
+                        new Author("J", "Heled"),
+                        new Author("AJ", "Drummond"),
+                },
+                "Bayesian Inference of Species Trees from Multilocus Data",
+                2010,
+                "Mol Biol Evol",
+                27, 570, 580,
+                "10.1093/molbev/msp274"
+        ));
+    }
+
 }
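One change above is easy to miss: correctHeightsForTips is now called through MutableTree.Utils rather than an unqualified Utils. That qualification is most likely needed because SpeciesTreeModel now also implements Citable, which carries a nested Utils of its own (see the Citable.Utils.getCitationString calls elsewhere in this diff), making the bare name ambiguous. The sketch below reproduces the situation with hypothetical stand-in interfaces.

final class NestedUtilsSketch {

    interface MutableTree {
        final class Utils {
            static String correctHeightsForTips() { return "MutableTree.Utils"; }
        }
    }

    interface Citable {
        final class Utils {
            static String correctHeightsForTips() { return "Citable.Utils"; }
        }
    }

    // In a class implementing both interfaces, a bare "Utils" would be
    // ambiguous; naming the owner resolves it.
    public static void main(String[] args) {
        System.out.println(MutableTree.Utils.correctHeightsForTips());
    }
}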
diff --git a/src/dr/evomodel/substmodel/Blosum62.java b/src/dr/evomodel/substmodel/Blosum62.java
index 29228ef..6e38411 100644
--- a/src/dr/evomodel/substmodel/Blosum62.java
+++ b/src/dr/evomodel/substmodel/Blosum62.java
@@ -29,8 +29,7 @@ import dr.evolution.datatype.AminoAcids;
 import dr.util.Author;
 import dr.util.Citation;
 
-import java.util.Arrays;
-import java.util.List;
+import java.util.*;
 
 /**
  * BLOSUM62 model of amino acid evolution
@@ -188,11 +187,22 @@ public class Blosum62 extends EmpiricalRateMatrix.AbstractAminoAcid {
 		setEmpiricalFrequencies(f, "ARNDCQEGHILKMFPSTWYV");
 	}
 
-    public List<Citation> getCitations() {
-        return Arrays.asList(CITATION);
-    }
+	@Override
+	public Citation.Category getCategory() {
+		return Citation.Category.SUBSTITUTION_MODELS;
+	}
+
+	@Override
+	public String getDescription() {
+		return "Blosum62 amino acid substitution model";
+	}
+
+	@Override
+	public List<Citation> getCitations() {
+		return Collections.singletonList(CITATION);
+	}
 
-    public static Citation CITATION = new Citation(
+	public static Citation CITATION = new Citation(
             new Author[]{
                     new Author("S", "Henikoff"),
                     new Author("JG", "Henikoff")
diff --git a/src/dr/evomodel/substmodel/CPREV.java b/src/dr/evomodel/substmodel/CPREV.java
index a59633d..fe06fa2 100644
--- a/src/dr/evomodel/substmodel/CPREV.java
+++ b/src/dr/evomodel/substmodel/CPREV.java
@@ -29,8 +29,7 @@ import dr.evolution.datatype.AminoAcids;
 import dr.util.Author;
 import dr.util.Citation;
 
-import java.util.Arrays;
-import java.util.List;
+import java.util.*;
 
 /**
  * CPREV 45 model of amino acid evolution
@@ -202,9 +201,20 @@ public class CPREV extends EmpiricalRateMatrix.AbstractAminoAcid {
 		setEmpiricalFrequencies(f, "ARNDCQEGHILKMFPSTWYV");
 	}
 
-    public List<Citation> getCitations() {
-        return Arrays.asList(CITATION);
-    }
+	@Override
+	public Citation.Category getCategory() {
+		return Citation.Category.SUBSTITUTION_MODELS;
+	}
+
+	@Override
+	public String getDescription() {
+		return "CPREV amino acid substitution model";
+	}
+
+	@Override
+	public List<Citation> getCitations() {
+		return Collections.singletonList(CITATION);
+	}
 
     public static Citation CITATION = new Citation(
             new Author[]{
diff --git a/src/dr/evomodel/substmodel/ComplexSubstitutionModel.java b/src/dr/evomodel/substmodel/ComplexSubstitutionModel.java
index 60dde65..6e27b13 100644
--- a/src/dr/evomodel/substmodel/ComplexSubstitutionModel.java
+++ b/src/dr/evomodel/substmodel/ComplexSubstitutionModel.java
@@ -38,10 +38,11 @@ import dr.inference.model.*;
 import dr.math.matrixAlgebra.Matrix;
 import dr.math.matrixAlgebra.RobustEigenDecomposition;
 import dr.math.matrixAlgebra.RobustSingularValueDecomposition;
+import dr.util.Citable;
+import dr.util.Citation;
+import dr.util.CommonCitations;
 
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.Set;
+import java.util.*;
 
 /**
  * <b>A general irreversible class for any
@@ -50,7 +51,7 @@ import java.util.Set;
  * @author Marc Suchard
  */
 
-public class ComplexSubstitutionModel extends AbstractSubstitutionModel implements Likelihood {
+public class ComplexSubstitutionModel extends AbstractSubstitutionModel implements Likelihood, Citable {
 
     public ComplexSubstitutionModel(String name, DataType dataType,
                                     FrequencyModel rootFreqModel, Parameter parameter) {
@@ -582,4 +583,19 @@ public class ComplexSubstitutionModel extends AbstractSubstitutionModel implemen
 
     private double[] probability = null;
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.SUBSTITUTION_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Complex-diagonalizable, irreversible substitution model";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CommonCitations.EDWARDS_2011_ANCIENT);
+    }
+
 }
\ No newline at end of file
diff --git a/src/dr/evomodel/substmodel/Dayhoff.java b/src/dr/evomodel/substmodel/Dayhoff.java
index ca4d7e3..bfdd31a 100644
--- a/src/dr/evomodel/substmodel/Dayhoff.java
+++ b/src/dr/evomodel/substmodel/Dayhoff.java
@@ -29,8 +29,7 @@ import dr.evolution.datatype.AminoAcids;
 import dr.util.Author;
 import dr.util.Citation;
 
-import java.util.Arrays;
-import java.util.List;
+import java.util.*;
 
 /**
  * Dayhoff model for amino acid evolution
@@ -191,11 +190,22 @@ public class Dayhoff extends EmpiricalRateMatrix.AbstractAminoAcid {
 		setEmpiricalFrequencies(f, "ARNDCQEGHILKMFPSTWYV");
 	}
 
-    public List<Citation> getCitations() {
-        return Arrays.asList(CITATION);
-    }
+	@Override
+	public Citation.Category getCategory() {
+		return Citation.Category.SUBSTITUTION_MODELS;
+	}
+
+	@Override
+	public String getDescription() {
+		return "Dayhoff amino acid substitution model";
+	}
+
+	@Override
+	public List<Citation> getCitations() {
+		return Collections.singletonList(CITATION);
+	}
 
-    public static Citation CITATION = new Citation(
+	public static Citation CITATION = new Citation(
             new Author[]{
                     new Author("MO", "Dayhoff"),
                     new Author("RM", "Schwartz"),
diff --git a/src/dr/evomodel/substmodel/FLU.java b/src/dr/evomodel/substmodel/FLU.java
index e26a0db..0094a67 100644
--- a/src/dr/evomodel/substmodel/FLU.java
+++ b/src/dr/evomodel/substmodel/FLU.java
@@ -30,9 +30,7 @@ import dr.util.Author;
 import dr.util.Citable;
 import dr.util.Citation;
 
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
+import java.util.*;
 
 /**
  * FLU model of amino acid evolution (add reference).
@@ -55,7 +53,7 @@ public class FLU extends EmpiricalRateMatrix.AbstractAminoAcid {
     // but the AminoAcids dataType wants them in this order:
     // ACDEFGHIKLMNPQRSTVWY
     // This is solved by calling the setEmpiricalRates and setEmpiricalFrequencies methods
-    
+
     private FLU() { super("FLU");
 
         int n = AminoAcids.INSTANCE.getStateCount();
@@ -199,8 +197,19 @@ public class FLU extends EmpiricalRateMatrix.AbstractAminoAcid {
         setEmpiricalFrequencies(f, "ARNDCQEGHILKMFPSTWYV");
     }
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.SUBSTITUTION_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "FLU amino acid substitution model";
+    }
+
+    @Override
     public List<Citation> getCitations() {
-        return Arrays.asList(CITATION);
+        return Collections.singletonList(CITATION);
     }
 
     public static Citation CITATION = new Citation(
@@ -211,10 +220,6 @@ public class FLU extends EmpiricalRateMatrix.AbstractAminoAcid {
                     new Author("VS", "Le")
             },
             "FLU, an amino acid substitution model for influenza proteins",
-            2010,
-            "BMC Evolutionary Biology",
-            10,
-            99, -1,
-            Citation.Status.PUBLISHED
+            2010, "BMC Evolutionary Biology", 10, 99, -1
     );
 }
\ No newline at end of file
diff --git a/src/dr/evomodel/substmodel/GLMSubstitutionModel.java b/src/dr/evomodel/substmodel/GLMSubstitutionModel.java
index c533f91..b1968ef 100644
--- a/src/dr/evomodel/substmodel/GLMSubstitutionModel.java
+++ b/src/dr/evomodel/substmodel/GLMSubstitutionModel.java
@@ -30,6 +30,11 @@ import dr.inference.distribution.LogLinearModel;
 import dr.inference.loggers.LogColumn;
 import dr.inference.model.BayesianStochasticSearchVariableSelection;
 import dr.inference.model.Model;
+import dr.util.Citation;
+import dr.util.CommonCitations;
+
+import java.util.Collections;
+import java.util.List;
 
 /**
  * <b>A irreversible class for any data type where
@@ -75,7 +80,18 @@ public class GLMSubstitutionModel extends ComplexSubstitutionModel {
             return 0;
         }
         return Double.NEGATIVE_INFINITY;
-    }   
+    }
+
+    @Override
+    public String getDescription() {
+        return "Generalized linear model (GLM) substitution model";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+
+        return Collections.singletonList(CommonCitations.LEMEY_2014_UNIFYING);
+    }
 
     private LogLinearModel glm;
     private double[] testProbabilities;    
diff --git a/src/dr/evomodel/substmodel/GTR.java b/src/dr/evomodel/substmodel/GTR.java
index 9e8a9e4..6e8cf76 100644
--- a/src/dr/evomodel/substmodel/GTR.java
+++ b/src/dr/evomodel/substmodel/GTR.java
@@ -28,6 +28,12 @@ package dr.evomodel.substmodel;
 import dr.evomodelxml.substmodel.GTRParser;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+
+import java.util.Collections;
+import java.util.List;
 
 /**
  * General Time Reversible model of nucleotide evolution
@@ -38,7 +44,7 @@ import dr.inference.model.Variable;
  * @author Alexei Drummond
  * @version $Id: GTR.java,v 1.19 2005/05/24 20:25:58 rambaut Exp $
  */
-public class GTR extends AbstractNucleotideModel {
+public class GTR extends AbstractNucleotideModel implements Citable {
 
     private Variable<Double> rateACValue = null;
     private Variable<Double> rateAGValue = null;
@@ -172,4 +178,30 @@ public class GTR extends AbstractNucleotideModel {
         return buffer.toString();
     }
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.SUBSTITUTION_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "GTR nucleotide substitution model";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+
+    public static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("S", "Tavaré")
+            },
+            "Some probabilistic and statistical problems in the analysis of DNA sequences.",
+            1985,
+            "In: Miura R. M., editor. Lectures on mathematics in the life sciences.",
+            17, 57, 86
+    );
+
 }
diff --git a/src/dr/evomodel/substmodel/HKY.java b/src/dr/evomodel/substmodel/HKY.java
index 0f40232..52f5524 100644
--- a/src/dr/evomodel/substmodel/HKY.java
+++ b/src/dr/evomodel/substmodel/HKY.java
@@ -28,6 +28,14 @@ package dr.evomodel.substmodel;
 import dr.inference.model.Parameter;
 import dr.inference.model.Statistic;
 import dr.inference.model.Variable;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+
+import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
 
 
 /**
@@ -37,7 +45,7 @@ import dr.inference.model.Variable;
  * @author Andrew Rambaut
  * @version $Id: HKY.java,v 1.42 2005/09/23 13:17:59 rambaut Exp $
  */
-public class HKY extends AbstractNucleotideModel {
+public class HKY extends AbstractNucleotideModel implements Citable {
 
     /**
      * tsTv
@@ -375,4 +383,34 @@ public class HKY extends AbstractNucleotideModel {
         }
 
     };
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.SUBSTITUTION_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "HKY nucleotide substitution model";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+
+    public static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("M", "Hasegawa"),
+                    new Author("H", "Kishino"),
+                    new Author("T", "Yano")
+            },
+            "Dating the human-ape splitting by a molecular clock of mitochondrial DNA",
+            1985,
+            "J. Mol. Evol.",
+            22,
+            160, 174,
+            Citation.Status.PUBLISHED
+    );
 }
diff --git a/src/dr/evomodel/substmodel/JTT.java b/src/dr/evomodel/substmodel/JTT.java
index dac27c0..086067b 100644
--- a/src/dr/evomodel/substmodel/JTT.java
+++ b/src/dr/evomodel/substmodel/JTT.java
@@ -29,8 +29,7 @@ import dr.evolution.datatype.AminoAcids;
 import dr.util.Author;
 import dr.util.Citation;
 
-import java.util.Arrays;
-import java.util.List;
+import java.util.*;
 
 /**
  * JTT model for amino acid evolution
@@ -189,9 +188,21 @@ public class JTT extends EmpiricalRateMatrix.AbstractAminoAcid {
 		setEmpiricalFrequencies(f, "ARNDCQEGHILKMFPSTWYV");
 	}
 
-    public List<Citation> getCitations() {
-        return Arrays.asList(CITATION);
-    }
+
+	@Override
+	public Citation.Category getCategory() {
+		return Citation.Category.SUBSTITUTION_MODELS;
+	}
+
+	@Override
+	public String getDescription() {
+		return "JTT amino acid substitution model";
+	}
+
+	@Override
+	public List<Citation> getCitations() {
+		return Collections.singletonList(CITATION);
+	}
 
     public static Citation CITATION = new Citation(
             new Author[]{
diff --git a/src/dr/evomodel/substmodel/LG.java b/src/dr/evomodel/substmodel/LG.java
index e5432c0..38ac876 100644
--- a/src/dr/evomodel/substmodel/LG.java
+++ b/src/dr/evomodel/substmodel/LG.java
@@ -29,8 +29,7 @@ import dr.evolution.datatype.AminoAcids;
 import dr.util.Author;
 import dr.util.Citation;
 
-import java.util.Arrays;
-import java.util.List;
+import java.util.*;
 
 /**
  * LG model of amino acid evolution (Le and Gascuel, 2008)
@@ -310,11 +309,23 @@ public class LG extends EmpiricalRateMatrix.AbstractAminoAcid {
 		setEmpiricalFrequencies(f, "ARNDCQEGHILKMFPSTWYV");
 	}
 
-    public List<Citation> getCitations() {
-        return Arrays.asList(CITATION);
-    }
+	@Override
+	public Citation.Category getCategory() {
+		return Citation.Category.SUBSTITUTION_MODELS;
+	}
+
+	@Override
+	public String getDescription() {
+		return "LG amino acid substitution model";
+	}
+
+	@Override
+	public List<Citation> getCitations() {
+		return Collections.singletonList(CITATION);
+	}
+
 
-    public static Citation CITATION = new Citation(
+	public static Citation CITATION = new Citation(
             new Author[]{
                     new Author("S. Q.", "Le"),
                     new Author("O.", "Gascuel")
diff --git a/src/dr/evomodel/substmodel/MTREV.java b/src/dr/evomodel/substmodel/MTREV.java
index 5f1269a..63f9d6d 100644
--- a/src/dr/evomodel/substmodel/MTREV.java
+++ b/src/dr/evomodel/substmodel/MTREV.java
@@ -29,8 +29,7 @@ import dr.evolution.datatype.AminoAcids;
 import dr.util.Author;
 import dr.util.Citation;
 
-import java.util.Arrays;
-import java.util.List;
+import java.util.*;
 
 /**
  * MTREV24 model of amino acid evolution
@@ -187,20 +186,27 @@ public class MTREV extends EmpiricalRateMatrix.AbstractAminoAcid {
 		setEmpiricalFrequencies(f, "ARNDCQEGHILKMFPSTWYV");
 	}
 
-    public List<Citation> getCitations() {
-        return Arrays.asList(CITATION);
-    }
+	@Override
+	public Citation.Category getCategory() {
+		return Citation.Category.SUBSTITUTION_MODELS;
+	}
+
+	@Override
+	public String getDescription() {
+		return "MTREV amino acid substitution model";
+	}
+
+	@Override
+	public List<Citation> getCitations() {
+		return Collections.singletonList(CITATION);
+	}
 
-    public static Citation CITATION = new Citation(
+	public static Citation CITATION = new Citation(
             new Author[]{
                     new Author("J", "Adachi"),
                     new Author("M", "Hasegawa")
             },
             "Model of amino acid substitution in proteins encoded by mitochondrial DNA",
-            1996,
-            "J Mol Evol",
-            42,
-            459, 468,
-            Citation.Status.PUBLISHED
+            1996, "J Mol Evol", 42, 459, 468
     );
 }
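
The JTT, LG and MTREV hunks above (and the WAG hunk further below) all apply the same refactoring: each empirical rate matrix now reports a citation category and a human-readable description alongside its single citation, and Collections.singletonList replaces Arrays.asList. A condensed sketch of the recurring pattern, where the class name and the null CITATION constant are placeholders standing in for each concrete model:

    import dr.util.Citation;

    import java.util.Collections;
    import java.util.List;

    public class PlaceholderEmpiricalModel {

        // each concrete model defines its own CITATION constant
        public static final Citation CITATION = null;

        public Citation.Category getCategory() {
            return Citation.Category.SUBSTITUTION_MODELS;
        }

        public String getDescription() {
            return "Empirical amino acid substitution model";
        }

        public List<Citation> getCitations() {
            return Collections.singletonList(CITATION);
        }
    }

Collections.singletonList returns an immutable one-element list, so behaviour is unchanged apart from the added metadata.
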
diff --git a/src/dr/evomodel/substmodel/MarginalVarianceStatistic.java b/src/dr/evomodel/substmodel/MarginalVarianceStatistic.java
index ae1a292..39b8686 100644
--- a/src/dr/evomodel/substmodel/MarginalVarianceStatistic.java
+++ b/src/dr/evomodel/substmodel/MarginalVarianceStatistic.java
@@ -25,7 +25,7 @@
 
 package dr.evomodel.substmodel;
 
-import dr.evomodel.substmodel.MultivariateOUModel;
+import dr.inference.distribution.MultivariateOUModel;
 import dr.xml.*;
 import dr.inference.model.Statistic;
 
diff --git a/src/dr/evomodel/substmodel/SVSGeneralSubstitutionModel.java b/src/dr/evomodel/substmodel/SVSGeneralSubstitutionModel.java
index 895d6ae..351e89e 100644
--- a/src/dr/evomodel/substmodel/SVSGeneralSubstitutionModel.java
+++ b/src/dr/evomodel/substmodel/SVSGeneralSubstitutionModel.java
@@ -29,10 +29,11 @@ import dr.evolution.datatype.*;
 import dr.inference.loggers.LogColumn;
 import dr.inference.loggers.NumberColumn;
 import dr.inference.model.*;
+import dr.util.Citable;
+import dr.util.Citation;
+import dr.util.CommonCitations;
 
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.Set;
+import java.util.*;
 
 /**
  * <b>A general model of sequence substitution with stochastic variable selection</b>. A general reversible class for any
@@ -43,7 +44,7 @@ import java.util.Set;
  */
 
 public class SVSGeneralSubstitutionModel extends GeneralSubstitutionModel implements Likelihood,
-        BayesianStochasticSearchVariableSelection {
+        BayesianStochasticSearchVariableSelection, Citable {
 
 
     public SVSGeneralSubstitutionModel(DataType dataType, FrequencyModel freqModel, Parameter parameter,
@@ -67,7 +68,7 @@ public class SVSGeneralSubstitutionModel extends GeneralSubstitutionModel implem
     }
 
     public boolean validState() {
-        return !updateMatrix || Utils.connectedAndWellConditioned(probability,this);
+        return !updateMatrix || BayesianStochasticSearchVariableSelection.Utils.connectedAndWellConditioned(probability,this);
     }
 
     protected void handleVariableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
@@ -92,7 +93,7 @@ public class SVSGeneralSubstitutionModel extends GeneralSubstitutionModel implem
      */
     public double getLogLikelihood() {
         if (updateMatrix) {
-            if (!Utils.connectedAndWellConditioned(probability,this)) {
+            if (!BayesianStochasticSearchVariableSelection.Utils.connectedAndWellConditioned(probability,this)) {
                 return Double.NEGATIVE_INFINITY;
             }
         }
@@ -187,4 +188,19 @@ public class SVSGeneralSubstitutionModel extends GeneralSubstitutionModel implem
     private boolean isUsed = false;
 
     private Parameter rateIndicator;
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.SUBSTITUTION_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Stochastic search variable selection, reversible substitution model";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CommonCitations.LEMEY_2009_BAYESIAN);
+    }
 }
diff --git a/src/dr/evomodel/substmodel/TN93.java b/src/dr/evomodel/substmodel/TN93.java
index 7eba5af..774692e 100644
--- a/src/dr/evomodel/substmodel/TN93.java
+++ b/src/dr/evomodel/substmodel/TN93.java
@@ -28,6 +28,12 @@ package dr.evomodel.substmodel;
 import dr.evomodelxml.substmodel.TN93Parser;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+
+import java.util.Collections;
+import java.util.List;
 
 /**
  * Tamura and Nei model of nucleotide evolution.
@@ -49,7 +55,7 @@ import dr.inference.model.Variable;
  *
  * @author Joseph Heled
  */
-public class TN93 extends AbstractNucleotideModel {
+public class TN93 extends AbstractNucleotideModel implements Citable {
 
     private Variable<Double> kappa1Variable = null;
     private Variable<Double> kappa2Variable = null;
@@ -331,4 +337,30 @@ public class TN93 extends AbstractNucleotideModel {
 
         return buffer.toString();
     }
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.SUBSTITUTION_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Tamura-Nei nucleotide substitution model";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CITATION);
+    }
+
+    public static Citation CITATION = new Citation(
+            new Author[]{
+                    new Author("K", "Tamura"),
+                    new Author("M", "Nei")
+            },
+            "Estimation of the number of nucleotide substitutions in the control region of mitochondrial DNA in humans and chimpanzees",
+            1993,
+            "Mol Biol Evol",
+            10, 512, 526
+    );
 }
\ No newline at end of file
diff --git a/src/dr/evomodel/substmodel/WAG.java b/src/dr/evomodel/substmodel/WAG.java
index 948af3d..7c365fa 100644
--- a/src/dr/evomodel/substmodel/WAG.java
+++ b/src/dr/evomodel/substmodel/WAG.java
@@ -29,8 +29,7 @@ import dr.evolution.datatype.AminoAcids;
 import dr.util.Author;
 import dr.util.Citation;
 
-import java.util.Arrays;
-import java.util.List;
+import java.util.*;
 
 /**
  * WAG model of amino acid evolution (S. Whelan and N. Goldman 2000)
@@ -202,11 +201,22 @@ public class WAG extends EmpiricalRateMatrix.AbstractAminoAcid {
 		setEmpiricalFrequencies(f, "ARNDCQEGHILKMFPSTWYV");
 	}
 
-    public List<Citation> getCitations() {
-        return Arrays.asList(CITATION);
-    }
+	@Override
+	public Citation.Category getCategory() {
+		return Citation.Category.SUBSTITUTION_MODELS;
+	}
+
+	@Override
+	public String getDescription() {
+		return "WAG amino acid substitution model";
+	}
+
+	@Override
+	public List<Citation> getCitations() {
+		return Collections.singletonList(CITATION);
+	}
 
-    public static Citation CITATION = new Citation(
+	public static Citation CITATION = new Citation(
             new Author[]{
                     new Author("S", "Whelan"),
                     new Author("N", "Goldman")
diff --git a/src/dr/inference/distribution/ConditionalCladeProbability.java b/src/dr/evomodel/tree/ConditionalCladeProbability.java
similarity index 94%
rename from src/dr/inference/distribution/ConditionalCladeProbability.java
rename to src/dr/evomodel/tree/ConditionalCladeProbability.java
index b4f924d..c99e864 100644
--- a/src/dr/inference/distribution/ConditionalCladeProbability.java
+++ b/src/dr/evomodel/tree/ConditionalCladeProbability.java
@@ -1,7 +1,7 @@
 /*
  * ConditionalCladeProbability.java
  *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ * Copyright (c) 2002-2016 Alexei Drummond, Andrew Rambaut and Marc Suchard
  *
  * This file is part of BEAST.
  * See the NOTICE file distributed with this work for additional
@@ -23,7 +23,7 @@
  * Boston, MA  02110-1301  USA
  */
 
-package dr.inference.distribution;
+package dr.evomodel.tree;
 
 import dr.evolution.tree.SimpleTree;
 import dr.evomodel.tree.ConditionalCladeFrequency;
diff --git a/src/dr/evomodel/tree/ProgressiveScalarTreeTransform.java b/src/dr/evomodel/tree/ProgressiveScalarTreeTransform.java
index 7deb92a..8fa99e4 100644
--- a/src/dr/evomodel/tree/ProgressiveScalarTreeTransform.java
+++ b/src/dr/evomodel/tree/ProgressiveScalarTreeTransform.java
@@ -33,6 +33,13 @@ import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 import dr.inference.model.Variable;
 import dr.util.Citable;
+import dr.util.Citation;
+import dr.util.CommonCitations;
+
+import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
 
 /**
  * @author Marc A. Suchard
diff --git a/src/dr/evomodel/tree/TerminalBranchStatistic.java b/src/dr/evomodel/tree/TerminalBranchStatistic.java
new file mode 100644
index 0000000..a0a99fe
--- /dev/null
+++ b/src/dr/evomodel/tree/TerminalBranchStatistic.java
@@ -0,0 +1,73 @@
+/*
+ * TerminalBranchStatistic.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.evomodel.tree;
+
+import dr.evolution.tree.NodeRef;
+import dr.evolution.tree.Tree;
+import dr.inference.model.Statistic;
+
+/**
+ * A statistic that reports the branch lengths of external branches in the tree
+ *
+ * @author Luiz Carvalho
+ */
+public class TerminalBranchStatistic extends Statistic.Abstract implements TreeStatistic {
+
+    public TerminalBranchStatistic(String name, Tree tree) {
+        super(name);
+        this.tree = tree;
+    }
+
+    public void setTree(Tree tree) {
+        this.tree = tree;
+    }
+
+    public Tree getTree() {
+        return tree;
+    }
+
+    public int getDimension() {
+        return tree.getExternalNodeCount();
+    }
+
+    /**
+     * @return the external branch lengths
+     */
+    public double getStatisticValue(int dim) {
+        final double[] terminalBranches = new double[tree.getExternalNodeCount()];
+        int k = 0;
+        for (int i = 0; i < tree.getNodeCount(); i++) {
+            NodeRef node = tree.getNode(i);
+            if (tree.isExternal(node)) {
+                terminalBranches[k] = tree.getBranchLength(node);
+                k++;
+            }
+            }
+        }
+        return terminalBranches[dim];
+    }
+    
+    private Tree tree = null;
+}
\ No newline at end of file
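
The new TerminalBranchStatistic rebuilds the full array of external branch lengths on every call to getStatisticValue(dim). A simpler equivalent is possible, assuming (not verified here) that dr.evolution.tree.Tree exposes getExternalNode(int) and that its tip indexing matches the dimensions reported by getDimension():

    // Sketch under the stated assumption about getExternalNode(int); not part of the commit.
    public double getStatisticValue(int dim) {
        return tree.getBranchLength(tree.getExternalNode(dim));
    }
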
diff --git a/src/dr/evomodel/tree/TransformedTreeModel.java b/src/dr/evomodel/tree/TransformedTreeModel.java
index dcf8b01..ce8e0b6 100644
--- a/src/dr/evomodel/tree/TransformedTreeModel.java
+++ b/src/dr/evomodel/tree/TransformedTreeModel.java
@@ -34,13 +34,12 @@ import dr.evolution.util.Taxon;
 import dr.inference.model.AbstractModel;
 import dr.inference.model.Model;
 import dr.inference.model.Variable;
+import dr.util.Author;
 import dr.util.Citable;
 import dr.util.Citation;
 import dr.util.CommonCitations;
 
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
+import java.util.*;
 import java.util.logging.Logger;
 
 /**
@@ -59,8 +58,6 @@ public class TransformedTreeModel extends AbstractModel implements MultivariateT
 
         Logger log = Logger.getLogger("dr.evomodel.tree");
         log.info("Creating a transform tree.");
-        log.info(treeTransform.getInfo() + "\n\tPlease cite:");
-        log.info(Citable.Utils.getCitationString(this));
     }
 
     public String toString() {
@@ -192,14 +189,6 @@ public class TransformedTreeModel extends AbstractModel implements MultivariateT
         // Do nothing
     }
 
-    public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(
-                CommonCitations.LEMEY_MIXTURE_2012
-        );
-        return citations;
-    }
-
     private final TreeTransform treeTransform;
     private final TreeModel treeModel;
 
@@ -330,4 +319,19 @@ public class TransformedTreeModel extends AbstractModel implements MultivariateT
     public void setUnits(Type units) {
         treeModel.setUnits(units);
     }
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TRAIT_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Bayesian estimation of Pagel's lambda";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Collections.singletonList(CommonCitations.VRANCKEN_2015_SIMULTANEOUSLY);
+    }
 }
diff --git a/src/dr/evomodel/tree/TreeModel.java b/src/dr/evomodel/tree/TreeModel.java
index ba9d756..d2dd81e 100644
--- a/src/dr/evomodel/tree/TreeModel.java
+++ b/src/dr/evomodel/tree/TreeModel.java
@@ -30,6 +30,9 @@ import dr.evolution.util.MutableTaxonListListener;
 import dr.evolution.util.Taxon;
 import dr.inference.model.*;
 import dr.util.Attributable;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
 
@@ -42,7 +45,7 @@ import java.util.*;
  * @author Alexei Drummond
  * @version $Id: TreeModel.java,v 1.129 2006/01/05 17:55:47 rambaut Exp $
  */
-public class TreeModel extends AbstractModel implements MultivariateTraitTree {
+public class TreeModel extends AbstractModel implements MultivariateTraitTree, Citable {
 
     //
     // Public stuff
@@ -1022,6 +1025,8 @@ public class TreeModel extends AbstractModel implements MultivariateTraitTree {
             throw new RuntimeException("only leaves can be used with getLeafHeightParameter");
         }
 
+        isTipDateSampled = true;
+
         return nodes[node.getNumber()].heightParameter;
     }
 
@@ -1488,4 +1493,53 @@ public class TreeModel extends AbstractModel implements MultivariateTraitTree {
 
     private boolean hasRates = false;
     private boolean hasTraits = false;
+    private boolean isTipDateSampled = false;
+
+    public boolean isTipDateSampled() {
+        return isTipDateSampled;
+    }
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TREE_PRIORS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Sampling tip dates model";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        if (isTipDateSampled()) {
+            return Arrays.asList(new Citation(
+                            new Author[]{
+                                    new Author("B", "Shapiro"),
+                                    new Author("SYW", "Ho"),
+                                    new Author("AJ", "Drummond"),
+                                    new Author("MA", "Suchard"),
+                                    new Author("OG", "Pybus"),
+                                    new Author("A", "Rambaut"),
+                            },
+                            "A Bayesian phylogenetic method to estimate unknown sequence ages",
+                            2010,
+                            "Mol Biol Evol",
+                            28,
+                            879, 887,
+                            "10.1093/molbev/msq262"
+                    ),
+                    new Citation(
+                            new Author[]{
+                                    new Author("AJ", "Drummond"),
+                            },
+                            "PhD Thesis",
+                            2002,
+                            "University of Auckland",
+                            ""
+                    ));
+        } else {
+            return Collections.<Citation>emptyList();
+        }
+    }
+
 }
diff --git a/src/dr/evomodel/tree/TreeTransform.java b/src/dr/evomodel/tree/TreeTransform.java
index 33236bf..319b29f 100644
--- a/src/dr/evomodel/tree/TreeTransform.java
+++ b/src/dr/evomodel/tree/TreeTransform.java
@@ -35,9 +35,9 @@ import dr.inference.model.Variable;
 import dr.util.Author;
 import dr.util.Citable;
 import dr.util.Citation;
+import dr.util.CommonCitations;
 
-import java.util.ArrayList;
-import java.util.List;
+import java.util.*;
 
 /**
  * @author Marc A. Suchard
@@ -105,17 +105,21 @@ public abstract class TreeTransform extends AbstractModel implements TreeTraitPr
 
     private final Helper treeTraits = new Helper();
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TRAIT_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Branch-specific phenotypic mixture model";
+    }
+
+    @Override
     public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(
-                new Citation(
-                        new Author[]{
-                                new Author("P", "Lemey"),
-                                new Author("MA", "Suchard"),
-                        },
-                        Citation.Status.IN_PREPARATION
-                ));
-        return citations;
+        return Collections.singletonList(CommonCitations.VRANCKEN_2015_SIMULTANEOUSLY);
     }
+
+
 }
 
diff --git a/src/dr/evomodel/treelikelihood/HypermutantErrorModel.java b/src/dr/evomodel/treelikelihood/HypermutantErrorModel.java
index c41acde..044758b 100644
--- a/src/dr/evomodel/treelikelihood/HypermutantErrorModel.java
+++ b/src/dr/evomodel/treelikelihood/HypermutantErrorModel.java
@@ -30,9 +30,14 @@ import dr.evolution.datatype.Nucleotides;
 import dr.inference.model.Parameter;
 import dr.inference.model.Statistic;
 import dr.inference.model.Variable;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
 import dr.xml.*;
 
+import java.util.Arrays;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.logging.Logger;
 
@@ -40,7 +45,7 @@ import java.util.logging.Logger;
  * @author Andrew Rambaut
  * @version $Id$
  */
-public class HypermutantErrorModel extends TipStatesModel {
+public class HypermutantErrorModel extends TipStatesModel implements Citable {
 
     public static final String HYPERMUTANT_ERROR_MODEL = "hypermutantErrorModel";
     public static final String HYPERMUTATION_RATE = "hypermutationRate";
@@ -304,4 +309,41 @@ public class HypermutantErrorModel extends TipStatesModel {
     private final Parameter hypermutationRateParameter;
     private final Parameter hypermutationIndicatorParameter;
     private final boolean unlinkedRates;
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.SUBSTITUTION_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Sequence error model";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Arrays.asList(new Citation(
+                        new Author[]{
+                                new Author("A", "Rambaut"),
+                                new Author("SYW", "Ho"),
+                                new Author("AJ", "Drummond"),
+                                new Author("B", "Shapiro"),
+                        },
+                        "Accommodating the effect of ancient DNA damage on inferences of demographic histories",
+                        2008,
+                        "Mol Biol Evol",
+                        26,
+                        245, 248,
+                        "10.1093/molbev/msn256"
+                ),
+                new Citation(
+                        new Author[]{
+                                new Author("J", "Felsenstein"),
+                        },
+                        "Inferring Phylogenies",
+                        2004,
+                        "Sinauer Associates",
+                        ""
+                ));
+    }
 }
\ No newline at end of file
diff --git a/src/dr/evomodel/treelikelihood/SequenceErrorModel.java b/src/dr/evomodel/treelikelihood/SequenceErrorModel.java
index d9af398..2eee124 100644
--- a/src/dr/evomodel/treelikelihood/SequenceErrorModel.java
+++ b/src/dr/evomodel/treelikelihood/SequenceErrorModel.java
@@ -30,6 +30,13 @@ import dr.evolution.util.TaxonList;
 import dr.evomodelxml.treelikelihood.SequenceErrorModelParser;
 import dr.inference.model.Parameter;
 import dr.inference.model.Statistic;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
 
 /**
  * This class incorporates uncertainty in the state at the tips of the tree and can
@@ -42,7 +49,7 @@ import dr.inference.model.Statistic;
  * @author Andrew Rambaut
  * @version $Id$
  */
-public class SequenceErrorModel extends TipStatesModel {
+public class SequenceErrorModel extends TipStatesModel implements Citable {
     public enum ErrorType {
         TYPE_1_TRANSITIONS("type1Transitions"),
         TYPE_2_TRANSITIONS("type2Transitions"),
@@ -248,4 +255,41 @@ public class SequenceErrorModel extends TipStatesModel {
     private final Parameter baseErrorRateParameter;
     private final Parameter ageRelatedErrorRateParameter;
     private final Parameter indicatorParameter;
+
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.SUBSTITUTION_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Sequence error model";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+            return Arrays.asList(new Citation(
+                            new Author[]{
+                                    new Author("A", "Rambaut"),
+                                    new Author("SYW", "Ho"),
+                                    new Author("AJ", "Drummond"),
+                                    new Author("B", "Shapiro"),
+                            },
+                            "Accommodating the effect of ancient DNA damage on inferences of demographic histories",
+                            2008,
+                            "Mol Biol Evol",
+                            26,
+                            245, 248,
+                            "10.1093/molbev/msn256"
+                    ),
+                    new Citation(
+                            new Author[]{
+                                    new Author("J", "Felsenstein"),
+                            },
+                            "Inferring Phylogenies",
+                            2004,
+                            "Sinauer Associates",
+                            ""
+                    ));
+    }
 }
\ No newline at end of file
diff --git a/src/dr/evomodelxml/MSSD/CTMCScalePriorParser.java b/src/dr/evomodelxml/MSSD/CTMCScalePriorParser.java
index a942371..cb3d75a 100644
--- a/src/dr/evomodelxml/MSSD/CTMCScalePriorParser.java
+++ b/src/dr/evomodelxml/MSSD/CTMCScalePriorParser.java
@@ -54,9 +54,7 @@ public class CTMCScalePriorParser extends AbstractXMLObjectParser {
         boolean trial = xo.getAttribute(TRIAL, false);
         SubstitutionModel substitutionModel = (SubstitutionModel) xo.getChild(SubstitutionModel.class);
 
-        Logger.getLogger("dr.evolution").info("\n ---------------------------------\nCreating ctmcScalePrior model.");
-        Logger.getLogger("dr.evolution").info("\tIf you publish results using this prior, please reference:");
-        Logger.getLogger("dr.evolution").info("\t\t 1. Ferreira and Suchard (2008) for the conditional reference prior on CTMC scale parameter prior;");
+        Logger.getLogger("dr.evolution").info("Creating CTMC Scale Reference Prior model.");
 
         return new CTMCScalePrior(MODEL_NAME, ctmcScale, treeModel, reciprocal, substitutionModel, trial);
     }
diff --git a/src/dr/evomodelxml/TreeWorkingPriorParsers.java b/src/dr/evomodelxml/TreeWorkingPriorParsers.java
index 35ddee8..c1d6c6a 100644
--- a/src/dr/evomodelxml/TreeWorkingPriorParsers.java
+++ b/src/dr/evomodelxml/TreeWorkingPriorParsers.java
@@ -44,7 +44,7 @@ import dr.evomodel.coalescent.ExponentialProductSufficientStatisticsLikelihood;
 import dr.evomodel.coalescent.GammaProductLikelihood;
 import dr.evomodel.tree.ConditionalCladeFrequency;
 import dr.evomodel.tree.TreeModel;
-import dr.inference.distribution.ConditionalCladeProbability;
+import dr.evomodel.tree.ConditionalCladeProbability;
 import dr.inference.distribution.MultivariateDistributionLikelihood;
 import dr.inference.model.ConstantLikelihood;
 import dr.inference.model.Likelihood;
diff --git a/src/dr/evomodelxml/branchratemodel/LocalClockModelParser.java b/src/dr/evomodelxml/branchratemodel/LocalClockModelParser.java
index bb5b93d..7cb4597 100644
--- a/src/dr/evomodelxml/branchratemodel/LocalClockModelParser.java
+++ b/src/dr/evomodelxml/branchratemodel/LocalClockModelParser.java
@@ -76,14 +76,12 @@ public class LocalClockModelParser extends AbstractXMLObjectParser {
                         throw new XMLParseException("A local clock for a clade must be defined by at least two taxa");
                     }
 
-                    boolean includeStem = false;
                     boolean excludeClade = false;
                     double stemProportion = 0.0;
 
                     if (xoc.hasAttribute(INCLUDE_STEM)) {
-                        includeStem = xoc.getBooleanAttribute(INCLUDE_STEM);
                         // if includeStem=true then assume it is the whole stem
-                        stemProportion = 1.0;
+                        stemProportion = xoc.getBooleanAttribute(INCLUDE_STEM) ? 1.0 : 0.0;
                     }
 
                     if (xoc.hasAttribute(STEM_PROPORTION)) {
diff --git a/src/dr/evomodelxml/coalescent/GMRFSkyrideLikelihoodParser.java b/src/dr/evomodelxml/coalescent/GMRFSkyrideLikelihoodParser.java
index 44c36e3..1707bcf 100644
--- a/src/dr/evomodelxml/coalescent/GMRFSkyrideLikelihoodParser.java
+++ b/src/dr/evomodelxml/coalescent/GMRFSkyrideLikelihoodParser.java
@@ -52,6 +52,7 @@ public class GMRFSkyrideLikelihoodParser extends AbstractXMLObjectParser {
     public static final String POPULATION_TREE = "populationTree";
     public static final String LAMBDA_PARAMETER = "lambdaParameter";
     public static final String BETA_PARAMETER = "betaParameter";
+    public static final String SINGLE_BETA = "singleBeta";
     public static final String COVARIATE_MATRIX = "covariateMatrix";
     public static final String RANDOMIZE_TREE = "randomizeTree";
     public static final String TIME_AWARE_SMOOTHING = "timeAwareSmoothing";
@@ -63,6 +64,10 @@ public class GMRFSkyrideLikelihoodParser extends AbstractXMLObjectParser {
     public static final String CUT_OFF = "cutOff";
     public static final String PHI_PARAMETER = "phiParameter";
     public static final String PLOIDY = "ploidy";
+    public static final String COVARIATES = "covariates";
+    public static final String COLUMN_MAJOR = "columnMajor";
+    public static final String LAST_OBSERVED_INDEX = "lastObservedIndex";
+    public static final String COV_PREC_PARAM = "covariatePrecision";
 
 
     public String getParserName() {
@@ -109,13 +114,13 @@ public class GMRFSkyrideLikelihoodParser extends AbstractXMLObjectParser {
         } else {
             lambda = new Parameter.Default(1.0);
         }
-        /*
+
         Parameter gridPoints = null;
         if (xo.getChild(GRID_POINTS) != null) {
             cxo = xo.getChild(GRID_POINTS);
             gridPoints = (Parameter) cxo.getChild(Parameter.class);
         }
-        */
+
         Parameter numGridPoints = null;
         if (xo.getChild(NUM_GRID_POINTS) != null) {
             cxo = xo.getChild(NUM_GRID_POINTS);
@@ -134,6 +139,16 @@ public class GMRFSkyrideLikelihoodParser extends AbstractXMLObjectParser {
             phi = (Parameter) cxo.getChild(Parameter.class);
         }
 
+        List<Parameter> lastObservedIndex = null;
+        if (xo.hasChildNamed(LAST_OBSERVED_INDEX)) {
+            lastObservedIndex = new ArrayList<Parameter>();
+            cxo = xo.getChild(LAST_OBSERVED_INDEX);
+            final int numObsInd = cxo.getChildCount();
+
+            for(int i=0; i< numObsInd; ++i) {
+                lastObservedIndex.add((Parameter) cxo.getChild(i));
+            }
+        }
 
         Parameter ploidyFactors = null;
         if (xo.getChild(PLOIDY) != null) {
@@ -146,10 +161,19 @@ public class GMRFSkyrideLikelihoodParser extends AbstractXMLObjectParser {
             }
         }
 
-        Parameter beta = null;
+        Parameter betaParameter = null;
+        if (xo.hasChildNamed(SINGLE_BETA)) {
+            betaParameter = (Parameter) xo.getElementFirstChild(SINGLE_BETA);
+        }
+
+        List<Parameter> betaList = null;
         if (xo.getChild(BETA_PARAMETER) != null) {
+            betaList = new ArrayList<Parameter>();
             cxo = xo.getChild(BETA_PARAMETER);
-            beta = (Parameter) cxo.getChild(Parameter.class);
+            final int numBeta = cxo.getChildCount();
+            for (int i = 0; i < numBeta; ++i) {
+                betaList.add((Parameter) cxo.getChild(i));
+            }
         }
 
         MatrixParameter dMatrix = null;
@@ -163,16 +187,43 @@ public class GMRFSkyrideLikelihoodParser extends AbstractXMLObjectParser {
             timeAwareSmoothing = xo.getBooleanAttribute(TIME_AWARE_SMOOTHING);
         }
 
-        if ((dMatrix != null && beta == null) || (dMatrix == null && beta != null))
-            throw new XMLParseException("Must specify both a set of regression coefficients and a design matrix.");
+       // if ((dMatrix != null && beta == null) || (dMatrix == null && beta != null))
+       //     throw new XMLParseException("Must specify both a set of regression coefficients and a design matrix.");
 
         if (dMatrix != null) {
             if (dMatrix.getRowDimension() != popParameter.getDimension())
                 throw new XMLParseException("Design matrix row dimension must equal the population parameter length.");
-            if (dMatrix.getColumnDimension() != beta.getDimension())
+            if (dMatrix.getColumnDimension() != betaParameter.getDimension())
                 throw new XMLParseException("Design matrix column dimension must equal the regression coefficient length.");
         }
 
+        List<Parameter> covPrecParam = null;
+        if (xo.hasChildNamed(COV_PREC_PARAM)){
+            covPrecParam = new ArrayList<Parameter>();
+            cxo = xo.getChild(COV_PREC_PARAM);
+            final int numCovPrec = cxo.getChildCount();
+
+            for(int i=0; i < numCovPrec; ++i){
+                covPrecParam.add((Parameter) cxo.getChild(i));
+            }
+        }
+
+        List<MatrixParameter> covariates = null;
+        if (xo.hasChildNamed(COVARIATES)){
+            covariates = new ArrayList<MatrixParameter>();
+            cxo = xo.getChild(COVARIATES);
+            final int numCov = cxo.getChildCount();
+
+            for (int i = 0; i < numCov; ++i) {
+                covariates.add((MatrixParameter) cxo.getChild(i));
+            }
+        }
+
+        if ((covariates != null && betaList == null) ||
+                (covariates == null &&  betaList != null))
+             throw new XMLParseException("Must specify both a set of regression coefficients and a design matrix.");
+
+
         if (xo.getAttribute(RANDOMIZE_TREE, false)) {
             for (Tree tree : treeList) {
                 if (tree instanceof TreeModel) {
@@ -191,13 +242,19 @@ public class GMRFSkyrideLikelihoodParser extends AbstractXMLObjectParser {
         if (xo.getAttribute(OLD_SKYRIDE, true) && xo.getName().compareTo(SKYGRID_LIKELIHOOD) != 0) {
 
             return new GMRFSkyrideLikelihood(treeList, popParameter, groupParameter, precParameter,
-                    lambda, beta, dMatrix, timeAwareSmoothing, rescaleByRootHeight);
+                    lambda, betaParameter, dMatrix, timeAwareSmoothing, rescaleByRootHeight);
 
         } else {
-
-            return new GMRFMultilocusSkyrideLikelihood(treeList, popParameter, groupParameter, precParameter,
-                    lambda, beta, dMatrix, timeAwareSmoothing, cutOff.getParameterValue(0), (int) numGridPoints.getParameterValue(0), phi, ploidyFactors);
-
+            if(xo.getChild(GRID_POINTS) != null){
+                System.err.println("A");
+
+                return new GMRFMultilocusSkyrideLikelihood(treeList, popParameter, groupParameter, precParameter,
+                        lambda, betaParameter, dMatrix, timeAwareSmoothing, gridPoints, covariates, ploidyFactors,
+                        lastObservedIndex, covPrecParam, betaList);
+            }else {
+                return new GMRFMultilocusSkyrideLikelihood(treeList, popParameter, groupParameter, precParameter,
+                        lambda, betaParameter, dMatrix, timeAwareSmoothing, cutOff.getParameterValue(0), (int) numGridPoints.getParameterValue(0), phi, ploidyFactors);
+            }
         }
     }
 
@@ -233,6 +290,9 @@ public class GMRFSkyrideLikelihoodParser extends AbstractXMLObjectParser {
             new ElementRule(GROUP_SIZES, new XMLSyntaxRule[]{
                     new ElementRule(Parameter.class)
             }, true),
+            new ElementRule(SINGLE_BETA, new XMLSyntaxRule[] {
+                    new ElementRule(Parameter.class),
+            }, true),
             AttributeRule.newBooleanRule(RESCALE_BY_ROOT_ISSUE, true),
             AttributeRule.newBooleanRule(RANDOMIZE_TREE, true),
             AttributeRule.newBooleanRule(TIME_AWARE_SMOOTHING, true),
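
The lastObservedIndex, betaParameter, covariatePrecision and covariates blocks added to this parser all repeat the same loop: fetch an optional named child element and collect its children into a list. A hypothetical generic helper (not part of the commit), using only XMLObject calls already made in this parser and the java.util types it already imports, could collapse the four copies:

    // Hypothetical helper; elementName and childClass are whatever the caller needs.
    private static <T> List<T> parseOptionalChildList(XMLObject xo, String elementName,
                                                      Class<T> childClass) {
        if (!xo.hasChildNamed(elementName)) {
            return null; // keep the parser's "null means absent" convention
        }
        XMLObject cxo = xo.getChild(elementName);
        List<T> children = new ArrayList<T>(cxo.getChildCount());
        for (int i = 0; i < cxo.getChildCount(); ++i) {
            children.add(childClass.cast(cxo.getChild(i)));
        }
        return children;
    }

Each of the four blocks would then reduce to a single call such as betaList = parseOptionalChildList(xo, BETA_PARAMETER, Parameter.class).
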
diff --git a/src/dr/evomodelxml/operators/LoadingsHamiltonianMCParser.java b/src/dr/evomodelxml/operators/LoadingsHamiltonianMCParser.java
new file mode 100644
index 0000000..fde61ef
--- /dev/null
+++ b/src/dr/evomodelxml/operators/LoadingsHamiltonianMCParser.java
@@ -0,0 +1,65 @@
+package dr.evomodelxml.operators;
+
+import dr.evomodel.operators.LoadingsHamiltonianMC;
+import dr.inference.distribution.MomentDistributionModel;
+import dr.inference.model.LatentFactorModel;
+import dr.inference.model.MatrixParameter;
+import dr.inference.operators.CoercionMode;
+import dr.xml.*;
+
+/**
+ * Created by max on 1/11/16.
+ */
+public class LoadingsHamiltonianMCParser extends AbstractXMLObjectParser {
+    public static final String LOADINGS_HAMILTONIAN_MC="loadingsHamiltonianMC";
+    public static final String WEIGHT="weight";
+    public static final String STEP_SIZE="stepSize";
+    public static final String N_STEPS="nSteps";
+    public static final String MOMENTUM_SD="momentumSd";
+
+    @Override
+    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+        LatentFactorModel lfm=(LatentFactorModel) xo.getChild(LatentFactorModel.class);
+        MomentDistributionModel prior=(MomentDistributionModel) xo.getChild(MomentDistributionModel.class);
+        double weight=xo.getDoubleAttribute(WEIGHT);
+        CoercionMode mode=CoercionMode.parseMode(xo);
+        int nSteps=xo.getIntegerAttribute(N_STEPS);
+        double stepSize=xo.getDoubleAttribute(STEP_SIZE);
+        double momentumSd= xo.getDoubleAttribute(MOMENTUM_SD);
+        MatrixParameter loadings=(MatrixParameter) xo.getChild(MatrixParameter.class);
+
+        return new LoadingsHamiltonianMC(lfm, prior, weight, mode, stepSize, nSteps, momentumSd, loadings);
+    }
+
+    @Override
+    public XMLSyntaxRule[] getSyntaxRules() {
+        return rules;
+    }
+
+    private static final XMLSyntaxRule[] rules = {
+            AttributeRule.newDoubleRule(WEIGHT),
+            AttributeRule.newDoubleRule(STEP_SIZE),
+            AttributeRule.newIntegerRule(N_STEPS),
+            AttributeRule.newDoubleRule(MOMENTUM_SD),
+            new ElementRule(LatentFactorModel.class),
+            new ElementRule(MomentDistributionModel.class),
+            new ElementRule(MatrixParameter.class),
+    };
+
+
+    @Override
+    public String getParserDescription() {
+        return "Hamiltonian Monte Carlo for loadings matrix in a latent factor model";
+    }
+
+    @Override
+    public Class getReturnType() {
+        return LoadingsHamiltonianMC.class;
+    }
+
+    @Override
+    public String getParserName() {
+        return LOADINGS_HAMILTONIAN_MC;
+    }
+}
diff --git a/src/dr/evomodelxml/operators/SubtreeLeapOperatorParser.java b/src/dr/evomodelxml/operators/SubtreeLeapOperatorParser.java
index 2a19270..d22f1ab 100644
--- a/src/dr/evomodelxml/operators/SubtreeLeapOperatorParser.java
+++ b/src/dr/evomodelxml/operators/SubtreeLeapOperatorParser.java
@@ -50,13 +50,16 @@ public class SubtreeLeapOperatorParser extends AbstractXMLObjectParser {
         final double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
 
         final double size = xo.getAttribute("size", 1.0);
+        final double prob = xo.getAttribute("accP", 0.234);
 
         if (Double.isInfinite(size) || size <= 0.0) {
-            throw new XMLParseException("size attribute must be positive and not infinite. was " + size +
-           " for tree " + treeModel.getId() );
+            throw new XMLParseException("size attribute must be positive and not infinite. was " + size);
         }
-
-        SubtreeLeapOperator operator = new SubtreeLeapOperator(treeModel, weight, size, mode);
+        
+        if (prob <= 0.0 || prob >= 1.0) {
+            throw new XMLParseException("Target acceptance probability has to lie in (0, 1). Currently: " + prob);
+        }
+        SubtreeLeapOperator operator = new SubtreeLeapOperator(treeModel, weight, size, prob, mode);
 
         return operator;
     }
@@ -76,6 +79,7 @@ public class SubtreeLeapOperatorParser extends AbstractXMLObjectParser {
     private final XMLSyntaxRule[] rules = {
             AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
             AttributeRule.newDoubleRule("size", true),
+            AttributeRule.newDoubleRule("accP", true),
             AttributeRule.newBooleanRule(CoercableMCMCOperator.AUTO_OPTIMIZE, true),
             new ElementRule(TreeModel.class)
     };
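
The new optional accP attribute sets the operator's target acceptance probability and defaults to 0.234, the well-known asymptotically optimal acceptance rate for random-walk Metropolis proposals (Roberts, Gelman and Gilks, 1997), with the added guard rejecting values outside (0, 1). A standalone restatement of that guard (hypothetical helper, not repo code):

    // Hypothetical standalone version of the check added above.
    static double checkTargetAcceptance(double prob) {
        if (prob <= 0.0 || prob >= 1.0) {
            throw new IllegalArgumentException(
                    "Target acceptance probability has to lie in (0, 1). Currently: " + prob);
        }
        return prob;
    }
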
diff --git a/src/dr/evomodelxml/substmodel/MultivariateOUModelParser.java b/src/dr/evomodelxml/substmodel/MultivariateOUModelParser.java
index ecfbe02..f6a2efa 100644
--- a/src/dr/evomodelxml/substmodel/MultivariateOUModelParser.java
+++ b/src/dr/evomodelxml/substmodel/MultivariateOUModelParser.java
@@ -25,7 +25,7 @@
 
 package dr.evomodelxml.substmodel;
 
-import dr.evomodel.substmodel.MultivariateOUModel;
+import dr.inference.distribution.MultivariateOUModel;
 import dr.evomodel.substmodel.SubstitutionModel;
 import dr.inference.distribution.GeneralizedLinearModel;
 import dr.inference.model.DesignMatrix;
diff --git a/src/dr/evomodelxml/tree/MonophylyStatisticParser.java b/src/dr/evomodelxml/tree/MonophylyStatisticParser.java
index b201987..62bf18d 100644
--- a/src/dr/evomodelxml/tree/MonophylyStatisticParser.java
+++ b/src/dr/evomodelxml/tree/MonophylyStatisticParser.java
@@ -40,6 +40,7 @@ public class MonophylyStatisticParser extends AbstractXMLObjectParser {
     public static final String MONOPHYLY_STATISTIC = "monophylyStatistic";
     public static final String MRCA = "mrca";
     public static final String IGNORE = "ignore";
+    public static final String INVERSE = "inverse";
 
     public String getParserName() {
         return MONOPHYLY_STATISTIC;
@@ -49,6 +50,8 @@ public class MonophylyStatisticParser extends AbstractXMLObjectParser {
 
         String name = xo.getAttribute(Statistic.NAME, xo.getId());
 
+        Boolean inverse = xo.getAttribute(INVERSE, false);
+
         Tree tree = (Tree) xo.getChild(Tree.class);
 
         XMLObject cxo = xo.getChild(MRCA);
@@ -81,7 +84,7 @@ public class MonophylyStatisticParser extends AbstractXMLObjectParser {
         }
 
         try {
-            return new MonophylyStatistic(name, tree, taxa, ignore);
+            return new MonophylyStatistic(name, tree, taxa, ignore, inverse);
         } catch (Tree.MissingTaxonException mte) {
             throw new XMLParseException("Taxon, " + mte + ", in " + getParserName() + "was not found in the tree.");
         }
@@ -105,6 +108,7 @@ public class MonophylyStatisticParser extends AbstractXMLObjectParser {
 
     private final XMLSyntaxRule[] rules = {
             new StringAttributeRule(Statistic.NAME, "A name for this statistic for the purpose of logging", true),
+            AttributeRule.newBooleanRule(INVERSE, true, "inverse, returns 0/false when monophyletic and 1/true when not monophyletic"),
             // Any tree will do, no need to insist on a Tree Model
             new ElementRule(Tree.class),
             new ElementRule(MRCA, new XMLSyntaxRule[]{
diff --git a/src/dr/evomodelxml/tree/TerminalBranchStatisticParser.java b/src/dr/evomodelxml/tree/TerminalBranchStatisticParser.java
new file mode 100644
index 0000000..e5fc957
--- /dev/null
+++ b/src/dr/evomodelxml/tree/TerminalBranchStatisticParser.java
@@ -0,0 +1,73 @@
+/*
+ * TerminalBranchStatisticParser.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.evomodelxml.tree;
+
+import dr.evolution.tree.Tree;
+import dr.evomodel.tree.TerminalBranchStatistic;
+import dr.evomodel.tree.TreeModel;
+import dr.inference.model.Statistic;
+import dr.xml.*;
+
+/**
+ */
+public class TerminalBranchStatisticParser extends AbstractXMLObjectParser {
+
+    public static final String EXTERNAL_BRANCHES_STATISTIC = "TerminalBranchStatistic";
+
+    public String getParserName() {
+        return EXTERNAL_BRANCHES_STATISTIC;
+    }
+
+    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+        String name = xo.getAttribute(Statistic.NAME, xo.getId());
+        Tree tree = (Tree) xo.getChild(Tree.class);
+
+        return new TerminalBranchStatistic(name, tree);
+    }
+
+    //************************************************************************
+    // AbstractXMLObjectParser implementation
+    //************************************************************************
+
+    public String getParserDescription() {
+        return "A statistic that returns the length of external (terminal) branches";
+    }
+
+    public Class getReturnType() {
+        return TerminalBranchStatistic.class;
+    }
+
+    public XMLSyntaxRule[] getSyntaxRules() {
+        return rules;
+    }
+
+    private final XMLSyntaxRule[] rules = {
+            AttributeRule.newStringRule(Statistic.NAME, true),
+            new ElementRule(TreeModel.class),
+    };
+
+}
diff --git a/src/dr/evoxml/NewickParser.java b/src/dr/evoxml/NewickParser.java
index 14ed53f..ce1b83a 100644
--- a/src/dr/evoxml/NewickParser.java
+++ b/src/dr/evoxml/NewickParser.java
@@ -127,14 +127,12 @@ public class NewickParser extends AbstractXMLObjectParser {
             String id = node.getTaxon().getId();
             Taxon taxon = null;
 
-            try {
-                Object obj = getStore().getObjectById(id);
+            XMLObject obj = getStore().get(id);
 
-                if (obj instanceof Taxon) {
+            if (obj != null && obj.getNativeObject() instanceof Taxon) {
 
-                    taxon = (Taxon) obj;
-                }
-            } catch (ObjectNotFoundException e) { /**/}
+                taxon = (Taxon) obj.getNativeObject();
+            }
 
             if (taxon != null) {
 
diff --git a/src/dr/evoxml/UncertainAttributePatternsParser.java b/src/dr/evoxml/UncertainAttributePatternsParser.java
new file mode 100644
index 0000000..70dbe39
--- /dev/null
+++ b/src/dr/evoxml/UncertainAttributePatternsParser.java
@@ -0,0 +1,230 @@
+/*
+ * UncertainAttributePatternsParser.java
+ *
+ * Copyright (c) 2002-2016 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.evoxml;
+
+import dr.evolution.alignment.PatternList;
+import dr.evolution.alignment.SimpleSiteList;
+import dr.evolution.alignment.UncertainSiteList;
+import dr.evolution.datatype.DataType;
+import dr.evolution.util.Taxon;
+import dr.evolution.util.TaxonList;
+import dr.evoxml.util.DataTypeUtils;
+import dr.util.Citable;
+import dr.util.Citation;
+import dr.xml.*;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Logger;
+
+/**
+ * @author Marc A. Suchard
+ */
+public class UncertainAttributePatternsParser extends AbstractXMLObjectParser {
+
+    public static final String ATTRIBUTE = AttributePatternsParser.ATTRIBUTE;
+    public static final String NAME = "uncertainAttributePatterns";
+    public static final String LOCATION_TOKEN = "\\s";
+    public static final String PROBABILITY_TOKEN = ":";
+    public static final String NORMALIZE = "normalize";
+
+    public String getParserName() { return NAME; }
+
+    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+        String attributeName = xo.getStringAttribute(ATTRIBUTE);
+        TaxonList taxa = (TaxonList) xo.getChild(TaxonList.class);
+        DataType dataType = DataTypeUtils.getDataType(xo);
+
+        if (dataType == null) {
+            throw new XMLParseException("dataType expected for attributePatterns element");
+        }
+
+        // using a SimpleSiteList rather than Patterns to allow ancestral reconstruction
+        UncertainSiteList patterns = new UncertainSiteList(dataType, taxa);
+
+        boolean normalize = xo.getAttribute(NORMALIZE, true);
+
+        if (dataType == null) { // TODO Is this necessary given XMLSyntaxRules?
+            throw new XMLParseException("dataType expected for attributePatterns element");
+        }
+
+        double[][] uncertainPattern = new double[taxa.getTaxonCount()][];
+
+        // Parse attributes
+        boolean attributeFound = false;
+
+        for (int i = 0; i < taxa.getTaxonCount(); i++) {
+            Taxon taxon = taxa.getTaxon(i);
+
+            Object value = taxon.getAttribute(attributeName);
+
+            if (value != null) {
+                attributeFound = true;
+                List<StateProbability> stateProbabilities;
+
+                try {
+                    stateProbabilities = parseStates(value.toString(), dataType);
+                } catch (StateParseException e) {
+                    throw new XMLParseException("State or probability for attribute (" + attributeName + ") in taxon "
+                            + taxon.getId()  + " is invalid; state = \"" + e.getState() + "\" and probability =\""
+                            + e.getProbability() + "\"");
+                }
+
+                uncertainPattern[i] = convertToPartials(stateProbabilities, dataType, normalize);
+            } else {
+                throw new XMLParseException("State for attribute (" + attributeName + ") in taxon "
+                        + taxon.getId() + " is unknown.");
+            }
+        }
+
+        if (!attributeFound) {
+            throw new XMLParseException("The attribute (" + attributeName + ") was missing in all taxa. Check the name of the attribute.");
+        }
+
+        patterns.addPattern(uncertainPattern);
+
+        Logger.getLogger("dr.evolution").info("\n ---------------------------------\nCreating an uncertain attribute model for attribute \""
+                + attributeName + "\"");
+        Logger.getLogger("dr.evolution").info("\tIf you publish results using this model, please reference:");
+        Logger.getLogger("dr.evolution").info("\t" + Citable.Utils.getCitationString(patterns));
+        Logger.getLogger("dr.evolution").info("\n");
+
+        return patterns;
+    }
+
+
+    class StateProbability {
+        int state;
+        double probability;
+
+        public StateProbability(int state, double probability) {
+            this.state = state;
+            this.probability = probability;
+        }
+
+        public int getState() { return state; }
+
+        public double getProbability() { return probability; }
+    }
+
+    class StateParseException extends Exception {
+        String state;
+        String probability;
+
+        public StateParseException(String state, String probability) {
+            this.state = state;
+            this.probability = probability;
+        }
+
+        public String getState() { return state; }
+
+        public String getProbability() { return probability; }
+    }
+
+    private List<StateProbability> parseStates(String string, DataType dataType)
+            throws StateParseException {
+
+        List<StateProbability> stateProbabilities = new ArrayList<StateProbability>();
+
+        String[] tokens = string.split(LOCATION_TOKEN);
+        for (String token : tokens) {
+            String[] component = token.split(PROBABILITY_TOKEN);
+
+            int state = dataType.getState(component[0]);
+
+            double probability = 1.0;
+
+            if (component.length > 1) {
+                try {
+                    probability = Double.valueOf(component[1]);
+                } catch (NumberFormatException e) {
+                    probability = Double.NaN;
+                }
+            }
+
+            if (state < 0 || Double.isNaN(probability) || probability <= 0.0 || probability > 1.0) {
+                throw new StateParseException(component[0], (component.length == 1 ? "" : component[1]));
+            }
+
+            stateProbabilities.add(new StateProbability(state, probability));
+        }
+
+        return stateProbabilities;
+    }
+
+    private void normalize(double[] vec) {
+        double sum = 0.0;
+        for (double x : vec) {
+            sum += x;
+        }
+        for (int i = 0; i < vec.length; ++i) {
+            vec[i] /= sum;
+        }
+    }
+
+    private double[] convertToPartials(List<StateProbability> stateProbabilities, DataType dataType,
+                                       boolean normalize) {
+        double[] partials = new double[dataType.getStateCount()];
+
+        for (StateProbability state : stateProbabilities) {
+            partials[state.getState()] = state.getProbability();
+        }
+
+        if (normalize) {
+            normalize(partials);
+        }
+        return partials;
+    }
+
+    //************************************************************************
+    // AbstractXMLObjectParser implementation
+    //************************************************************************
+
+    public XMLSyntaxRule[] getSyntaxRules() { return rules; }
+
+    private XMLSyntaxRule[] rules = new XMLSyntaxRule[] {
+            new XORRule(
+                new StringAttributeRule(
+                    DataType.DATA_TYPE,
+                    "The data type",
+                    DataType.getRegisteredDataTypeNames(), false),
+                new ElementRule(DataType.class)
+            ),
+            AttributeRule.newStringRule(ATTRIBUTE),
+            AttributeRule.newBooleanRule(NORMALIZE, true),
+            new ElementRule(TaxonList.class, "The taxon set")
+    };
+
+    public String getParserDescription() {
+        return "A site pattern defined by an attribute in a set of taxa.";
+    }
+
+    public Class getReturnType() { return PatternList.class; }
+
+}
\ No newline at end of file
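
For reference, the attribute value format this parser tokenizes is a whitespace-separated list of state[:probability] entries (LOCATION_TOKEN and PROBABILITY_TOKEN above); a missing probability defaults to 1.0 and the resulting partials can be normalized. A small standalone illustration with a made-up attribute value ("Europe" and "Asia" are hypothetical states for a general data type):

    public class UncertainAttributeFormatDemo {
        public static void main(String[] args) {
            String value = "Europe:0.7 Asia:0.3";      // hypothetical taxon attribute value
            for (String token : value.split("\\s")) {  // LOCATION_TOKEN
                String[] part = token.split(":");      // PROBABILITY_TOKEN
                double p = part.length > 1 ? Double.parseDouble(part[1]) : 1.0;
                System.out.println("state=" + part[0] + " probability=" + p);
            }
        }
    }
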
diff --git a/src/dr/inference/distribution/GeneralizedLinearModel.java b/src/dr/inference/distribution/GeneralizedLinearModel.java
index dd4a6e7..348bd01 100644
--- a/src/dr/inference/distribution/GeneralizedLinearModel.java
+++ b/src/dr/inference/distribution/GeneralizedLinearModel.java
@@ -1,7 +1,7 @@
 /*
  * GeneralizedLinearModel.java
  *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ * Copyright (c) 2002-2016 Alexei Drummond, Andrew Rambaut and Marc Suchard
  *
  * This file is part of BEAST.
  * See the NOTICE file distributed with this work for additional
@@ -132,8 +132,20 @@ public abstract class GeneralizedLinearModel extends AbstractModelLikelihood imp
             offset += length;
         }
 
+        double[][] mat = grandDesignMatrix;
+
+        if (grandDesignMatrix.length < grandDesignMatrix[0].length) {
+            mat = new double[grandDesignMatrix[0].length][grandDesignMatrix.length];
+
+            for (int i = 0; i < grandDesignMatrix.length; ++i) {
+                for (int j = 0; j < grandDesignMatrix[i].length; ++j) {
+                    mat[j][i] = grandDesignMatrix[i][j];
+                }
+            }
+        }
+
         SingularValueDecomposition svd = new SingularValueDecomposition(
-                new DenseDoubleMatrix2D(grandDesignMatrix));
+                new DenseDoubleMatrix2D(mat));
 
         int rank = svd.rank();
         boolean isFullRank = (totalColDim == rank);
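
The transposition added above appears to exist because Colt's SingularValueDecomposition only accepts matrices with at least as many rows as columns, while the rank being computed is unchanged by transposing. A minimal standalone sketch of that idea (class and method names are hypothetical):

    import cern.colt.matrix.impl.DenseDoubleMatrix2D;
    import cern.colt.matrix.linalg.SingularValueDecomposition;

    public final class RankSketch {

        // rank(A) == rank(A^T), so a wide matrix is transposed before the SVD
        static int rank(double[][] a) {
            double[][] m = (a.length >= a[0].length) ? a : transpose(a);
            return new SingularValueDecomposition(new DenseDoubleMatrix2D(m)).rank();
        }

        static double[][] transpose(double[][] a) {
            double[][] t = new double[a[0].length][a.length];
            for (int i = 0; i < a.length; i++) {
                for (int j = 0; j < a[i].length; j++) {
                    t[j][i] = a[i][j];
                }
            }
            return t;
        }
    }
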
diff --git a/src/dr/inference/distribution/MomentDistributionModel.java b/src/dr/inference/distribution/MomentDistributionModel.java
index 0cd815e..60a9992 100644
--- a/src/dr/inference/distribution/MomentDistributionModel.java
+++ b/src/dr/inference/distribution/MomentDistributionModel.java
@@ -29,6 +29,7 @@ package dr.inference.distribution;
 import dr.inference.model.*;
 import dr.inference.model.Parameter;
 import dr.inferencexml.distribution.MomentDistributionModelParser;
+import dr.math.MathUtils;
 import dr.math.distributions.RandomGenerator;
 
 //@author Max Tolkoff
@@ -48,9 +49,9 @@ public class MomentDistributionModel extends AbstractModelLikelihood implements
         mean.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 1));
         addVariable(precision);
 //        precision.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, 0.0, 1));
-        this.cutoff=cutoff;
+        this.cutoff=cutoff;  if(cutoff!=null){
         addVariable(cutoff);
-        cutoff.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, 0.0, cutoff.getDimension()));
+        cutoff.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, 0.0, cutoff.getDimension()));}
         addVariable(data);
         this.data=data;
         untruncated=new NormalDistributionModel(mean, precision, true);
@@ -84,16 +85,26 @@ public class MomentDistributionModel extends AbstractModelLikelihood implements
         {
             sum=0;
         }
-        if(data.getDimension()!=cutoff.getDimension()){
-            throw new RuntimeException("Incorrect number of cutoffs");
-        }
+
+        if(cutoff!=null){
+            if(data.getDimension()!=cutoff.getDimension()){
+                throw new RuntimeException("Incorrect number of cutoffs");
+            }
         for (int i = 0; i <data.getDimension() ; i++) {
-            if (Math.sqrt(precision.getParameterValue(0) * cutoff.getParameterValue(i)) > Math.abs(data.getParameterValue(i)) && data.getParameterValue(i)!=0)
-                return Double.NEGATIVE_INFINITY;
+            if (Math.sqrt(cutoff.getParameterValue(i)) - .0001 > Math.abs(data.getParameterValue(i)) && data.getParameterValue(i)!=0){
+//                System.out.println(i);
+//                System.out.println(cutoff.getParameterValue(i));
+//                System.out.println(data.getParameterValue(i));
+                return Double.NEGATIVE_INFINITY;                                                                          }
             else if(data.getParameterValue(i)==0)
                 sum+=-1000-Math.log(precision.getParameterValue(0));
             else
                 sum+=untruncated.logPdf(data.getParameterValue(i));//(2*untruncated.logPdf(cutoff.getParameterValue(i)));
+        }         }
+        else{
+            for (int i = 0; i <data.getDimension() ; i++) {
+                sum+= untruncated.logPdf(data.getParameterValue(i))+2* StrictMath.log(data.getParameterValue(i))+StrictMath.log(precision.getParameterValue(0));
+            }
         }
         sumKnown=true;
         return sum;
@@ -106,6 +117,8 @@ public class MomentDistributionModel extends AbstractModelLikelihood implements
         return 0;
     }
 
+    public Parameter getCutoff(){return cutoff;}
+
     @Override
     public double[][] getScaleMatrix() {
         double[][] temp=new double[1][1];
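
The reworked log-density loop above makes the cutoff parameter optional: when a cutoff is supplied, any non-zero value whose magnitude falls below the square root of its cutoff (minus a small tolerance) contributes negative infinity, otherwise the untruncated normal density is used; when no cutoff is given, every value contributes a density term. A stripped-down, per-element sketch of that branching, with the normal log-density written out inline and all names purely illustrative:

    final class CutoffGuardSketch {

        // Per-element contribution; cutoff may be null (no rejection region).
        static double logTerm(double x, Double cutoff, double mean, double precision) {
            if (cutoff != null && x != 0.0
                    && Math.sqrt(cutoff) - 1e-4 > Math.abs(x)) {
                return Double.NEGATIVE_INFINITY; // value falls inside the rejection region
            }
            // log density of Normal(mean, variance = 1 / precision)
            return 0.5 * (Math.log(precision) - Math.log(2.0 * Math.PI))
                    - 0.5 * precision * (x - mean) * (x - mean);
        }
    }

The sketch omits the special handling of exact zeros and the extra log terms of the cutoff-free branch in the patch; it is only meant to show the shape of the optional-cutoff guard.
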
diff --git a/src/dr/inference/distribution/MultivariateDistributionLikelihood.java b/src/dr/inference/distribution/MultivariateDistributionLikelihood.java
index 1c8c0e6..435915e 100644
--- a/src/dr/inference/distribution/MultivariateDistributionLikelihood.java
+++ b/src/dr/inference/distribution/MultivariateDistributionLikelihood.java
@@ -167,7 +167,6 @@ public class MultivariateDistributionLikelihood extends AbstractDistributionLike
                             .append(i + 1).append("\n");
                 }
             }
-            sb.append("Please cite:\n").append(Citable.Utils.getCitationString(Transform.LOG));
             Logger.getLogger("dr.utils.Transform").info(sb.toString());
         }
         return transforms;
diff --git a/src/dr/inference/distribution/MultivariateNormalDistributionModel.java b/src/dr/inference/distribution/MultivariateNormalDistributionModel.java
index 4b1824f..a3476c6 100644
--- a/src/dr/inference/distribution/MultivariateNormalDistributionModel.java
+++ b/src/dr/inference/distribution/MultivariateNormalDistributionModel.java
@@ -163,4 +163,14 @@ public class MultivariateNormalDistributionModel extends AbstractModel implement
         checkDistribution();
         return distribution.logPdf(x);
     }
+
+    @Override
+    public int getDimension() {
+        return mean.getDimension();
+    }
+
+    @Override
+    public double[][] getPrecisionMatrix() {
+        return precision.getParameterAsMatrix();
+    }
 }
diff --git a/src/dr/evomodel/substmodel/MultivariateOUModel.java b/src/dr/inference/distribution/MultivariateOUModel.java
similarity index 98%
rename from src/dr/evomodel/substmodel/MultivariateOUModel.java
rename to src/dr/inference/distribution/MultivariateOUModel.java
index bd729f8..9901d0d 100644
--- a/src/dr/evomodel/substmodel/MultivariateOUModel.java
+++ b/src/dr/inference/distribution/MultivariateOUModel.java
@@ -1,7 +1,7 @@
 /*
  * MultivariateOUModel.java
  *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ * Copyright (c) 2002-2016 Alexei Drummond, Andrew Rambaut and Marc Suchard
  *
  * This file is part of BEAST.
  * See the NOTICE file distributed with this work for additional
@@ -23,8 +23,10 @@
  * Boston, MA  02110-1301  USA
  */
 
-package dr.evomodel.substmodel;
+package dr.inference.distribution;
 
+import dr.evomodel.substmodel.PositiveDefiniteSubstitutionModel;
+import dr.evomodel.substmodel.SubstitutionModel;
 import dr.inference.distribution.GeneralizedLinearModel;
 import dr.inference.model.MatrixParameter;
 import dr.inference.model.Model;
diff --git a/src/dr/inference/distribution/NormalDistributionModel.java b/src/dr/inference/distribution/NormalDistributionModel.java
index efc91c9..b2e9fed 100644
--- a/src/dr/inference/distribution/NormalDistributionModel.java
+++ b/src/dr/inference/distribution/NormalDistributionModel.java
@@ -31,7 +31,6 @@ import dr.math.MathUtils;
 import dr.math.UnivariateFunction;
 import dr.math.distributions.GaussianProcessRandomGenerator;
 import dr.math.distributions.NormalDistribution;
-import dr.math.distributions.RandomGenerator;
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
 
@@ -189,4 +188,15 @@ public class NormalDistributionModel extends AbstractModel implements Parametric
     public Likelihood getLikelihood() {
         return null;
     }
+
+    @Override
+    public int getDimension() { return 1; }
+
+    @Override
+    public double[][] getPrecisionMatrix() {
+        double p = hasPrecision ?
+                precision.getValue(0) :
+                stdev.getValue(0) * stdev.getValue(0);
+        return new double[][]{{p}};
+    }
 }
diff --git a/src/dr/inference/mcmc/MarginalLikelihoodEstimator.java b/src/dr/inference/mcmc/MarginalLikelihoodEstimator.java
index 5178b8d..8bd2585 100644
--- a/src/dr/inference/mcmc/MarginalLikelihoodEstimator.java
+++ b/src/dr/inference/mcmc/MarginalLikelihoodEstimator.java
@@ -33,12 +33,16 @@ import dr.inference.model.Model;
 import dr.inference.model.PathLikelihood;
 import dr.inference.operators.*;
 import dr.inference.prior.Prior;
+import dr.util.Author;
+import dr.util.Citable;
+import dr.util.Citation;
 import dr.util.Identifiable;
 import dr.xml.*;
 import org.apache.commons.math.MathException;
 import org.apache.commons.math.distribution.BetaDistributionImpl;
 
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 
 /**
@@ -47,7 +51,7 @@ import java.util.List;
  * @author Marc Suchard
  * @author Guy Baele
  */
-public class MarginalLikelihoodEstimator implements Runnable, Identifiable {
+public class MarginalLikelihoodEstimator implements Runnable, Identifiable, Citable {
 
     public MarginalLikelihoodEstimator(String id, int chainLength, int burninLength, int pathSteps, double[] fixedRunValues,
 //                                       boolean linear, boolean lacing,
@@ -100,8 +104,8 @@ public class MarginalLikelihoodEstimator implements Runnable, Identifiable {
 
             for (int i = 0; i < schedule.getOperatorCount(); ++i) {
                 MCMCOperator operator = schedule.getOperator(i);
-                if (operator instanceof GibbsOperator) {
-                    ((GibbsOperator)operator).setPathParameter(pathParameter);
+                if (operator instanceof PathDependentOperator) {
+                    ((PathDependentOperator)operator).setPathParameter(pathParameter);
                 }
             }
 
@@ -401,6 +405,65 @@ public class MarginalLikelihoodEstimator implements Runnable, Identifiable {
         }
     };
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.FRAMEWORK;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Marginal likelihood estimation using path sampling / stepping-stone sampling (first 2 citations) and generalized stepping-stone sampling (3rd citation)";
+    }
+
+    @Override
+    public List<Citation> getCitations() {
+        return Arrays.asList(new Citation(
+                        new Author[]{
+                                new Author("G", "Baele"),
+                                new Author("P", "Lemey"),
+                                new Author("T", "Bedford"),
+                                new Author("A", "Rambaut"),
+                                new Author("MA", "Suchard"),
+                                new Author("AV", "Alekseyenko")
+                        },
+                        "Improving the accuracy of demographic and molecular clock model comparison while accommodating phylogenetic uncertainty",
+                        2012,
+                        "Mol. Biol. Evol.",
+                        29,
+                        2157, 2167,
+                        Citation.Status.PUBLISHED
+                ),
+                new Citation(
+                        new Author[]{
+                                new Author("G", "Baele"),
+                                new Author("WLS", "Li"),
+                                new Author("AJ", "Drummond"),
+                                new Author("MA", "Suchard"),
+                                new Author("P", "Lemey")
+                        },
+                        "Accurate model selection of relaxed molecular clocks in Bayesian phylogenetics",
+                        2013,
+                        "Mol. Biol. Evol.",
+                        30,
+                        239, 243,
+                        Citation.Status.PUBLISHED
+                ),
+                new Citation(
+                        new Author[]{
+                                new Author("G", "Baele"),
+                                new Author("P", "Lemey"),
+                                new Author("MA", "Suchard")
+                        },
+                        "Genealogical working distributions for Bayesian model testing with phylogenetic uncertainty",
+                        2016,
+                        "Syst. Biol.",
+                        65,
+                        250, 264,
+                        Citation.Status.PUBLISHED
+                )
+        );
+    }
+
     // TRANSIENT PUBLIC METHODS *****************************************
 
     /**
@@ -553,14 +616,14 @@ public class MarginalLikelihoodEstimator implements Runnable, Identifiable {
             java.util.logging.Logger.getLogger("dr.inference").info("\nCreating the Marginal Likelihood Estimator chain:" +
                     "\n  chainLength=" + chainLength +
                     "\n  pathSteps=" + pathSteps +
-                    "\n  pathScheme=" + scheme.getText() + alphaBetaText +
-                    "\n  If you use these results, please cite:" +
-                    "\n    Guy Baele, Philippe Lemey, Trevor Bedford, Andrew Rambaut, Marc A. Suchard, and Alexander V. Alekseyenko." +
-                    "\n    2012. Improving the accuracy of demographic and molecular clock model comparison while accommodating " +
-                    "\n          phylogenetic uncertainty. Mol. Biol. Evol. 29(9):2157-2167." +
-                    "\n    and " +
-                    "\n    Guy Baele, Wai Lok Sibon Li, Alexei J. Drummond, Marc A. Suchard, and Philippe Lemey. 2013." +
-                    "\n    Accurate model selection of relaxed molecular clocks in Bayesian phylogenetics. Mol. Biol. Evol. 30(2):239-243.\n");
+                    "\n  pathScheme=" + scheme.getText() + alphaBetaText); //+
+                    //"\n  If you use these results, please cite:" +
+                    //"\n    Guy Baele, Philippe Lemey, Trevor Bedford, Andrew Rambaut, Marc A. Suchard, and Alexander V. Alekseyenko." +
+                    //"\n    2012. Improving the accuracy of demographic and molecular clock model comparison while accommodating " +
+                    //"\n          phylogenetic uncertainty. Mol. Biol. Evol. 29(9):2157-2167." +
+                    //"\n    and " +
+                    //"\n    Guy Baele, Wai Lok Sibon Li, Alexei J. Drummond, Marc A. Suchard, and Philippe Lemey. 2013." +
+                    //"\n    Accurate model selection of relaxed molecular clocks in Bayesian phylogenetics. Mol. Biol. Evol. 30(2):239-243.\n");
             return mle;
         }
 
diff --git a/src/dr/inference/mcmcmc/MCMCMC.java b/src/dr/inference/mcmcmc/MCMCMC.java
index 3541267..b5421a4 100644
--- a/src/dr/inference/mcmcmc/MCMCMC.java
+++ b/src/dr/inference/mcmcmc/MCMCMC.java
@@ -485,21 +485,6 @@ public class MCMCMC implements Runnable {
         }
     }
 
-    //PRIVATE METHODS *****************************************
-    private boolean isPreBurninNeeded() {
-
-        if (mcmcOptions.useCoercion()) return true;
-
-        for (int i = 0; i < schedules[coldChain].getOperatorCount(); i++) {
-            MCMCOperator op = schedules[coldChain].getOperator(i);
-
-            if (op instanceof CoercableMCMCOperator) {
-                if (((CoercableMCMCOperator) op).getMode() == CoercionMode.COERCION_ON) return true;
-            }
-        }
-        return false;
-    }
-
     public void setShowOperatorAnalysis(boolean soa) {
         showOperatorAnalysis = soa;
     }
diff --git a/src/dr/inference/model/ComplementParameter.java b/src/dr/inference/model/ComplementParameter.java
new file mode 100644
index 0000000..6426df1
--- /dev/null
+++ b/src/dr/inference/model/ComplementParameter.java
@@ -0,0 +1,112 @@
+/*
+ * ComplementParameter.java
+ *
+ * Copyright (c) 2002-2016 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.inference.model;
+
+/**
+ * @author Marc Suchard
+ */
+public class ComplementParameter extends Parameter.Abstract implements VariableListener {
+
+    public ComplementParameter(Parameter parameter) {
+        this.parameter = parameter;
+
+        parameter.addVariableListener(this);
+    }
+
+    public int getDimension() {
+        return parameter.getDimension();
+    }
+
+    protected void storeValues() {
+        parameter.storeParameterValues();
+    }
+
+    protected void restoreValues() {
+        parameter.restoreParameterValues();
+    }
+
+    protected void acceptValues() {
+        parameter.acceptParameterValues();
+    }
+
+    protected void adoptValues(Parameter source) {
+        throw new RuntimeException("Not implemented");
+    }
+
+    public double getParameterValue(int dim) {
+        return 1.0 - parameter.getParameterValue(dim);
+    }
+
+    public void setParameterValue(int dim, double value) {
+        throw new RuntimeException("Not implemented");
+    }
+
+    public void setParameterValueQuietly(int dim, double value) {
+        throw new RuntimeException("Not implemented");
+    }
+
+    public void setParameterValueNotifyChangedAll(int dim, double value) {
+        throw new RuntimeException("Not implemented");
+    }
+
+    public String getParameterName() {
+        if (getId() == null) {
+            StringBuilder sb = new StringBuilder("complement");
+
+            sb.append(".").append(parameter.getId());
+
+            setId(sb.toString());
+        }
+        return getId();
+    }
+
+    public void addBounds(Bounds bounds) {
+        this.bounds = bounds;
+    }
+
+    public Bounds<Double> getBounds() {
+        if (bounds == null) {
+            return parameter.getBounds(); // TODO
+        } else {
+            return bounds;
+        }
+    }
+
+    public void addDimension(int index, double value) {
+        throw new RuntimeException("Not yet implemented.");
+    }
+
+    public double removeDimension(int index) {
+        throw new RuntimeException("Not yet implemented.");
+    }
+
+    public void variableChangedEvent(Variable variable, int index, ChangeType type) {
+        fireParameterChangedEvent(index, type);
+    }
+
+    private final Parameter parameter;
+    private Bounds bounds = null;
+}
diff --git a/src/dr/inference/model/CompoundLikelihood.java b/src/dr/inference/model/CompoundLikelihood.java
index ce2bec3..10866b3 100644
--- a/src/dr/inference/model/CompoundLikelihood.java
+++ b/src/dr/inference/model/CompoundLikelihood.java
@@ -57,9 +57,10 @@ public class CompoundLikelihood implements Likelihood, Reportable {
 
         if (threads < 0 && this.likelihoods.size() > 1) {
             // asking for an automatic threadpool size and there is more than one likelihood to compute
-            threadCount = this.likelihoods.size();
+            threadCount = this.likelihoods.size();  // create a threadpool the size of the number of likelihoods
+//            threadCount = -1; // use cached thread pool
         } else if (threads > 0) {
-            threadCount = threads;
+            threadCount = threads; // use a thread pool of a specified size
         } else {
             // no thread pool requested or only one likelihood
             threadCount = 0;
@@ -67,10 +68,11 @@ public class CompoundLikelihood implements Likelihood, Reportable {
 
         if (threadCount > 0) {
             pool = Executors.newFixedThreadPool(threadCount);
-//        } else if (threads < 0) {
-//            // create a cached thread pool which should create one thread per likelihood...
-//            pool = Executors.newCachedThreadPool();
+        } else if (threadCount < 0) {
+            // create a cached thread pool which should create one thread per likelihood...
+            pool = Executors.newCachedThreadPool();
         } else {
+            // don't use a threadpool (i.e., compute serially)
             pool = null;
         }
 
@@ -143,13 +145,10 @@ public class CompoundLikelihood implements Likelihood, Reportable {
                         likelihoodCallers.add(new LikelihoodCaller(likelihood, index));
                     }
                 }
-                
-//            } else {
-            	
-            	//TODO: hack in branch likes here
-//            	likelihoods.add(likelihood);
-            	
-            }// END: contains check
+
+            } else {
+                throw new IllegalArgumentException("Attempted to add the same likelihood multiple times to CompoundLikelihood.");
+            } // END: contains check
             
         }//END: if unroll check
         
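
The revised constructor logic above chooses between three execution strategies: a fixed pool sized automatically to the number of likelihoods, a fixed pool of an explicitly requested size, or serial evaluation, with a cached pool kept as an alternative for the automatic case. A condensed sketch of that selection using only the standard java.util.concurrent API; the helper and class names are illustrative:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    final class PoolSelectionSketch {

        // Mirrors the branch structure above; returns null for serial evaluation.
        static ExecutorService choosePool(int threads, int likelihoodCount) {
            final int threadCount;
            if (threads < 0 && likelihoodCount > 1) {
                threadCount = likelihoodCount; // automatic: one thread per likelihood
            } else if (threads > 0) {
                threadCount = threads;         // explicitly requested pool size
            } else {
                threadCount = 0;               // no pool: compute serially
            }

            if (threadCount > 0) {
                return Executors.newFixedThreadPool(threadCount);
            } else if (threadCount < 0) {
                // Only reachable if the automatic branch is switched to -1,
                // as the commented-out line in the patch suggests.
                return Executors.newCachedThreadPool();
            }
            return null;
        }
    }
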
diff --git a/src/dr/inference/model/ElementWiseMatrixMultiplicationParameter.java b/src/dr/inference/model/ElementWiseMatrixMultiplicationParameter.java
new file mode 100644
index 0000000..f3585a6
--- /dev/null
+++ b/src/dr/inference/model/ElementWiseMatrixMultiplicationParameter.java
@@ -0,0 +1,69 @@
+package dr.inference.model;
+
+/**
+ * Created by max on 11/30/15.
+ */
+public class ElementWiseMatrixMultiplicationParameter extends MatrixParameter {
+    private MatrixParameter[] paramList;
+
+    public ElementWiseMatrixMultiplicationParameter(String name) {
+        super(name);
+    }
+
+    public ElementWiseMatrixMultiplicationParameter(String name, MatrixParameter[] matList) {
+        super(name);
+        this.paramList =matList;
+        for (MatrixParameter mat : matList) {
+            mat.addVariableListener(this);
+        }
+    }
+
+    @Override
+    public double getParameterValue(int dim) {
+        double prod=1;
+        for (int i = 0; i < paramList.length ; i++) {
+            prod=prod* paramList[i].getParameterValue(dim);
+        }
+        return prod;
+    }
+
+    public double getParameterValue(int row, int col){
+        double prod=1;
+        for (int i = 0; i < paramList.length ; i++) {
+            prod=prod* paramList[i].getParameterValue(row,col);
+        }
+        return prod;
+    }
+
+
+    protected void storeValues() {
+        for (Parameter p : paramList) {
+            p.storeParameterValues();
+        }
+    }
+
+    protected void restoreValues() {
+        for (Parameter p : paramList) {
+            p.restoreParameterValues();
+        }
+    }
+
+    @Override
+    public void variableChangedEvent(Variable variable, int index, ChangeType type) {
+        fireParameterChangedEvent(index, type);
+    }
+
+    @Override
+    public int getDimension() {
+        return paramList[0].getDimension();
+    }
+
+    @Override
+    public int getColumnDimension() {
+        return paramList[0].getColumnDimension();
+    }
+
+    public int getRowDimension(){
+        return paramList[0].getRowDimension();
+    }
+}
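
ElementWiseMatrixMultiplicationParameter, added above, exposes the element-wise (Hadamard) product of a list of matrix parameters, computing each entry on demand rather than materialising the product. For reference, the same operation on plain arrays looks like this; the sketch is illustrative and not code from the patch:

    final class HadamardSketch {

        // Element-wise product of equally sized matrices.
        static double[][] hadamard(double[][]... mats) {
            final int rows = mats[0].length;
            final int cols = mats[0][0].length;
            double[][] out = new double[rows][cols];
            for (int i = 0; i < rows; ++i) {
                for (int j = 0; j < cols; ++j) {
                    double prod = 1.0;
                    for (double[][] m : mats) {
                        prod *= m[i][j]; // multiply corresponding entries
                    }
                    out[i][j] = prod;
                }
            }
            return out;
        }
    }
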
diff --git a/src/dr/inference/model/EqualityConstrainedParameter.java b/src/dr/inference/model/EqualityConstrainedParameter.java
index 5769969..4fc0e00 100644
--- a/src/dr/inference/model/EqualityConstrainedParameter.java
+++ b/src/dr/inference/model/EqualityConstrainedParameter.java
@@ -36,7 +36,7 @@ import java.util.List;
 /**
  * @author Marc A. Suchard
  */
-public class EqualityConstrainedParameter extends Parameter.Abstract implements VariableListener, Citable {
+public class EqualityConstrainedParameter extends Parameter.Abstract implements VariableListener {
     public static final String EQUALITY_CONSTRAINED_PARAMETER = "constrainedEqualParameter";
 
     public EqualityConstrainedParameter(String name, List<Parameter> params) {
@@ -56,8 +56,6 @@ public class EqualityConstrainedParameter extends Parameter.Abstract implements
         }
         StringBuilder sb = new StringBuilder("Constraining multiple parameters to be equal: ");
         sb.append(getId()).append("\n");
-        sb.append("\tPlease cite:\n");
-        sb.append(Citable.Utils.getCitationString(this));
         java.util.logging.Logger.getLogger("dr.inference.model").info(sb.toString());
     }
 
@@ -228,19 +226,6 @@ public class EqualityConstrainedParameter extends Parameter.Abstract implements
         }
     };
 
-    public List<Citation> getCitations() {
-        List<Citation> list = new ArrayList<Citation>();
-        list.add(
-                new Citation(
-                        new Author[]{
-                                new Author("MA", "Suchard"),
-                        },
-                        Citation.Status.IN_PREPARATION
-                )
-        );
-        return list;
-    }
-
     private final List<Parameter> uniqueParameters;
 
     private final Bounds bounds;
diff --git a/src/dr/inference/model/FastBUTMP.java b/src/dr/inference/model/FastBUTMP.java
new file mode 100644
index 0000000..d2d17fd
--- /dev/null
+++ b/src/dr/inference/model/FastBUTMP.java
@@ -0,0 +1,226 @@
+/*
+ * BlockUpperTriangularMatrixParameter.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.inference.model;
+
+/*
+@author Max Tolkoff
+*/
+
+
+
+public class FastBUTMP extends FastMatrixParameter {
+    private int rowDim;
+    private Bounds bounds = null;
+
+//    public TransposedBlockUpperTriangularMatrixParameter transposeBlock(){
+//        return TransposedBlockUpperTriangularMatrixParameter.recast(getVariableName(), this);
+//    }
+
+    public FastBUTMP(String name, int rows, int cols) {
+        super(name, cols, rows);
+
+
+        boolean diagonalRestriction=false;
+        if(diagonalRestriction) {
+            for (int i = 0; i < getColumnDimension(); i++) {
+                if (i < getRowDimension()) {
+                    double[] uppers = new double[i + 1];
+                    double[] lowers = new double[i + 1];
+
+                    for (int j = 0; j < uppers.length; j++) {
+                        uppers[j] = Double.POSITIVE_INFINITY;
+                        lowers[j] = Double.NEGATIVE_INFINITY;
+
+                    }
+                    lowers[i] = 0;
+                    getParameter(i).addBounds(new DefaultBounds(uppers, lowers));
+                }
+            }
+        }
+
+    }
+
+//    @Override
+//    public int getRowDimension() {
+//        return rowDim;
+//    }
+
+//    public void setRowDimension(int rowDim){
+//        this.rowDim=rowDim;
+//    }
+
+//    public double[][] getParameterAsMatrix(){
+//        double[][] answer=new double[getRowDimension()][getColumnDimension()];
+//        for(int i=0; i<getRowDimension(); i++){
+//            for(int j=0; j<getColumnDimension(); j++){
+//                if(i<=j){
+////                    System.err.print(parameters[i].getSize());
+////                    System.err.print("we get here\n");
+//                    answer[i][j]=getParameter(j).getParameterValue(i);
+//                }
+//                else{
+////                    System.err.print(i);
+////                    System.err.print(" ");
+////                    System.err.print(j);
+////                    System.err.print("\n");
+//                    answer[i][j]=0;
+////                    System.err.print("getting here?\n");
+//                }
+//            }
+//        }
+//
+//        return answer;
+//    }
+
+    public double getParameterValue(int row, int col) {
+        if (!matrixCondition(row, col)) {
+            return 0.0;
+        } else {
+            return super.getParameterValue(row,col);
+        }
+    }
+
+    protected int getRow(int PID){
+        return  PID%getRowDimension();
+    }
+
+    protected int getColumn(int PID){
+        return PID/getRowDimension();
+    }
+
+    public void setParameterValueQuietly(int row, int col, double value){
+        if(matrixCondition(row, col)){
+            super.setParameterValueQuietly(row, col, value);
+        }
+    }
+
+    public void setParameterValue(int row, int col,double value){
+        if(matrixCondition(row, col)){
+            super.setParameterValue(row, col, value);}
+    }
+
+    public void setParameterValue(int PID, double value){
+
+        int row=getRow(PID);
+        int col=getColumn(PID);
+//        System.out.println(row+" "+col);
+//        System.out.println(matrixCondition(row, col));
+
+
+        if(matrixCondition(row, col)){
+            setParameterValue(row, col, value);
+        }
+    }
+
+
+    //test if violates matrix condition
+    boolean matrixCondition(int row, int col){
+        return row>=getColumnDimension()-row;
+    }
+
+    public double getParameterValue(int id){
+        int row=getRow(id);
+        int col=getColumn(id);
+
+        if(matrixCondition(row, col)){
+            return getParameterValue(row, col);
+        }
+        else
+        {
+            return 0;
+        }
+    }
+
+    public void addBounds(Bounds<Double> boundary) {
+
+        if (bounds == null) {
+            bounds = new BUTMPBounds();
+//            return;
+        } //else {
+        IntersectionBounds newBounds = new IntersectionBounds(getDimension());
+        newBounds.addBounds(bounds);
+
+//        }
+        ((IntersectionBounds) bounds).addBounds(boundary);
+    }
+
+    public Bounds<Double> getBounds() {
+
+        if (bounds == null) {
+            bounds = new BUTMPBounds();
+        }
+        return bounds;
+    }
+
+    protected int getInnerDimension(int row, int col){
+        return row;
+    }
+
+
+    private class BUTMPBounds implements Bounds<Double>{
+        //TODO test!
+
+        public Double getUpperLimit(int dim) {
+            int row=getRow(dim);
+            int col=getColumn(dim);
+
+            if(matrixCondition(row, col)){
+
+                return getParameter(col).getBounds().getUpperLimit(getInnerDimension(row, col)); }
+            else
+                return 0.0;
+        }
+
+        public Double getLowerLimit(int dim) {
+            int row=getRow(dim);
+            int col=getColumn(dim);
+
+            if(matrixCondition(row, col)){
+                return getParameter(col).getBounds().getLowerLimit(getInnerDimension(row, col));
+//                    System.out.println(getParameters().get(dim-row).getBounds().getLowerLimit(getPindex().get(dim-row)));
+//                return getParameters().get(dim-row).getBounds().getLowerLimit(getPindex().get(dim-row));
+            }
+            else
+                return 0.0;
+        }
+
+        public int getBoundsDimension() {
+//                int nBlanks = 0;
+//                for (int i = 0; i <getColumnDimension() ; i++) {
+//                    nBlanks+=1;
+//                }
+//
+//                return getDimension()-nBlanks;
+            return getDimension();
+        }
+    }
+
+    public int getDimension(){
+        return getRowDimension()*getColumnDimension();
+    }
+
+
+}
diff --git a/src/dr/inference/model/FastMatrixParameter.java b/src/dr/inference/model/FastMatrixParameter.java
index af04ed3..8592bde 100644
--- a/src/dr/inference/model/FastMatrixParameter.java
+++ b/src/dr/inference/model/FastMatrixParameter.java
@@ -65,6 +65,7 @@ public class FastMatrixParameter extends CompoundParameter implements MatrixPara
 
         ParameterProxy(FastMatrixParameter matrix, int column) {
             this.matrix = matrix;
+//            this.addParameterListener(this.matrix);
             this.column = column;
         }
 
@@ -124,6 +125,12 @@ public class FastMatrixParameter extends CompoundParameter implements MatrixPara
         }
 
         @Override
+        public void fireParameterChangedEvent(int index, ChangeType type){
+            matrix.fireParameterChangedEvent(index, type);
+        }
+
+
+        @Override
         public void addDimension(int index, double value) {
             throw new RuntimeException("Do not call");
         }
diff --git a/src/dr/inference/model/FastTransposedBUTMP.java b/src/dr/inference/model/FastTransposedBUTMP.java
new file mode 100644
index 0000000..77be5fb
--- /dev/null
+++ b/src/dr/inference/model/FastTransposedBUTMP.java
@@ -0,0 +1,125 @@
+/*
+ * TransposedBlockUpperTriangularMatrixParameter.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.inference.model;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Created by max on 11/4/14.
+ */
+public class FastTransposedBUTMP extends FastBUTMP{
+    public FastTransposedBUTMP(String name, int rows, int cols) {
+        super(name, cols, rows);
+
+
+        int colDim=cols;
+//        int rowDim=params.length;
+
+//        for(int i=0; i<colDim; i++){
+//            if(i<rowDim)
+//            {params[i].setDimension(i+1);
+//                this.addParameter(params[i]);}
+//            else
+//            {params[i].setDimension(rowDim);
+//                this.addParameter(params[i]);
+////                System.err.print(colDim-rowDim+i+1);
+////                System.err.print("\n");
+//            }
+//        }
+        this.colDim=colDim;
+    }
+
+
+//    public static TransposedBlockUpperTriangularMatrixParameter recast(String name, CompoundParameter compoundParameter) {
+//        final int count = compoundParameter.getParameterCount();
+//        Parameter[] parameters = new Parameter[count];
+//        for (int i = 0; i < count; ++i) {
+//            parameters[i] = compoundParameter.getParameter(i);
+//        }
+//        return new TransposedBlockUpperTriangularMatrixParameter(name, parameters);
+//    }
+
+//    public double getParameterValue(int row, int col){
+//        if(col>row){
+//            return 0;
+//        }
+//        else{
+//            return getParameter(col).getParameterValue(row-col);
+//        }
+//    }
+
+    protected int getRow(int PID){
+        return  PID%getRowDimension();
+    }
+
+    protected int getColumn(int PID){
+        return PID/getRowDimension();
+    }
+
+    @Override
+    boolean matrixCondition(int row, int col) {
+        return row>=col;
+    }
+
+//    public void setParameterValue(int row, int col, double value){
+//            super.setParameterValue(row,col, value);
+//        }
+//
+//    public void setParameterValueQuietly(int row, int col, double value){
+//            super.setParameterValueQuietly(col, row, value);
+//    }
+
+    public Parameter getParameter(int index) {
+        if (slices == null) {
+            // construct vector_slices
+            slices = new ArrayList<Parameter>();
+            for (int i = 0; i < getColumnDimension(); ++i) {
+                VectorSliceParameter thisSlice = new VectorSliceParameter(getParameterName() + "." + i, i);
+                for (int j = i; j < getRowDimension(); ++j) {
+                    thisSlice.addParameter(super.getParameter(j));
+                }
+                slices.add(thisSlice);
+            }
+        }
+        return slices.get(index);
+    }
+
+    protected int getInnerDimension(int row, int col){
+        return row-col;
+    }
+
+//    public int getRowDimension(){
+//        return getParameterCount();
+//    }
+//
+//    public int getColumnDimension(){
+//        return colDim;
+//    }
+
+    int colDim;
+    private List<Parameter> slices = null;
+}
diff --git a/src/dr/inference/model/IndianBuffetProcessPrior.java b/src/dr/inference/model/IndianBuffetProcessPrior.java
index 09a8aa1..f391e39 100644
--- a/src/dr/inference/model/IndianBuffetProcessPrior.java
+++ b/src/dr/inference/model/IndianBuffetProcessPrior.java
@@ -156,7 +156,7 @@ public class IndianBuffetProcessPrior extends AbstractModelLikelihood {
                         same = true;
                         if (!isExplored[j]) {
                             for (int k = 0; k < data.getRowDimension(); k++) {
-                                if (data.getParameterValue(k, i) != data.getParameterValue(k, j))
+                                if (Math.abs(data.getParameterValue(k, i)) != Math.abs(data.getParameterValue(k, j)))
                                     same = false;
                                 if (data.getParameterValue(k, j) != 0) {
                                     containsNonZeroElements[j] = true;
@@ -185,7 +185,7 @@ public class IndianBuffetProcessPrior extends AbstractModelLikelihood {
           if(containsNonZeroElements[i]) {
               KPlus++;
               for (int j = 0; j < data.getRowDimension(); j++) {
-                  rowCount[i] += data.getParameterValue(j, i);
+                  rowCount[i] += Math.abs(data.getParameterValue(j, i));
               }
               sum2+=Beta.logBeta(rowCount[i], data.getRowDimension() + beta.getParameterValue(0) - rowCount[i]);
           }
diff --git a/src/dr/inference/model/LatentFactorModel.java b/src/dr/inference/model/LatentFactorModel.java
index 7dd994a..f868fdf 100644
--- a/src/dr/inference/model/LatentFactorModel.java
+++ b/src/dr/inference/model/LatentFactorModel.java
@@ -28,10 +28,9 @@ package dr.inference.model;
 import dr.math.matrixAlgebra.Matrix;
 import dr.util.Citable;
 import dr.util.Citation;
+import dr.util.CommonCitations;
 
-import java.util.List;
-import java.util.ListIterator;
-import java.util.Vector;
+import java.util.*;
 
 
 /**
@@ -44,10 +43,10 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
 //    private Matrix factors;
 //    private Matrix loadings;
 
-    private final MatrixParameter data;
-    private final MatrixParameter factors;
-    private final MatrixParameter loadings;
-    private MatrixParameter sData;
+    private final MatrixParameterInterface data;
+    private final MatrixParameterInterface factors;
+    private final MatrixParameterInterface loadings;
+    private MatrixParameterInterface sData;
     private final DiagonalMatrix rowPrecision;
     private final DiagonalMatrix colPrecision;
     private final Parameter continuous;
@@ -90,6 +89,8 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
     private boolean storedFactorsKnown = false;
     private boolean loadingsKnown = false;
     private boolean storedLoadingsKnown = false;
+    private boolean totalRecompute = true;
+    private boolean storedTotalRecompute = false;
 
     private double[] residual;
     private double[] LxF;
@@ -98,7 +99,7 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
 
     private double pathParameter = 1.0;
 
-    public LatentFactorModel(MatrixParameter data, MatrixParameter factors, MatrixParameter loadings,
+    public LatentFactorModel(MatrixParameterInterface data, MatrixParameterInterface factors, MatrixParameterInterface loadings,
                              DiagonalMatrix rowPrecision, DiagonalMatrix colPrecision,
                              boolean scaleData, Parameter continuous, boolean newModel, boolean recomputeResiduals, boolean recomputeFactors, boolean recomputeLoadings
     ) {
@@ -119,12 +120,14 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
         this.data = data;
         this.factors = factors;
         // Put default bounds on factors
-        for (int i = 0; i < factors.getParameterCount(); ++i) {
-            Parameter p = factors.getParameter(i);
-            System.err.println(p.getId() + " " + p.getDimension());
-            p.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, p.getDimension()));
+        if(factors instanceof MatrixParameter){
+        for (int i = 0; i < factors.getColumnDimension(); ++i) {
+                Parameter p = factors.getParameter(i);
+                System.err.println(p.getId() + " " + p.getDimension());
+                p.addBounds(new Parameter.DefaultBounds(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, p.getDimension()));
+            }
         }
-        this.continuous = continuous;
+        this.continuous=continuous;
 
         this.loadings = loadings;
 
@@ -176,8 +179,19 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
 //        System.out.println(new Matrix(factors.getParameterAsMatrix()));
 
 
-        if (nTaxa * dimData != data.getDimension()) {
-            throw new RuntimeException("LOADINGS MATRIX AND FACTOR MATRIX MUST HAVE EXTERNAL DIMENSIONS WHOSE PRODUCT IS EQUAL TO THE NUMBER OF DATA POINTS\n");
+        if (nTaxa != data.getColumnDimension()) {
+            throw new RuntimeException("DATA COLUMNS MUST HAVE THE SAME DIMENSION AS FACTOR COLUMNS\n");
+//            System.exit(10);
+        }
+        if (dimData != data.getRowDimension()) {
+            System.out.println(dimData);
+            System.out.println(data.getRowDimension());
+            System.out.println(loadings.getRowDimension());
+            throw new RuntimeException("DATA ROWS MUST HAVE THE SAME DIMENSION AS LOADINGS ROWS\n");
+//            System.exit(10);
+        }
+        if (factors.getRowDimension() != loadings.getColumnDimension()) {
+            throw new RuntimeException("LOADINGS AND FACTORS MUST HAVE THE SAME NUMBER OF FACTORS\n");
 //            System.exit(10);
         }
         if (dimData < dimFactors) {
@@ -241,7 +255,7 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
 //        Matrix ans=residual;
 //        return ans;
 //    }
-    public MatrixParameter getFactors() {
+    public MatrixParameterInterface getFactors(){
         return factors;
     }
 
@@ -249,11 +263,11 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
         return colPrecision;
     }
 
-    public MatrixParameter getLoadings() {
+    public MatrixParameterInterface getLoadings(){
         return loadings;
     }
 
-    public MatrixParameter getData() {
+    public MatrixParameterInterface getData(){
         return data;
     }
 
@@ -273,7 +287,7 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
 //    }
 
 
-    public MatrixParameter getScaledData() {
+    public MatrixParameterInterface getScaledData(){
         return data;
     }
 
@@ -290,12 +304,12 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
         return residual;
     }
 
-    private void Multiply(MatrixParameter Left, MatrixParameter Right, double[] answer) {
+    private void Multiply(MatrixParameterInterface Left, MatrixParameterInterface Right, double[] answer){
         int dim = Left.getColumnDimension();
         int n = Left.getRowDimension();
         int p = Right.getColumnDimension();
 
-        if ((factorsKnown == false && !RecomputeFactors) || (!dataKnown && !RecomputeResiduals) || (!loadingsKnown && !RecomputeLoadings)) {
+        if(((factorsKnown == false && !RecomputeFactors) || (!dataKnown && !RecomputeResiduals) || (!loadingsKnown && !RecomputeLoadings)) && !totalRecompute){
             double sum;
             ListIterator<Integer> li = changedValues.listIterator();
             while (li.hasNext()) {
@@ -338,14 +352,14 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
         }
     }
 
-    private void subtract(MatrixParameter Left, double[] Right, double[] answer) {
+    private void subtract(MatrixParameterInterface Left, double[] Right, double[] answer) {
         int row = Left.getRowDimension();
         int col = Left.getColumnDimension();
-        if ((!RecomputeResiduals && !dataKnown) || (!RecomputeFactors && !factorsKnown) || (!RecomputeLoadings && !loadingsKnown)) {
-            while (!changedValues.isEmpty()) {
+        if(((!RecomputeResiduals && !dataKnown) || (!RecomputeFactors && !factorsKnown) || (!RecomputeLoadings && !loadingsKnown)) && !totalRecompute) {
+            while(!changedValues.isEmpty()){
                 int id = changedValues.remove(0);
-                int tcol = id / row;
-                int trow = id % row;
+                int tcol=id / row;
+                int trow=id % row;
 //                System.out.println(Left.getParameterValue(id)==Left.getParameterValue(tcol,trow));
                 answer[trow * col + tcol] = Left.getParameterValue(id) - Right[trow * col + tcol];
             }
@@ -465,6 +479,7 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
         factorsKnown = true;
         loadingsKnown = true;
         dataKnown = true;
+        totalRecompute = false;
 //        firstTime=false;}
 //        else{
 //            while(!factorVariablesChanged.empty()){
@@ -504,6 +519,7 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
         storedFactorsKnown = factorsKnown;
         storedLoadingsKnown = loadingsKnown;
         storedDataKnown = dataKnown;
+        storedTotalRecompute = totalRecompute;
         System.arraycopy(residual, 0, storedResidual, 0, residual.length);
 
         System.arraycopy(LxF, 0, storedLxF, 0, residual.length);
@@ -543,6 +559,7 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
         factorsKnown = storedFactorsKnown;
         loadingsKnown = storedLoadingsKnown;
         dataKnown = storedDataKnown;
+        totalRecompute = storedTotalRecompute;
 //        changedValues=storedChangedValues;
 //        storedChangedValues=new Vector<Integer>();
 
@@ -586,6 +603,9 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
             if (!RecomputeResiduals) {
                 if (index != -1)
                     changedValues.add(index);
+                else{
+                    totalRecompute = true;
+                }
                 dataKnown = false;
             }
         }
@@ -597,12 +617,17 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
 //            }
 
             if (!RecomputeFactors) {
+//                System.out.println("index");
+//                System.out.println(index);
                 factorsKnown = false;
                 int row = index / factors.getRowDimension();
                 if (index != -1)
                     for (int i = 0; i < data.getRowDimension(); i++) {
                         changedValues.add(row * data.getRowDimension() + i);
                     }
+                else{
+                    totalRecompute = true;
+                }
 
             }
 
@@ -624,6 +649,7 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
                         changedValues.add(i * data.getRowDimension() + col);
                     }
                 }
+                else{totalRecompute = true;}
             }
 //            System.out.println("Loadings Changed");
 //            System.out.println(index);
@@ -651,12 +677,19 @@ public class LatentFactorModel extends AbstractModelLikelihood implements Citabl
 
     }
 
-    /**
-     * @return a list of citations associated with this object
-     */
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.TRAIT_MODELS;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Latent factor model";
+    }
+
     @Override
     public List<Citation> getCitations() {
-        return null;  //To change body of implemented methods use File | Settings | File Templates.
+        return Collections.singletonList(CommonCitations.CYBIS_2015_ASSESSING);
     }
 
     /**
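
The tightened dimension checks above appear to encode the factor-analysis bookkeeping in which the data matrix is modelled as loadings times factors, with traits as rows and taxa as columns: loadings are p x k, factors are k x n, and the data matrix is p x n. A minimal sketch of the same consistency checks on raw dimensions; the method and message text are illustrative:

    final class FactorModelDimsSketch {

        // data (p x n) is modelled as loadings (p x k) times factors (k x n).
        static void checkDims(int dataRows, int dataCols,
                              int loadRows, int loadCols,
                              int facRows, int facCols) {
            if (loadRows != dataRows) {
                throw new IllegalArgumentException("loadings rows must match data rows (p)");
            }
            if (facCols != dataCols) {
                throw new IllegalArgumentException("factor columns must match data columns (n)");
            }
            if (loadCols != facRows) {
                throw new IllegalArgumentException("loadings columns must match factor rows (k)");
            }
        }
    }
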
diff --git a/src/dr/inference/model/MaskedParameter.java b/src/dr/inference/model/MaskedParameter.java
index e059d21..428205a 100644
--- a/src/dr/inference/model/MaskedParameter.java
+++ b/src/dr/inference/model/MaskedParameter.java
@@ -44,6 +44,10 @@ public class MaskedParameter extends Parameter.Abstract implements VariableListe
 
         this.map = new int[parameter.getDimension()];
         this.inverseMap = new int[parameter.getDimension()];
+
+        this.storedMap = new int[parameter.getDimension()];
+        this.storedInverseMap = new int[parameter.getDimension()];
+
         for (int i = 0; i < map.length; i++) {
             map[i] = i;
             inverseMap[i] = i;
@@ -78,7 +82,6 @@ public class MaskedParameter extends Parameter.Abstract implements VariableListe
             }
         }
         length = index;
-        fireParameterChangedEvent();
     }
 
     public int getDimension() {
@@ -90,10 +93,23 @@ public class MaskedParameter extends Parameter.Abstract implements VariableListe
 
     protected void storeValues() {
         parameter.storeParameterValues();
+        maskParameter.storeParameterValues();
+
+        System.arraycopy(map, 0, storedMap, 0, map.length);
+        System.arraycopy(inverseMap, 0, storedInverseMap, 0, inverseMap.length);
     }
 
     protected void restoreValues() {
         parameter.restoreParameterValues();
+        maskParameter.restoreParameterValues();
+
+        int[] tmp = storedMap;
+        storedMap = map;
+        map = tmp;
+
+        tmp = storedInverseMap;
+        storedInverseMap = inverseMap;
+        inverseMap = tmp;
     }
 
 //    public void fireParameterChangedEvent() {
@@ -102,17 +118,18 @@ public class MaskedParameter extends Parameter.Abstract implements VariableListe
 
     protected void acceptValues() {
         parameter.acceptParameterValues();
+        maskParameter.acceptParameterValues();
     }
 
-    protected void adoptValues(Parameter source) {
-        parameter.adoptParameterValues(source);
-    }
+    protected void adoptValues(Parameter source) { throw new IllegalArgumentException("Not yet implemented"); }
 
     public double getParameterValue(int dim) {
+//        if (!isMapValid) updateMask();
         return parameter.getParameterValue(map[dim]);
     }
 
     public void setParameterValue(int dim, double value) {
+//        if (!isMapValid) updateMask();
         parameter.setParameterValue(map[dim], value);
     }
 
@@ -169,6 +186,7 @@ public class MaskedParameter extends Parameter.Abstract implements VariableListe
     public void variableChangedEvent(Variable variable, int index, ChangeType type) {
         if (variable == maskParameter) {
             updateMask();
+            fireParameterChangedEvent();
         } else { // variable == parameter
             if (index == -1) {
                 fireParameterChangedEvent();
@@ -180,8 +198,12 @@ public class MaskedParameter extends Parameter.Abstract implements VariableListe
 
     private final Parameter parameter;
     private Parameter maskParameter;
-    private final int[] map;
-    private final int[] inverseMap;
+    private int[] map;
+    private int[] inverseMap;
+
+    private int[] storedMap;
+    private int[] storedInverseMap;
+
     private int length;
     private int equalValue;
 }
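
MaskedParameter now stores and restores its index maps alongside the wrapped parameters, using the familiar copy-on-store, swap-on-restore idiom. A self-contained sketch of that idiom; the class and field names are illustrative:

    final class SwapStateSketch {

        private int[] map = {2, 0, 1};
        private int[] storedMap = new int[map.length];

        void store() {
            // Copy the working state aside.
            System.arraycopy(map, 0, storedMap, 0, map.length);
        }

        void restore() {
            // Swap references back in constant time instead of copying.
            int[] tmp = storedMap;
            storedMap = map;
            map = tmp;
        }
    }
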
diff --git a/src/dr/inference/model/MatrixParameterInterface.java b/src/dr/inference/model/MatrixParameterInterface.java
index ddb4e26..cc3b1d7 100644
--- a/src/dr/inference/model/MatrixParameterInterface.java
+++ b/src/dr/inference/model/MatrixParameterInterface.java
@@ -28,7 +28,7 @@ package dr.inference.model;
 /**
  * Created by msuchard on 12/30/15.
  */
-public interface MatrixParameterInterface extends Variable<Double> {
+public interface MatrixParameterInterface extends Parameter {
 
     double getParameterValue(int row, int col);
 
diff --git a/src/dr/inference/model/MixtureModelLikelihood.java b/src/dr/inference/model/MixtureModelLikelihood.java
index decf377..bc75ff4 100644
--- a/src/dr/inference/model/MixtureModelLikelihood.java
+++ b/src/dr/inference/model/MixtureModelLikelihood.java
@@ -35,8 +35,7 @@ import dr.util.Citation;
 import dr.util.CommonCitations;
 import dr.xml.*;
 
-import java.util.ArrayList;
-import java.util.List;
+import java.util.*;
 import java.util.logging.Logger;
 
 /**
@@ -278,9 +277,19 @@ public class MixtureModelLikelihood extends AbstractModelLikelihood implements C
     private final Parameter mixtureWeights;
     List<Likelihood> likelihoodList;
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.MISC;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Mixture model";
+    }
+
+    @Override
     public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(CommonCitations.LEMEY_MIXTURE_2012);
-        return citations;
+        return Collections.singletonList(CommonCitations.LEMEY_MIXTURE_2012);
     }
+
 }
diff --git a/src/dr/inference/model/Parameter.java b/src/dr/inference/model/Parameter.java
index 26d45bd..3bc28c2 100644
--- a/src/dr/inference/model/Parameter.java
+++ b/src/dr/inference/model/Parameter.java
@@ -161,6 +161,8 @@ public interface Parameter extends Statistic, Variable<Double> {
 
     public void fireParameterChangedEvent();
 
+    public void fireParameterChangedEvent(int index, Parameter.ChangeType type);
+
     boolean isUsed();
 
     public final static Set<Parameter> FULL_PARAMETER_SET = new LinkedHashSet<Parameter>();
@@ -702,7 +704,7 @@ public interface Parameter extends Statistic, Variable<Double> {
          */
         public void setParameterValueNotifyChangedAll(int i, double val) {
             values[i] = val;
-            fireParameterChangedEvent(i, Parameter.ChangeType.ALL_VALUES_CHANGED);
+            fireParameterChangedEvent(-1, Parameter.ChangeType.ALL_VALUES_CHANGED);
         }
 
         protected final void storeValues() {
diff --git a/src/dr/inference/model/TransposedMatrixParameter.java b/src/dr/inference/model/TransposedMatrixParameter.java
index 2e94451..ba641c0 100644
--- a/src/dr/inference/model/TransposedMatrixParameter.java
+++ b/src/dr/inference/model/TransposedMatrixParameter.java
@@ -105,6 +105,11 @@ public class TransposedMatrixParameter extends MatrixParameter {
         return slices.get(index);
     }
 
+//    @Override
+//    public void setParameterValueQuietly(int row, int column, double a) {
+//        super.setParameterValueQuietly(column,row, a);
+//    }
+
     MatrixParameter transposeBack(){
         return MatrixParameter.recast(null, this);
     }
diff --git a/src/dr/inference/model/WeightedMixtureModel.java b/src/dr/inference/model/WeightedMixtureModel.java
index 4a2251c..f70f726 100644
--- a/src/dr/inference/model/WeightedMixtureModel.java
+++ b/src/dr/inference/model/WeightedMixtureModel.java
@@ -32,8 +32,7 @@ import dr.util.Citation;
 import dr.util.CommonCitations;
 import dr.xml.*;
 
-import java.util.ArrayList;
-import java.util.List;
+import java.util.*;
 import java.util.logging.Logger;
 
 /**
@@ -311,9 +310,18 @@ public class WeightedMixtureModel extends AbstractModelLikelihood implements Cit
         System.err.println("correct            = " + test);
     }
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.MISC;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Weighted mixture model";
+    }
+
+    @Override
     public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(CommonCitations.LEMEY_MIXTURE_2012);
-        return citations;
+        return Collections.singletonList(CommonCitations.LEMEY_MIXTURE_2012);
     }
 }
diff --git a/src/dr/inference/operators/EllipticalSliceOperator.java b/src/dr/inference/operators/EllipticalSliceOperator.java
index 32ce920..d007911 100644
--- a/src/dr/inference/operators/EllipticalSliceOperator.java
+++ b/src/dr/inference/operators/EllipticalSliceOperator.java
@@ -190,9 +190,8 @@ public class EllipticalSliceOperator extends SimpleMetropolizedGibbsOperator imp
         return r;
     }
 
-    private void transformPoint(double[] x) {
+    public static void transformPoint(double[] x, boolean translationInvariant, boolean rotationInvariant, int dim) {
         if (translationInvariant) {
-            int dim = 2; // TODO How to determine?
 
             double[] mean = new double[dim];
             int k = 0;
@@ -217,7 +216,6 @@ public class EllipticalSliceOperator extends SimpleMetropolizedGibbsOperator imp
         }
 
         if (rotationInvariant) {
-            int dim = 2;
 
             final double theta = -Math.atan2(x[1], x[0]); // TODO Compute norm and avoid transcendentals
             final double sin = Math.sin(theta);
@@ -240,6 +238,10 @@ public class EllipticalSliceOperator extends SimpleMetropolizedGibbsOperator imp
         }
     }
 
+    private void transformPoint(double[] x) {
+        transformPoint(x, translationInvariant, rotationInvariant, 2);
+    }
+
     private void setAllParameterValues(double[] x) {
         if (variable instanceof MatrixParameterInterface) {
             ((MatrixParameterInterface) variable).setAllParameterValuesQuietly(x, 0);
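
The now-static transformPoint above removes the translation and rotation degrees of freedom from a flattened set of points: it appears to centre the points on their per-dimension mean and then rotates the configuration so the first point lies on the positive x-axis. A standalone sketch of that normalisation for the two-dimensional case the private wrapper still uses; the method name is illustrative:

    final class PointNormalisationSketch {

        // x holds n points as (x0, y0, x1, y1, ...).
        static void normalisePoints2D(double[] x) {
            final int n = x.length / 2;

            // Remove translation: subtract the centroid.
            double meanX = 0.0, meanY = 0.0;
            for (int i = 0; i < n; ++i) {
                meanX += x[2 * i];
                meanY += x[2 * i + 1];
            }
            meanX /= n;
            meanY /= n;
            for (int i = 0; i < n; ++i) {
                x[2 * i] -= meanX;
                x[2 * i + 1] -= meanY;
            }

            // Remove rotation: turn the configuration so the first point
            // lands on the positive x-axis.
            final double theta = -Math.atan2(x[1], x[0]);
            final double sin = Math.sin(theta);
            final double cos = Math.cos(theta);
            for (int i = 0; i < n; ++i) {
                double px = x[2 * i];
                double py = x[2 * i + 1];
                x[2 * i] = cos * px - sin * py;
                x[2 * i + 1] = sin * px + cos * py;
            }
        }
    }
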
diff --git a/src/dr/inference/operators/FactorGibbsOperator.java b/src/dr/inference/operators/FactorGibbsOperator.java
index 7c0d524..15b6e70 100644
--- a/src/dr/inference/operators/FactorGibbsOperator.java
+++ b/src/dr/inference/operators/FactorGibbsOperator.java
@@ -25,10 +25,7 @@
 
 package dr.inference.operators;
 
-import dr.inference.model.DiagonalMatrix;
-import dr.inference.model.LatentFactorModel;
-import dr.inference.model.MatrixParameter;
-import dr.inference.model.Parameter;
+import dr.inference.model.*;
 import dr.math.MathUtils;
 import dr.math.distributions.MultivariateNormalDistribution;
 import dr.math.matrixAlgebra.SymmetricMatrix;
@@ -69,7 +66,7 @@ public class FactorGibbsOperator extends SimpleMCMCOperator implements GibbsOper
     }
 
     private void getPrecision(double[][] precision) {
-        MatrixParameter Loadings = LFM.getLoadings();
+        MatrixParameterInterface Loadings = LFM.getLoadings();
         MatrixParameter Precision = LFM.getColumnPrecision();
         int outerDim = Loadings.getRowDimension();
         int innerDim = Loadings.getColumnDimension();
@@ -91,9 +88,9 @@ public class FactorGibbsOperator extends SimpleMCMCOperator implements GibbsOper
     }
 
     private void getMean(int column, double[][] variance, double[] midMean, double[] mean) {
-        MatrixParameter scaledData = LFM.getScaledData();
-        MatrixParameter Precision = LFM.getColumnPrecision();
-        MatrixParameter Loadings = LFM.getLoadings();
+        MatrixParameterInterface scaledData = LFM.getScaledData();
+        MatrixParameterInterface Precision = LFM.getColumnPrecision();
+        MatrixParameterInterface Loadings = LFM.getLoadings();
         for (int i = 0; i < Loadings.getRowDimension(); i++) {
             double sum = 0;
             for (int j = i; j < Loadings.getColumnDimension(); j++) {
diff --git a/src/dr/inference/operators/FactorIndependenceOperator.java b/src/dr/inference/operators/FactorIndependenceOperator.java
index 43ef026..351eeee 100644
--- a/src/dr/inference/operators/FactorIndependenceOperator.java
+++ b/src/dr/inference/operators/FactorIndependenceOperator.java
@@ -25,10 +25,7 @@
 
 package dr.inference.operators;
 
-import dr.inference.model.DiagonalMatrix;
-import dr.inference.model.LatentFactorModel;
-import dr.inference.model.MatrixParameter;
-import dr.inference.model.Parameter;
+import dr.inference.model.*;
 import dr.math.MathUtils;
 import dr.math.distributions.MultivariateNormalDistribution;
 import dr.math.matrixAlgebra.SymmetricMatrix;
@@ -72,7 +69,7 @@ public class FactorIndependenceOperator extends AbstractCoercableOperator {
     }
 
     private void getPrecision(double[][] precision) {
-        MatrixParameter Loadings = LFM.getLoadings();
+        MatrixParameterInterface Loadings = LFM.getLoadings();
         MatrixParameter Precision = LFM.getColumnPrecision();
         int outerDim = Loadings.getRowDimension();
         int innerDim = Loadings.getColumnDimension();
@@ -94,9 +91,9 @@ public class FactorIndependenceOperator extends AbstractCoercableOperator {
     }
 
     private void getMean(int column, double[][] variance, double[] midMean, double[] mean) {
-        MatrixParameter scaledData = LFM.getScaledData();
-        MatrixParameter Precision = LFM.getColumnPrecision();
-        MatrixParameter Loadings = LFM.getLoadings();
+        MatrixParameterInterface scaledData = LFM.getScaledData();
+        MatrixParameterInterface Precision = LFM.getColumnPrecision();
+        MatrixParameterInterface Loadings = LFM.getLoadings();
         for (int i = 0; i < Loadings.getRowDimension(); i++) {
             double sum = 0;
             for (int j = i; j < Loadings.getColumnDimension(); j++) {
diff --git a/src/dr/inference/operators/FactorOperator.java b/src/dr/inference/operators/FactorOperator.java
index dcb557c..54b0ab1 100644
--- a/src/dr/inference/operators/FactorOperator.java
+++ b/src/dr/inference/operators/FactorOperator.java
@@ -25,10 +25,7 @@
 
 package dr.inference.operators;
 
-import dr.inference.model.DiagonalMatrix;
-import dr.inference.model.LatentFactorModel;
-import dr.inference.model.MatrixParameter;
-import dr.inference.model.Parameter;
+import dr.inference.model.*;
 import dr.math.MathUtils;
 import dr.math.distributions.MultivariateNormalDistribution;
 import dr.math.matrixAlgebra.SymmetricMatrix;
@@ -72,7 +69,7 @@ public class FactorOperator extends AbstractCoercableOperator {
     }
 
     private void getPrecision(double[][] precision) {
-        MatrixParameter Loadings = LFM.getLoadings();
+        MatrixParameterInterface Loadings = LFM.getLoadings();
         MatrixParameter Precision = LFM.getColumnPrecision();
         int outerDim = Loadings.getRowDimension();
         int innerDim = Loadings.getColumnDimension();
@@ -94,9 +91,9 @@ public class FactorOperator extends AbstractCoercableOperator {
     }
 
     private void getMean(int column, double[][] variance, double[] midMean, double[] mean) {
-        MatrixParameter scaledData = LFM.getScaledData();
-        MatrixParameter Precision = LFM.getColumnPrecision();
-        MatrixParameter Loadings = LFM.getLoadings();
+        MatrixParameterInterface scaledData = LFM.getScaledData();
+        MatrixParameterInterface Precision = LFM.getColumnPrecision();
+        MatrixParameterInterface Loadings = LFM.getLoadings();
         for (int i = 0; i < Loadings.getRowDimension(); i++) {
             double sum = 0;
             for (int j = i; j < Loadings.getColumnDimension(); j++) {
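
FactorGibbsOperator, FactorIndependenceOperator and FactorOperator are all widened in the same way: several accessors on the latent factor model now return MatrixParameterInterface instead of the concrete MatrixParameter, so these operators accept any matrix-valued parameter implementation. A minimal sketch of that programming-to-the-interface style; the helper is illustrative only and assumes MatrixParameterInterface exposes getParameterValue(row, column) alongside the dimension accessors used above:

    import dr.inference.model.MatrixParameterInterface;

    // Hypothetical helper: sums the squares of one column, written against the interface
    // so it accepts plain, compound and transposed matrix parameters alike.
    final class MatrixColumnNorm {
        static double squaredNorm(MatrixParameterInterface m, int column) {
            double sum = 0.0;
            for (int row = 0; row < m.getRowDimension(); row++) {
                double v = m.getParameterValue(row, column);   // assumed accessor
                sum += v * v;
            }
            return sum;
        }
    }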
diff --git a/src/dr/inference/operators/GibbsOperator.java b/src/dr/inference/operators/GibbsOperator.java
index 64cc14e..fc18eeb 100644
--- a/src/dr/inference/operators/GibbsOperator.java
+++ b/src/dr/inference/operators/GibbsOperator.java
@@ -30,7 +30,7 @@ package dr.inference.operators;
  *
  * @author Roald Forsberg
  */
-public interface GibbsOperator extends MCMCOperator {
+public interface GibbsOperator extends MCMCOperator, PathDependentOperator {
 
    /**
 	* @return the number of steps the operator performs in one go.
@@ -40,5 +40,5 @@ public interface GibbsOperator extends MCMCOperator {
     /**
      * Set the path parameter for sampling from power-posterior
      */
-    void setPathParameter(double beta);
+    //void setPathParameter(double beta);
 }
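
setPathParameter is commented out here because GibbsOperator now inherits it from the new PathDependentOperator interface (introduced later in this patch), so every Gibbs operator still has to provide the path parameter used for power-posterior sampling. A small sketch of the revised hierarchy in use; the helper class is hypothetical:

    import dr.inference.operators.GibbsOperator;

    final class PathParameterExample {
        // Compiles because GibbsOperator extends PathDependentOperator in this patch,
        // which is where setPathParameter(double) is now declared.
        static void anneal(GibbsOperator operator, double beta) {
            operator.setPathParameter(beta);
        }
    }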
diff --git a/src/dr/inference/operators/LatentFactorModelPrecisionGibbsOperator.java b/src/dr/inference/operators/LatentFactorModelPrecisionGibbsOperator.java
index 0b67aea..80115bc 100644
--- a/src/dr/inference/operators/LatentFactorModelPrecisionGibbsOperator.java
+++ b/src/dr/inference/operators/LatentFactorModelPrecisionGibbsOperator.java
@@ -29,6 +29,7 @@ import dr.inference.distribution.DistributionLikelihood;
 import dr.inference.model.DiagonalMatrix;
 import dr.inference.model.LatentFactorModel;
 import dr.inference.model.MatrixParameter;
+import dr.inference.model.MatrixParameterInterface;
 import dr.math.MathUtils;
 import dr.math.distributions.GammaDistribution;
 
@@ -60,10 +61,10 @@ public class LatentFactorModelPrecisionGibbsOperator extends SimpleMCMCOperator
     }
 
     private void setPrecision(int i) {
-        MatrixParameter factors = LFM.getFactors();
-        MatrixParameter loadings = LFM.getLoadings();
+        MatrixParameterInterface factors = LFM.getFactors();
+        MatrixParameterInterface loadings = LFM.getLoadings();
         DiagonalMatrix precision = (DiagonalMatrix) LFM.getColumnPrecision();
-        MatrixParameter data = LFM.getScaledData();
+        MatrixParameterInterface data = LFM.getScaledData();
         double di = 0;
         for (int j = 0; j < factors.getColumnDimension(); j++) {
             double sum = 0;
diff --git a/src/dr/inference/operators/LoadingsGibbsOperator.java b/src/dr/inference/operators/LoadingsGibbsOperator.java
index f3509d2..4dd7cfc 100644
--- a/src/dr/inference/operators/LoadingsGibbsOperator.java
+++ b/src/dr/inference/operators/LoadingsGibbsOperator.java
@@ -28,6 +28,7 @@ package dr.inference.operators;
 import dr.inference.distribution.DistributionLikelihood;
 import dr.inference.model.LatentFactorModel;
 import dr.inference.model.MatrixParameter;
+import dr.inference.model.MatrixParameterInterface;
 import dr.inference.model.TransposedBlockUpperTriangularMatrixParameter;
 import dr.math.MathUtils;
 import dr.math.distributions.MultivariateNormalDistribution;
@@ -118,7 +119,7 @@ public class LoadingsGibbsOperator extends SimpleMCMCOperator implements GibbsOp
         priorMeanPrecision = this.prior.getMean() * priorPrecision;
     }
 
-    private void getPrecisionOfTruncated(MatrixParameter full, int newRowDimension, int row, double[][] answer) {
+    private void getPrecisionOfTruncated(MatrixParameterInterface full, int newRowDimension, int row, double[][] answer) {
 
 //        MatrixParameter answer=new MatrixParameter(null);
 //        answer.setDimensions(this.getRowDimension(), Right.getRowDimension());
@@ -149,8 +150,8 @@ public class LoadingsGibbsOperator extends SimpleMCMCOperator implements GibbsOp
 //        answer.setDimensions(this.getRowDimension(), Right.getRowDimension());
 //        System.out.println(answer.getRowDimension());
 //        System.out.println(answer.getColumnDimension());
-        MatrixParameter data = LFM.getScaledData();
-        MatrixParameter Left = LFM.getFactors();
+        MatrixParameterInterface data = LFM.getScaledData();
+        MatrixParameterInterface Left = LFM.getFactors();
         int p = data.getColumnDimension();
         for (int i = 0; i < newRowDimension; i++) {
             double sum = 0;
@@ -210,7 +211,7 @@ public class LoadingsGibbsOperator extends SimpleMCMCOperator implements GibbsOp
     }
 
     private void copy(int i, double[] random) {
-        TransposedBlockUpperTriangularMatrixParameter changing = (TransposedBlockUpperTriangularMatrixParameter) LFM.getLoadings();
+       MatrixParameterInterface changing = LFM.getLoadings();
         for (int j = 0; j < random.length; j++) {
             changing.setParameterValueQuietly(i, j, random[j]);
         }
diff --git a/src/dr/inference/operators/LoadingsGibbsOperator.java b/src/dr/inference/operators/LoadingsGibbsTruncatedOperator.java
similarity index 72%
copy from src/dr/inference/operators/LoadingsGibbsOperator.java
copy to src/dr/inference/operators/LoadingsGibbsTruncatedOperator.java
index f3509d2..17e0d90 100644
--- a/src/dr/inference/operators/LoadingsGibbsOperator.java
+++ b/src/dr/inference/operators/LoadingsGibbsTruncatedOperator.java
@@ -1,53 +1,24 @@
-/*
- * LoadingsGibbsOperator.java
- *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
- *
- * This file is part of BEAST.
- * See the NOTICE file distributed with this work for additional
- * information regarding copyright ownership and licensing.
- *
- * BEAST is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Lesser General Public License as
- * published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- *  BEAST is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- *  GNU Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with BEAST; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA  02110-1301  USA
- */
-
 package dr.inference.operators;
 
 import dr.inference.distribution.DistributionLikelihood;
-import dr.inference.model.LatentFactorModel;
-import dr.inference.model.MatrixParameter;
-import dr.inference.model.TransposedBlockUpperTriangularMatrixParameter;
+import dr.inference.distribution.MomentDistributionModel;
+import dr.inference.model.*;
 import dr.math.MathUtils;
 import dr.math.distributions.MultivariateNormalDistribution;
 import dr.math.distributions.NormalDistribution;
 import dr.math.matrixAlgebra.CholeskyDecomposition;
 import dr.math.matrixAlgebra.IllegalDimension;
 import dr.math.matrixAlgebra.SymmetricMatrix;
+import jebl.math.Random;
 
 import java.util.ArrayList;
 import java.util.ListIterator;
 
 /**
- * Created with IntelliJ IDEA.
- * User: max
- * Date: 5/23/14
- * Time: 2:23 PM
- * To change this template use File | Settings | File Templates.
+ * Created by max on 2/4/16.
  */
-public class LoadingsGibbsOperator extends SimpleMCMCOperator implements GibbsOperator {
-    NormalDistribution prior;
+public class LoadingsGibbsTruncatedOperator extends SimpleMCMCOperator implements GibbsOperator{
+    MomentDistributionModel prior;
     LatentFactorModel LFM;
     ArrayList<double[][]> precisionArray;
     ArrayList<double[]> meanMidArray;
@@ -58,11 +29,13 @@ public class LoadingsGibbsOperator extends SimpleMCMCOperator implements GibbsOp
 
     double priorPrecision;
     double priorMeanPrecision;
+    MatrixParameterInterface loadings;
 
-    public LoadingsGibbsOperator(LatentFactorModel LFM, DistributionLikelihood prior, double weight, boolean randomScan) {
+    public LoadingsGibbsTruncatedOperator(LatentFactorModel LFM, MomentDistributionModel prior, double weight, boolean randomScan, MatrixParameterInterface loadings) {
         setWeight(weight);
 
-        this.prior = (NormalDistribution) prior.getDistribution();
+        this.loadings=loadings;
+        this.prior = prior;
         this.LFM = LFM;
         precisionArray = new ArrayList<double[][]>();
         double[][] temp;
@@ -114,11 +87,11 @@ public class LoadingsGibbsOperator extends SimpleMCMCOperator implements GibbsOp
 //
 //
 //            }
-        priorPrecision = 1 / (this.prior.getSD() * this.prior.getSD());
-        priorMeanPrecision = this.prior.getMean() * priorPrecision;
+        priorPrecision = (this.prior.getScaleMatrix()[0][0]);
+        priorMeanPrecision = this.prior.getMean()[0] * priorPrecision;
     }
 
-    private void getPrecisionOfTruncated(MatrixParameter full, int newRowDimension, int row, double[][] answer) {
+    private void getPrecisionOfTruncated(MatrixParameterInterface full, int newRowDimension, int row, double[][] answer) {
 
 //        MatrixParameter answer=new MatrixParameter(null);
 //        answer.setDimensions(this.getRowDimension(), Right.getRowDimension());
@@ -149,8 +122,8 @@ public class LoadingsGibbsOperator extends SimpleMCMCOperator implements GibbsOp
 //        answer.setDimensions(this.getRowDimension(), Right.getRowDimension());
 //        System.out.println(answer.getRowDimension());
 //        System.out.println(answer.getColumnDimension());
-        MatrixParameter data = LFM.getScaledData();
-        MatrixParameter Left = LFM.getFactors();
+        MatrixParameterInterface data = LFM.getScaledData();
+        MatrixParameterInterface Left = LFM.getFactors();
         int p = data.getColumnDimension();
         for (int i = 0; i < newRowDimension; i++) {
             double sum = 0;
@@ -210,12 +183,74 @@ public class LoadingsGibbsOperator extends SimpleMCMCOperator implements GibbsOp
     }
 
     private void copy(int i, double[] random) {
-        TransposedBlockUpperTriangularMatrixParameter changing = (TransposedBlockUpperTriangularMatrixParameter) LFM.getLoadings();
+        MatrixParameterInterface changing = loadings;
         for (int j = 0; j < random.length; j++) {
             changing.setParameterValueQuietly(i, j, random[j]);
         }
     }
 
+    private double[] getDraws(int row, double[] mean, double[][] Cholesky){
+        double[] temp = new double[mean.length];
+        double[] draws = new double[mean.length];
+        double lowCutoff;
+        double highCutoff;
+        double low;
+        double high;
+        NormalDistribution normal;
+        for (int i = 0; i < temp.length; i++) {
+            highCutoff = Math.sqrt(prior.getCutoff().getParameterValue(row * LFM.getLoadings().getColumnDimension() + i));
+            lowCutoff = -highCutoff;
+            for (int j = 0; j <= i; j++) {
+//                if(Cholesky[i][i] > 0) {
+                    if (i != j) {
+                        lowCutoff = lowCutoff - temp[j] * Cholesky[i][j];
+                        highCutoff = highCutoff - temp[j] * Cholesky[i][j];
+                    } else {
+                        lowCutoff = lowCutoff / Cholesky[i][j];
+                        highCutoff = highCutoff / Cholesky[i][j];
+                    }
+//                }
+//                else{
+//                    if (i != j) {
+//                        cutoffs = cutoffs + temp[j] * Cholesky[i][j];
+//                    } else {
+//                        cutoffs = cutoffs / Cholesky[i][j];
+//                    }
+//                }
+            }
+//            System.out.println(cutoffs);
+            normal = new NormalDistribution(mean[i], 1);
+            low = normal.cdf(lowCutoff);
+            high = normal.cdf(highCutoff);
+//            System.out.println("low: " + low);
+//            System.out.println("high: " + high);
+            double proportion = low / (low + 1 - high);
+            if (Random.nextDouble() < proportion) {
+                double quantile = Random.nextDouble() * low;
+                temp[i] = normal.quantile(quantile);
+            }
+            else {
+                double quantile = (1 - high) * Random.nextDouble() + high;
+                temp[i] = normal.quantile(quantile);
+            }
+
+        }
+        for (int i = 0; i <mean.length ; i++) {
+            for (int j = 0; j <= i; j++) {
+                draws[i] += Cholesky[i][j] * temp[j];
+//                System.out.println("temp: " + temp[i]);
+//                System.out.println("Cholesky " + i + ", " + j +": " +Cholesky[i][j]);
+            }
+            if(Math.abs(draws[i])<Math.sqrt(prior.getCutoff().getParameterValue(row * LFM.getLoadings().getColumnDimension() + i))) {
+                System.out.println(Math.sqrt(prior.getCutoff().getParameterValue(row * LFM.getLoadings().getColumnDimension() + i)));
+                System.out.println("draws: " + draws[i]);
+            }
+        }
+
+
+        return draws;
+    }
+
     private void drawI(int i, ListIterator<double[][]> currentPrecision, ListIterator<double[]> currentMidMean, ListIterator<double[]> currentMean) {
         double[] draws = null;
         double[][] precision = null;
@@ -250,7 +285,8 @@ public class LoadingsGibbsOperator extends SimpleMCMCOperator implements GibbsOp
 
         getMean(i, variance, midMean, mean);
 
-        draws = MultivariateNormalDistribution.nextMultivariateNormalCholesky(mean, cholesky);
+
+        draws = getDraws(i, mean, cholesky);
 //    if(i<draws.length)
 //
 //    {
@@ -260,7 +296,7 @@ public class LoadingsGibbsOperator extends SimpleMCMCOperator implements GibbsOp
 //    }
         if (i < draws.length) {
             //if (draws[i] > 0) { TODO implement as option
-                copy(i, draws);
+            copy(i, draws);
             //}
         } else {
             copy(i, draws);
@@ -282,7 +318,7 @@ public class LoadingsGibbsOperator extends SimpleMCMCOperator implements GibbsOp
 
     @Override
     public String getOperatorName() {
-        return "loadingsGibbsOperator";  //To change body of implemented methods use File | Settings | File Templates.
+        return "loadingsGibbsTruncatedOperator";  //To change body of implemented methods use File | Settings | File Templates.
     }
 
     @Override
@@ -296,7 +332,7 @@ public class LoadingsGibbsOperator extends SimpleMCMCOperator implements GibbsOp
             for (int i = 0; i < size; i++) {
                 drawI(i, currentPrecision, currentMidMean, currentMean);
             }
-            LFM.getLoadings().fireParameterChangedEvent();
+            ((Parameter) loadings).fireParameterChangedEvent();
         } else {
             int i = MathUtils.nextInt(LFM.getLoadings().getRowDimension());
             ListIterator<double[][]> currentPrecision;
@@ -322,3 +358,4 @@ public class LoadingsGibbsOperator extends SimpleMCMCOperator implements GibbsOp
         pathParameter=beta;
     }
 }
+
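
LoadingsGibbsTruncatedOperator differs from the operator it was copied from mainly in getDraws: instead of an unconstrained multivariate-normal draw, each standard-normal component is forced outside the interval implied by the prior cutoff via inverse-CDF sampling, and the resulting vector is then pushed through the Cholesky factor. A stripped-down sketch of that one-dimensional step, ignoring the per-coordinate adjustment of the cut points through the Cholesky factor and reusing only calls that appear above (the class itself is hypothetical):

    import dr.math.distributions.NormalDistribution;
    import jebl.math.Random;

    final class TwoTailedTruncatedDraw {
        // Draw z ~ Normal(mean, 1) conditioned on z lying outside (-cutoff, +cutoff).
        static double draw(double mean, double cutoff) {
            NormalDistribution normal = new NormalDistribution(mean, 1);
            double low = normal.cdf(-cutoff);            // mass below the lower cut point
            double high = normal.cdf(cutoff);            // CDF at the upper cut point
            double lowerTailProb = low / (low + 1 - high);
            if (Random.nextDouble() < lowerTailProb) {
                return normal.quantile(Random.nextDouble() * low);                // lower tail
            } else {
                return normal.quantile(high + (1 - high) * Random.nextDouble());  // upper tail
            }
        }
    }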
diff --git a/src/dr/inference/operators/LoadingsIndependenceOperator.java b/src/dr/inference/operators/LoadingsIndependenceOperator.java
index 2093a6e..16622f8 100644
--- a/src/dr/inference/operators/LoadingsIndependenceOperator.java
+++ b/src/dr/inference/operators/LoadingsIndependenceOperator.java
@@ -28,6 +28,7 @@ package dr.inference.operators;
 import dr.inference.distribution.DistributionLikelihood;
 import dr.inference.model.LatentFactorModel;
 import dr.inference.model.MatrixParameter;
+import dr.inference.model.MatrixParameterInterface;
 import dr.inference.model.Parameter;
 import dr.math.MathUtils;
 import dr.math.distributions.MultivariateNormalDistribution;
@@ -120,7 +121,7 @@ public class LoadingsIndependenceOperator extends AbstractCoercableOperator {
         priorMeanPrecision = this.prior.getMean() * priorPrecision;
     }
 
-    private void getPrecisionOfTruncated(MatrixParameter full, int newRowDimension, int row, double[][] answer) {
+    private void getPrecisionOfTruncated(MatrixParameterInterface full, int newRowDimension, int row, double[][] answer) {
 
 //        MatrixParameter answer=new MatrixParameter(null);
 //        answer.setDimensions(this.getRowDimension(), Right.getRowDimension());
@@ -150,8 +151,8 @@ public class LoadingsIndependenceOperator extends AbstractCoercableOperator {
 //        answer.setDimensions(this.getRowDimension(), Right.getRowDimension());
 //        System.out.println(answer.getRowDimension());
 //        System.out.println(answer.getColumnDimension());
-        MatrixParameter data = LFM.getScaledData();
-        MatrixParameter Left = LFM.getFactors();
+        MatrixParameterInterface data = LFM.getScaledData();
+        MatrixParameterInterface Left = LFM.getFactors();
         int p = data.getColumnDimension();
         for (int i = 0; i < newRowDimension; i++) {
             double sum = 0;
diff --git a/src/dr/inference/operators/MaskMoveOperator.java b/src/dr/inference/operators/MaskMoveOperator.java
new file mode 100644
index 0000000..3dab4b2
--- /dev/null
+++ b/src/dr/inference/operators/MaskMoveOperator.java
@@ -0,0 +1,182 @@
+/*
+ * MaskMoveOperator.java
+ *
+ * Copyright (c) 2002-2016 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.inference.operators;
+
+import dr.inference.model.Parameter;
+import dr.math.MathUtils;
+
+import java.util.List;
+
+/**
+ * A generic operator that flips masks.
+ *
+ * @author Marc A. Suchard
+ * @version $Id$
+ */
+public class MaskMoveOperator extends SimpleMCMCOperator {
+
+    public MaskMoveOperator(List<Parameter> masks, Parameter cutPoint, int[] selectBefore, int[] selectAfter, double weight) {
+        this.masks = masks;
+        this.cutPoint = cutPoint;
+        this.selectBefore = selectBefore;
+        this.selectAfter = selectAfter;
+
+        setWeight(weight);
+
+        if (!checkMaskValues(masks, cutPoint, selectBefore, selectAfter)) {
+            throw new IllegalArgumentException("Bad initialization state");
+        }
+    }
+
+    public static boolean checkMaskValues(List<Parameter> masks, Parameter cutPoint, int[] selectBefore, int[] selectAfter) {
+
+        int cut = (int) (cutPoint.getParameterValue(0) + 0.5);
+        for (int i = 0; i < masks.size(); ++i) {
+            Parameter mask = masks.get(i);
+
+            boolean before = (i < cut);
+
+            final int[] ones;
+            final int[] zeros;
+
+            if (before) {
+                ones = selectBefore;
+                zeros = selectAfter;
+            } else {
+                ones = selectAfter;
+                zeros = selectBefore;
+            }
+
+            for (int idx : ones) {
+                if (mask.getParameterValue(idx) != 1) return false;
+            }
+
+            for (int idx: zeros) {
+                if (mask.getParameterValue(idx) != 0) return false;
+            }
+        }
+        return true;
+    }
+
+    private String printMask() {
+        StringBuilder sb = new StringBuilder();
+        int cut = (int) (cutPoint.getParameterValue(0) + 0.5);
+        sb.append("Cut: " + cut + "\n");
+        for (int i = 0; i < masks.size(); ++i) {
+            sb.append((i + 1) + " " + masks.get(i).getParameterValue(selectBefore[0]) + " " + masks.get(i).getParameterValue(selectAfter[0]) + "\n");
+        }
+        return sb.toString();
+    }
+
+    public final double doOperation() {
+        double logq = 0.0;
+
+        StringBuilder sb = null;
+
+        int currentCutPoint = (int) (cutPoint.getParameterValue(0) + 0.5);
+
+        if (DEBUG) {
+            sb = new StringBuilder();
+            sb.append("Starting state\n");
+            sb.append(printMask());
+        }
+
+        final boolean moveUp;
+        if (currentCutPoint == 0) {
+            moveUp = true;
+            logq -= Math.log(2);
+        } else if (currentCutPoint == masks.size()) {
+            moveUp = false;
+            logq -= Math.log(2);
+        } else if (MathUtils.nextDouble() < 0.5) {
+            moveUp = false;
+        } else {
+            moveUp = true;
+        }
+
+        if (DEBUG) {
+            sb.append("moveUp = " + moveUp + "\n");
+        }
+
+        Parameter change = (moveUp) ? masks.get(currentCutPoint) : masks.get(currentCutPoint - 1);
+
+        final int[] ones;
+        final int[] zeros;
+        if (moveUp) {
+            ones = selectBefore;
+            zeros = selectAfter;
+
+        } else {
+
+
+            ones = selectAfter;
+            zeros = selectBefore;
+        }
+
+        for (int i : zeros) {
+            change.setParameterValueQuietly(i, 0);
+        }
+
+        for (int i : ones) {
+            change.setParameterValueQuietly(i, 1);
+        }
+
+        change.fireParameterChangedEvent();
+
+        cutPoint.setParameterValue(0, currentCutPoint + ((moveUp) ? 1 : -1));
+
+        if (DEBUG) {
+            if (!checkMaskValues(masks, cutPoint, selectBefore, selectAfter)) {
+                sb.append(printMask());
+                System.err.println(sb.toString());
+                System.exit(-1);
+            }
+        }
+
+        return logq;
+    }
+
+    // Interface MCMCOperator
+    public final String getOperatorName() {
+        return "maskMove(" + cutPoint.getParameterName() + ")";
+    }
+
+    public final String getPerformanceSuggestion() {
+        return "no performance suggestion";
+    }
+
+    public String toString() {
+        return getOperatorName();
+    }
+
+    // Private instance variables
+    final private List<Parameter> masks;
+    final private Parameter cutPoint;
+    final private int[] selectBefore;
+    final private int[] selectAfter;
+
+    final static private boolean DEBUG = false;
+}
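
The new MaskMoveOperator proposes moving the integer cut point up or down by one and rewrites the single mask at the boundary, so that masks below the cut expose the selectBefore indices and masks at or above it expose the selectAfter indices; at the two boundary states only one direction is available, hence the -log 2 Hastings term. A small wiring sketch; the mask values are illustrative and the Parameter.Default(double[]) constructor is an assumption:

    import dr.inference.model.Parameter;
    import dr.inference.operators.MaskMoveOperator;

    import java.util.Arrays;
    import java.util.List;

    final class MaskMoveExample {
        public static void main(String[] args) {
            // Two indicator masks over a two-element selection; index 0 is the
            // "before" block and index 1 the "after" block (illustrative values).
            List<Parameter> masks = Arrays.<Parameter>asList(
                    new Parameter.Default(new double[]{1.0, 0.0}),   // below the cut
                    new Parameter.Default(new double[]{0.0, 1.0}));  // at/above the cut
            Parameter cutPoint = new Parameter.Default(1.0);

            MaskMoveOperator operator = new MaskMoveOperator(
                    masks, cutPoint, new int[]{0}, new int[]{1}, 1.0);

            // The constructor validates this starting state with checkMaskValues(...).
            System.out.println(operator.getOperatorName());
        }
    }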
diff --git a/src/dr/inference/operators/ModeIndependenceOperator.java b/src/dr/inference/operators/ModeIndependenceOperator.java
new file mode 100644
index 0000000..14822dd
--- /dev/null
+++ b/src/dr/inference/operators/ModeIndependenceOperator.java
@@ -0,0 +1,258 @@
+/*
+ * MultivariateNormalOperator.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.inference.operators;
+
+import cern.colt.matrix.impl.DenseDoubleMatrix2D;
+import cern.colt.matrix.linalg.SingularValueDecomposition;
+import dr.inference.model.MatrixParameter;
+import dr.inference.model.Parameter;
+import dr.math.MathUtils;
+import dr.math.matrixAlgebra.CholeskyDecomposition;
+import dr.math.matrixAlgebra.IllegalDimension;
+import dr.math.matrixAlgebra.SymmetricMatrix;
+import dr.xml.*;
+
+
+/**
+ * @author Marc Suchard
+ */
+public class ModeIndependenceOperator extends AbstractCoercableOperator {
+
+    public static final String MVN_OPERATOR = "mvnOperator";
+    public static final String SCALE_FACTOR = "scaleFactor";
+    public static final String VARIANCE_MATRIX = "varMatrix";
+    public static final String FORM_XTX = "formXtXInverse";
+
+    private double scaleFactor;
+    private final Parameter parameter;
+    private final int dim;
+
+    private double[][] cholesky;
+
+    public ModeIndependenceOperator(Parameter parameter, double scaleFactor, double[][] inMatrix, double weight,
+                                    CoercionMode mode, boolean isVarianceMatrix) {
+
+        super(mode);
+        this.scaleFactor = scaleFactor;
+        this.parameter = parameter;
+        setWeight(weight);
+        dim = parameter.getDimension();
+
+        SingularValueDecomposition svd = new SingularValueDecomposition(new DenseDoubleMatrix2D(inMatrix));
+        if (inMatrix[0].length != svd.rank()) {
+            throw new RuntimeException("Variance matrix in mvnOperator is not of full rank");
+        }
+
+        final double[][] matrix;
+        if (isVarianceMatrix) {
+            matrix = inMatrix;
+        } else {
+            matrix = formXtXInverse(inMatrix);
+        }
+
+//        System.err.println("Matrix:");
+//        System.err.println(new Matrix(matrix));
+
+        try {
+            cholesky = (new CholeskyDecomposition(matrix)).getL();
+        } catch (IllegalDimension illegalDimension) {
+            throw new RuntimeException("Unable to decompose matrix in mvnOperator");
+        }
+
+//        System.err.println("Cholesky:");
+//        System.err.println(new Matrix(cholesky));
+//        System.exit(-1);
+    }
+
+    public ModeIndependenceOperator(Parameter parameter, double scaleFactor,
+                                    MatrixParameter varMatrix, double weight, CoercionMode mode, boolean isVariance) {
+        this(parameter, scaleFactor, varMatrix.getParameterAsMatrix(), weight, mode, isVariance);
+    }
+
+    private double[][] formXtXInverse(double[][] X) {
+        int N = X.length;
+        int P = X[0].length;
+
+        double[][] matrix = new double[P][P];
+        for (int i = 0; i < P; i++) {
+            for (int j = 0; j < P; j++) {
+                double total = 0.0;
+                for (int k = 0; k < N; k++) {
+                    total += X[k][i] * X[k][j];
+                }
+                matrix[i][j] = total;
+            }
+        }
+
+//        System.err.println("XtX:");
+//        System.err.println(new Matrix(matrix));
+
+        // Take inverse
+        matrix = new SymmetricMatrix(matrix).inverse().toComponents();
+        return matrix;
+    }
+
+    public double doOperation() throws OperatorFailedException {
+
+        double[] x = parameter.getParameterValues();
+        double[] epsilon = new double[dim];
+        //double[] y = new double[dim];
+        for (int i = 0; i < dim; i++)
+            epsilon[i] = scaleFactor * MathUtils.nextGaussian();
+
+        for (int i = 0; i < dim; i++) {
+            for (int j = i; j < dim; j++) {
+                x[i] += cholesky[j][i] * epsilon[j];
+                // caution: decomposition returns lower triangular
+            }
+            parameter.setParameterValueQuietly(i, x[i]);
+//            System.out.println(i+" : "+x[i]);
+        }
+        parameter.fireParameterChangedEvent();
+//                    System.exit(-1);
+        return 0;
+    }
+
+    //MCMCOperator INTERFACE
+    public final String getOperatorName() {
+        return parameter.getParameterName();
+    }
+
+    public double getCoercableParameter() {
+        return Math.log(scaleFactor);
+    }
+
+    public void setCoercableParameter(double value) {
+        scaleFactor = Math.exp(value);
+    }
+
+    public double getRawParameter() {
+        return scaleFactor;
+    }
+
+    public double getScaleFactor() {
+        return scaleFactor;
+    }
+
+    public double getTargetAcceptanceProbability() {
+        return 0.234;
+    }
+
+    public double getMinimumAcceptanceLevel() {
+        return 0.1;
+    }
+
+    public double getMaximumAcceptanceLevel() {
+        return 0.4;
+    }
+
+    public double getMinimumGoodAcceptanceLevel() {
+        return 0.20;
+    }
+
+    public double getMaximumGoodAcceptanceLevel() {
+        return 0.30;
+    }
+
+    public final String getPerformanceSuggestion() {
+
+        double prob = Utils.getAcceptanceProbability(this);
+        double targetProb = getTargetAcceptanceProbability();
+        dr.util.NumberFormatter formatter = new dr.util.NumberFormatter(5);
+        double sf = OperatorUtils.optimizeWindowSize(scaleFactor, prob, targetProb);
+        if (prob < getMinimumGoodAcceptanceLevel()) {
+            return "Try setting scaleFactor to about " + formatter.format(sf);
+        } else if (prob > getMaximumGoodAcceptanceLevel()) {
+            return "Try setting scaleFactor to about " + formatter.format(sf);
+        } else return "";
+    }
+
+    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
+
+        public String getParserName() {
+            return MVN_OPERATOR;
+        }
+
+        public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+            CoercionMode mode = CoercionMode.parseMode(xo);
+
+            double weight = xo.getDoubleAttribute(WEIGHT);
+            double scaleFactor = xo.getDoubleAttribute(SCALE_FACTOR);
+
+            if (scaleFactor <= 0.0) {
+                throw new XMLParseException("scaleFactor must be greater than 0.0");
+            }
+
+            Parameter parameter = (Parameter) xo.getChild(Parameter.class);
+
+            boolean formXtXInverse = xo.getAttribute(FORM_XTX, false);
+
+            XMLObject cxo = xo.getChild(VARIANCE_MATRIX);
+            MatrixParameter varMatrix = (MatrixParameter) cxo.getChild(MatrixParameter.class);
+
+            // Make sure varMatrix is square and dim(varMatrix) = dim(parameter)
+
+            if (!formXtXInverse) {
+                if (varMatrix.getColumnDimension() != varMatrix.getRowDimension())
+                    throw new XMLParseException("The variance matrix is not square");
+            }
+
+            if (varMatrix.getColumnDimension() != parameter.getDimension())
+                throw new XMLParseException("The parameter and variance matrix have differing dimensions");
+
+            return new ModeIndependenceOperator(parameter, scaleFactor, varMatrix, weight, mode, !formXtXInverse);
+        }
+
+        //************************************************************************
+        // AbstractXMLObjectParser implementation
+        //************************************************************************
+
+        public String getParserDescription() {
+            return "This element returns a multivariate normal random walk operator on a given parameter.";
+        }
+
+        public Class getReturnType() {
+            return MCMCOperator.class;
+        }
+
+        public XMLSyntaxRule[] getSyntaxRules() {
+            return rules;
+        }
+
+        private final XMLSyntaxRule[] rules = {
+                AttributeRule.newDoubleRule(SCALE_FACTOR),
+                AttributeRule.newDoubleRule(WEIGHT),
+                AttributeRule.newBooleanRule(AUTO_OPTIMIZE, true),
+                AttributeRule.newBooleanRule(FORM_XTX, true),
+                new ElementRule(Parameter.class),
+                new ElementRule(VARIANCE_MATRIX,
+                        new XMLSyntaxRule[]{new ElementRule(MatrixParameter.class)})
+
+        };
+
+    };
+}
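
ModeIndependenceOperator reuses the multivariate-normal random-walk machinery (registered under the mvnOperator parser name, as the embedded parser shows): the supplied matrix, either a variance matrix or the inverse of X^T X when formXtXInverse is set, is Cholesky-decomposed once in the constructor, and each proposal adds scaleFactor times the transposed factor applied to a vector of independent standard normals. A compact, standalone restatement of the update performed in doOperation, with java.util.Random standing in for MathUtils:

    import java.util.Random;

    final class MvnRandomWalkSketch {
        // x' = x + scale * L^T * epsilon, where L is the lower-triangular Cholesky factor
        // and epsilon is a vector of standard normals -- the same cholesky[j][i], j >= i
        // indexing used in doOperation above.
        static double[] propose(double[] x, double[][] cholesky, double scale, Random rng) {
            int dim = x.length;
            double[] epsilon = new double[dim];
            for (int i = 0; i < dim; i++) {
                epsilon[i] = scale * rng.nextGaussian();
            }
            double[] proposal = x.clone();
            for (int i = 0; i < dim; i++) {
                for (int j = i; j < dim; j++) {
                    proposal[i] += cholesky[j][i] * epsilon[j];
                }
            }
            return proposal;
        }
    }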
diff --git a/src/dr/app/beauti/types/RelativeRatesType.java b/src/dr/inference/operators/PathDependentOperator.java
similarity index 71%
copy from src/dr/app/beauti/types/RelativeRatesType.java
copy to src/dr/inference/operators/PathDependentOperator.java
index 88a26ce..6acc9dd 100644
--- a/src/dr/app/beauti/types/RelativeRatesType.java
+++ b/src/dr/inference/operators/PathDependentOperator.java
@@ -1,5 +1,5 @@
 /*
- * RelativeRatesType.java
+ * PathDependentOperator.java
  *
  * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
  *
@@ -23,23 +23,16 @@
  * Boston, MA  02110-1301  USA
  */
 
-package dr.app.beauti.types;
+package dr.inference.operators;
 
 /**
- * @author Alexei Drummond
- * @author Walter Xie
+ * Created by Guy Baele on 13/01/16.
  */
-public enum RelativeRatesType {
-	MU_RELATIVE_RATES("Codon relative rates"),
-    CLOCK_RELATIVE_RATES("Clock relative rates");
+public interface PathDependentOperator {
 
-	RelativeRatesType(String name) {
-        this.name = name;
-    }
+    /**
+     * Set the path parameter for sampling from power-posterior
+     */
+    public void setPathParameter(double beta);
 
-    public String toString() {
-        return name;
-    }
-
-    private final String name;
 }
diff --git a/src/dr/inference/trace/CnCsPerSiteAnalysis.java b/src/dr/inference/trace/CnCsPerSiteAnalysis.java
index 9b3ff49..3a28c22 100644
--- a/src/dr/inference/trace/CnCsPerSiteAnalysis.java
+++ b/src/dr/inference/trace/CnCsPerSiteAnalysis.java
@@ -27,12 +27,11 @@ package dr.inference.trace;
 
 import dr.util.*;
 import dr.xml.*;
+import mpi.Comm;
 
 import java.io.File;
 import java.io.FileNotFoundException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.StringTokenizer;
+import java.util.*;
 
 /**
  * @author Philippe Lemey
@@ -161,39 +160,39 @@ public class CnCsPerSiteAnalysis implements Citable {
             sb.append(positiveProb);
         }
 
-            if (format.includeSiteClassification) {
-                sb.append(format.separator);
-                sb.append(classification);
-            }
+        if (format.includeSiteClassification) {
+            sb.append(format.separator);
+            sb.append(classification);
+        }
 
 
-            if (format.includeSignificantSymbol) {
-                sb.append(format.separator);
-                if (isSignificant) {
-                    sb.append("*");
-                } else {
-                    // Do nothing?
-                }
+        if (format.includeSignificantSymbol) {
+            sb.append(format.separator);
+            if (isSignificant) {
+                sb.append("*");
+            } else {
+                // Do nothing?
             }
+        }
 
-            if (format.includeSimulationOutcome) {
-                sb.append(format.separator);
-                sb.append(format.siteSimulation[index]);
-                sb.append(format.separator);
-                if (format.siteSimulation[index].equals("+") || format.siteSimulation[index].equals("-")) {
-                    if (classification.equals(format.siteSimulation[index])){
-                        sb.append("TP");   // True Positive
-                    } else {
-                        sb.append("FN");   // True Negative
-                    }
-                }  else {
-                    if (classification.equals(format.siteSimulation[index])){
-                        sb.append("TN");   // True Negative
-                    } else {
-                        sb.append("FP");   // False Positive
-                    }
+        if (format.includeSimulationOutcome) {
+            sb.append(format.separator);
+            sb.append(format.siteSimulation[index]);
+            sb.append(format.separator);
+            if (format.siteSimulation[index].equals("+") || format.siteSimulation[index].equals("-")) {
+                if (classification.equals(format.siteSimulation[index])){
+                    sb.append("TP");   // True Positive
+                } else {
+                    sb.append("FN");   // False Negative
+                }
+            }  else {
+                if (classification.equals(format.siteSimulation[index])){
+                    sb.append("TN");   // True Negative
+                } else {
+                    sb.append("FP");   // False Positive
                 }
             }
+        }
 
 
         sb.append("\n");
@@ -253,24 +252,24 @@ public class CnCsPerSiteAnalysis implements Citable {
         return sb.toString();
     }
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.COUNTING_PROCESSES;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Renaissance counting";
+    }
+
+    @Override
     public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(
-                new Citation(
-                        new Author[]{
-                                new Author("P", "Lemey"),
-                                new Author("VN", "Minin"),
-                                new Author("MA", "Suchard")
-                        },
-                        Citation.Status.IN_PREPARATION
-                )
-        );
-        return citations;
+        return Collections.singletonList(CommonCitations.LEMEY_2012_RENAISSANCE);
     }
 
     private class OutputFormat {
         boolean includeMeans;
-//        boolean includeHPD;
+        //        boolean includeHPD;
 //        boolean includeSignificanceLevel;
         boolean includePValues;
         boolean includeSignificantSymbol;
@@ -278,7 +277,7 @@ public class CnCsPerSiteAnalysis implements Citable {
         boolean includeSimulationOutcome;
         String[] siteSimulation;
         double cutoff;
-//        double proportion;
+        //        double proportion;
 //        SignificanceTest test;
         String separator;
 
diff --git a/src/dr/inference/trace/CnCsToDnDsPerSiteAnalysis.java b/src/dr/inference/trace/CnCsToDnDsPerSiteAnalysis.java
index 5753bb1..f74793a 100644
--- a/src/dr/inference/trace/CnCsToDnDsPerSiteAnalysis.java
+++ b/src/dr/inference/trace/CnCsToDnDsPerSiteAnalysis.java
@@ -32,9 +32,7 @@ import dr.xml.*;
 
 import java.io.File;
 import java.io.FileNotFoundException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.StringTokenizer;
+import java.util.*;
 
 /**
  * @author Philippe Lemey
@@ -371,19 +369,19 @@ public class CnCsToDnDsPerSiteAnalysis implements Citable {
         return sb.toString();
     }
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.COUNTING_PROCESSES;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Renaissance counting";
+    }
+
+    @Override
     public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(
-                new Citation(
-                        new Author[]{
-                                new Author("P", "Lemey"),
-                                new Author("VN", "Minin"),
-                                new Author("MA", "Suchard")
-                        },
-                        Citation.Status.IN_PREPARATION
-                )
-        );
-        return citations;
+        return Collections.singletonList(CommonCitations.LEMEY_2012_RENAISSANCE);
     }
 
     private class OutputFormat {
diff --git a/src/dr/inference/trace/DnDsPerSiteAnalysis.java b/src/dr/inference/trace/DnDsPerSiteAnalysis.java
index 502f8f8..efe63ee 100644
--- a/src/dr/inference/trace/DnDsPerSiteAnalysis.java
+++ b/src/dr/inference/trace/DnDsPerSiteAnalysis.java
@@ -32,10 +32,7 @@ import dr.xml.*;
 
 import java.io.File;
 import java.io.FileNotFoundException;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-import java.util.StringTokenizer;
+import java.util.*;
 
 /**
  * @author Philippe Lemey
@@ -333,19 +330,19 @@ public class DnDsPerSiteAnalysis implements Citable {
         return sb.toString();
     }
 
+    @Override
+    public Citation.Category getCategory() {
+        return Citation.Category.COUNTING_PROCESSES;
+    }
+
+    @Override
+    public String getDescription() {
+        return "Renaissance counting";
+    }
+
+    @Override
     public List<Citation> getCitations() {
-        List<Citation> citations = new ArrayList<Citation>();
-        citations.add(
-                new Citation(
-                        new Author[]{
-                                new Author("P", "Lemey"),
-                                new Author("VN", "Minin"),
-                                new Author("MA", "Suchard")
-                        },
-                        Citation.Status.IN_PREPARATION
-                )
-        );
-        return citations;
+        return Collections.singletonList(CommonCitations.LEMEY_2012_RENAISSANCE);
     }
 
     private class OutputFormat {
diff --git a/src/dr/inferencexml/distribution/GammaDistributionModelParser.java b/src/dr/inferencexml/distribution/GammaDistributionModelParser.java
index e44e431..4502ca7 100644
--- a/src/dr/inferencexml/distribution/GammaDistributionModelParser.java
+++ b/src/dr/inferencexml/distribution/GammaDistributionModelParser.java
@@ -27,7 +27,6 @@ package dr.inferencexml.distribution;
 
 import dr.inference.distribution.GammaDistributionModel;
 import dr.inference.model.Parameter;
-import dr.inference.model.Statistic;
 import dr.xml.*;
 
 public class GammaDistributionModelParser extends AbstractXMLObjectParser {
@@ -42,23 +41,48 @@ public class GammaDistributionModelParser extends AbstractXMLObjectParser {
         return GammaDistributionModel.GAMMA_DISTRIBUTION_MODEL;
     }
 
+    private Parameter getParameterOrValue(String name, XMLObject xo) throws XMLParseException {
+        Parameter parameter;
+        if (xo.hasChildNamed(name)) {
+            XMLObject cxo = xo.getChild(name);
+
+            parameter = (Parameter)cxo.getChild(Parameter.class);
+            if (parameter == null) {
+                if (cxo.getChildCount() < 1) {
+                    throw new XMLParseException("Distribution parameter, " + name + ", is missing a value or parameter element");
+                }
+                try {
+                    double value = cxo.getDoubleChild(0);
+                    parameter = new Parameter.Default(value);
+                } catch (XMLParseException xpe) {
+                    throw new XMLParseException("Distribution parameter, " + name + ", has bad value: " + xpe.getMessage());
+                }
+            }
+
+            return parameter;
+        } else {
+            return null;
+        }
+
+    }
+
     public Object parseXMLObject(XMLObject xo) throws XMLParseException {
 
         double offset = xo.getAttribute(OFFSET, 0.0);
 
-        Parameter shapeParameter = (Parameter) xo.getElementFirstChild(SHAPE);
+        Parameter shapeParameter = getParameterOrValue(SHAPE, xo);
 
         Parameter parameter2;
         GammaDistributionModel.GammaParameterizationType parameterization;
 
         if (xo.hasChildNamed(SCALE)) {
-            parameter2 = (Parameter)xo.getElementFirstChild(SCALE);
+            parameter2 = getParameterOrValue(SCALE, xo);
             parameterization = GammaDistributionModel.GammaParameterizationType.ShapeScale;
         } else if (xo.hasChildNamed(RATE)) {
-            parameter2 = (Parameter)xo.getElementFirstChild(RATE);
+            parameter2 = getParameterOrValue(RATE, xo);
             parameterization = GammaDistributionModel.GammaParameterizationType.ShapeRate;
         } else if (xo.hasChildNamed(MEAN)) {
-            parameter2 = (Parameter)xo.getElementFirstChild(MEAN);
+            parameter2 = getParameterOrValue(MEAN, xo);
             parameterization = GammaDistributionModel.GammaParameterizationType.ShapeMean;
         } else {
             parameter2 = null;
@@ -77,12 +101,10 @@ public class GammaDistributionModelParser extends AbstractXMLObjectParser {
     }
 
     private final XMLSyntaxRule[] rules = {
-            new ElementRule(SHAPE,
-                    new XMLSyntaxRule[]{new ElementRule(Parameter.class)}, "Shape parameter"),
-            new XORRule( new ElementRule[] {
-                    new ElementRule(SCALE,  new XMLSyntaxRule[]{new ElementRule(Parameter.class)}, "Scale parameter"),
-                    new ElementRule(RATE,  new XMLSyntaxRule[]{new ElementRule(Parameter.class)}, "Rate parameter"),
-                    new ElementRule(MEAN,  new XMLSyntaxRule[]{new ElementRule(Parameter.class)}, "Mean parameter") }, true),
+            new ElementRule(SHAPE,  new XMLSyntaxRule[]{new ElementRule(Parameter.class, true)}, "Shape parameter"),
+            new ElementRule(SCALE,  new XMLSyntaxRule[]{new ElementRule(Parameter.class, true)}, "Scale parameter", true),
+            new ElementRule(RATE,  new XMLSyntaxRule[]{new ElementRule(Parameter.class, true)}, "Rate parameter", true),
+            new ElementRule(MEAN,  new XMLSyntaxRule[]{new ElementRule(Parameter.class, true)}, "Mean parameter", true),
             AttributeRule.newDoubleRule(OFFSET, true)
     };
 
diff --git a/src/dr/inferencexml/distribution/GeneralizedLinearModelParser.java b/src/dr/inferencexml/distribution/GeneralizedLinearModelParser.java
index 054c9c8..9277d64 100644
--- a/src/dr/inferencexml/distribution/GeneralizedLinearModelParser.java
+++ b/src/dr/inferencexml/distribution/GeneralizedLinearModelParser.java
@@ -1,7 +1,7 @@
 /*
  * GeneralizedLinearModelParser.java
  *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ * Copyright (c) 2002-2016 Alexei Drummond, Andrew Rambaut and Marc Suchard
  *
  * This file is part of BEAST.
  * See the NOTICE file distributed with this work for additional
@@ -34,7 +34,6 @@ import dr.inference.distribution.LogisticRegression;
 import dr.inference.model.DesignMatrix;
 import dr.inference.model.Likelihood;
 import dr.inference.model.Parameter;
-import dr.math.matrixAlgebra.Matrix;
 import dr.xml.*;
 
 /**
@@ -66,7 +65,7 @@ public class GeneralizedLinearModelParser extends AbstractXMLObjectParser {
     public Object parseXMLObject(XMLObject xo) throws XMLParseException {
 
 
-        System.err.println("PASSED 0");
+//        System.err.println("PASSED 0");
         XMLObject cxo = xo.getChild(DEPENDENT_VARIABLES);
         Parameter dependentParam = null;
         if (cxo != null)
@@ -114,11 +113,11 @@ public class GeneralizedLinearModelParser extends AbstractXMLObjectParser {
 
             glm.addScaleParameter(scaleParameter, scaleDesign);
         }
-        System.err.println("START 0");
+//        System.err.println("START 0");
         addIndependentParameters(xo, glm, dependentParam);
-        System.err.println("START 1");
+//        System.err.println("START 1");
         addRandomEffects(xo, glm, dependentParam);
-        System.err.println("START 2");
+//        System.err.println("START 2");
 
         boolean checkIdentifiability = xo.getAttribute(CHECK_IDENTIFIABILITY, true);
         if (checkIdentifiability) {
@@ -126,9 +125,9 @@ public class GeneralizedLinearModelParser extends AbstractXMLObjectParser {
                 throw new XMLParseException("All design matrix predictors are not identifiable in "+  xo.getId());
             }
         }
-        System.err.println("PASSED B");
+//        System.err.println("PASSED B");
         checkFullRankOfMatrix = xo.getAttribute(CHECK_FULL_RANK,true);
-        System.err.println("PASSED C");
+//        System.err.println("PASSED C");
         return glm;
     }
 
diff --git a/src/dr/inferencexml/distribution/MomentDistributionModelParser.java b/src/dr/inferencexml/distribution/MomentDistributionModelParser.java
index 9de30fd..ac19f05 100644
--- a/src/dr/inferencexml/distribution/MomentDistributionModelParser.java
+++ b/src/dr/inferencexml/distribution/MomentDistributionModelParser.java
@@ -44,7 +44,9 @@ public class MomentDistributionModelParser extends AbstractXMLObjectParser {
     public Object parseXMLObject(XMLObject xo) throws XMLParseException {
         Parameter mean=(Parameter) xo.getChild(MEAN).getChild(0);
         Parameter prec=(Parameter) xo.getChild(PREC).getChild(0);
-        Parameter cutoff=(Parameter) xo.getChild(CUTOFF).getChild(0);
+        Parameter cutoff = null;
+        if (xo.getChild(CUTOFF) != null)
+            cutoff = (Parameter) xo.getChild(CUTOFF).getChild(0);
         Parameter data=(Parameter) xo.getChild(DATA).getChild(0);
 
         return new MomentDistributionModel(mean, prec, cutoff, data);
@@ -63,8 +65,8 @@ public class MomentDistributionModelParser extends AbstractXMLObjectParser {
             ),
                     new ElementRule(CUTOFF,
                             new XMLSyntaxRule[]{
-                                            new ElementRule(Parameter.class)
-                                    }
+                                            new ElementRule(Parameter.class, true)
+                                    },true
                     ),
                     new ElementRule(PREC,
                             new XMLSyntaxRule[]{
diff --git a/src/dr/inferencexml/model/ComplementParameterParser.java b/src/dr/inferencexml/model/ComplementParameterParser.java
new file mode 100644
index 0000000..9c96d2c
--- /dev/null
+++ b/src/dr/inferencexml/model/ComplementParameterParser.java
@@ -0,0 +1,67 @@
+/*
+ * ComplementParameterParser.java
+ *
+ * Copyright (c) 2002-2016 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.inferencexml.model;
+
+import dr.inference.model.ComplementParameter;
+import dr.inference.model.Parameter;
+import dr.inference.model.SumParameter;
+import dr.xml.*;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ */
+public class ComplementParameterParser extends AbstractXMLObjectParser {
+
+    public static final String COMPLEMENT_PARAMETER = "complementParameter";
+
+    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+        Parameter parameter = (Parameter) xo.getChild(Parameter.class);
+        return new ComplementParameter(parameter);
+    }
+
+    public XMLSyntaxRule[] getSyntaxRules() {
+        return rules;
+    }
+
+    private final XMLSyntaxRule[] rules = {
+            new ElementRule(Parameter.class),
+    };
+
+    public String getParserDescription() {
+        return "An element-wise complement of parameters.";
+    }
+
+    public Class getReturnType() {
+        return Parameter.class;
+    }
+
+    public String getParserName() {
+        return COMPLEMENT_PARAMETER;
+    }
+}
diff --git a/src/dr/inferencexml/model/CompoundLikelihoodParser.java b/src/dr/inferencexml/model/CompoundLikelihoodParser.java
index 04cf9c1..6360f13 100644
--- a/src/dr/inferencexml/model/CompoundLikelihoodParser.java
+++ b/src/dr/inferencexml/model/CompoundLikelihoodParser.java
@@ -74,7 +74,10 @@ public class CompoundLikelihoodParser extends AbstractXMLObjectParser {
         for (int i = 0; i < xo.getChildCount(); i++) {
             final Object child = xo.getChild(i);
             if (child instanceof Likelihood) {
-            	
+
+                if (likelihoods.contains(child)) {
+                    throw new XMLParseException("The likelihood element, '" + ((Likelihood) child).getId() + "', is already present in the likelihood or prior density.");
+                }
                 likelihoods.add((Likelihood) child);
                 
 //            } else if (child instanceof BeagleBranchLikelihoods){
@@ -108,6 +111,7 @@ public class CompoundLikelihoodParser extends AbstractXMLObjectParser {
             compoundLikelihood = new CompoundLikelihood(likelihoods);
         }
 
+
 //		TODO
 //        System.err.println("CompundLikelihood consists of " + compoundLikelihood.getLikelihoodCount() + " likelihood element(s)");
         
diff --git a/src/dr/inferencexml/model/ElementWiseMatrixMultiplicationParser.java b/src/dr/inferencexml/model/ElementWiseMatrixMultiplicationParser.java
new file mode 100644
index 0000000..0e25cc3
--- /dev/null
+++ b/src/dr/inferencexml/model/ElementWiseMatrixMultiplicationParser.java
@@ -0,0 +1,48 @@
+package dr.inferencexml.model;
+
+
+import dr.inference.model.ElementWiseMatrixMultiplicationParameter;
+import dr.inference.model.MatrixParameter;
+import dr.xml.AbstractXMLObjectParser;
+import dr.xml.XMLObject;
+import dr.xml.XMLParseException;
+import dr.xml.XMLSyntaxRule;
+
+/**
+ * Created by max on 11/30/15.
+ */
+public class ElementWiseMatrixMultiplicationParser extends AbstractXMLObjectParser {
+    public final static String ELEMENT_WISE_MATRIX_MULTIPLICATION_PARAMETER="ElementWiseMatrixMultiplicationParameter";
+    public final static String NAME="name";
+
+    @Override
+    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+        final String name = xo.hasId() ? xo.getId() : null;
+        MatrixParameter[] matList=new MatrixParameter[xo.getChildCount()];
+        for (int i = 0; i <xo.getChildCount(); i++) {
+            matList[i]=(MatrixParameter) xo.getChild(i);
+        }
+
+        return new ElementWiseMatrixMultiplicationParameter(name, matList);
+    }
+
+    @Override
+    public XMLSyntaxRule[] getSyntaxRules() {
+        return new XMLSyntaxRule[0];
+    }
+
+    @Override
+    public String getParserDescription() {
+        return "Returns element wise matrix multiplication of a series of matrices";
+    }
+
+    @Override
+    public Class getReturnType() {
+        return ElementWiseMatrixMultiplicationParameter.class;
+    }
+
+    @Override
+    public String getParserName() {
+        return ELEMENT_WISE_MATRIX_MULTIPLICATION_PARAMETER;
+    }
+}
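As a hedged illustration of the parser above: it accepts any number of MatrixParameter children and registers itself under the capitalized element name ElementWiseMatrixMultiplicationParameter. The matrixParameter child element name and the ids below are assumptions:

    <ElementWiseMatrixMultiplicationParameter id="hadamardProduct">
        <matrixParameter idref="A"/>
        <matrixParameter idref="B"/>
    </ElementWiseMatrixMultiplicationParameter>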
diff --git a/src/dr/inferencexml/model/FastBUTMPParser.java b/src/dr/inferencexml/model/FastBUTMPParser.java
new file mode 100644
index 0000000..1aecc01
--- /dev/null
+++ b/src/dr/inferencexml/model/FastBUTMPParser.java
@@ -0,0 +1,108 @@
+/*
+ * FastBUTMPParser.java
+ *
+ * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.inferencexml.model;
+
+import dr.inference.model.*;
+import dr.xml.*;
+
+/**
+ @author Max Tolkoff
+ */
+
+// Block Upper Triangular Matrix Parameter
+public class FastBUTMPParser extends AbstractXMLObjectParser {
+    private static final String FAST_BUTMP ="FastBUTMP";
+    private static final String ROWS="rows";
+    private static final String COLUMNS="columns";
+    private static final String TRANSPOSE="transpose";
+    private static final String DIAGONAL_RESTRICTION="diagonalRestriction";
+
+    @Override
+    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+        final String name = xo.hasId() ? xo.getId() : null;
+        final boolean transpose= xo.getAttribute(TRANSPOSE, false);
+//        int rowDim=xo.getChildCount();
+//        int colDim;
+        final boolean diagonalRestriction=xo.getAttribute(DIAGONAL_RESTRICTION, false);
+        Parameter temp=null;
+//        if(xo.hasAttribute(COLUMN_DIMENSION)) {
+//            colDim = xo.getAttribute(COLUMN_DIMENSION, 1);
+//        }
+//        else
+//        {
+//            temp=(Parameter) xo.getChild(xo.getChildCount()-1);
+//            colDim=temp.getDimension();
+//        }
+        int rows = xo.getAttribute(ROWS, 1);
+        int cols = xo.getAttribute(COLUMNS, 1);
+
+//        Parameter[] params=new Parameter[xo.getChildCount()];
+
+
+
+//        for (int i = 0; i < xo.getChildCount(); i++) {
+//            temp = (Parameter) xo.getChild(i);
+//            params[i]=temp;}
+
+//        BlockUpperTriangularMatrixParameter ltmp=new BlockUpperTriangularMatrixParameter(name, params, diagonalRestriction);
+        if(transpose){
+            return new FastTransposedBUTMP(name, rows, cols);
+        }
+        else {
+            return new FastBUTMP(name, rows, cols);
+        }
+    }
+
+    @Override
+    public XMLSyntaxRule[] getSyntaxRules() {
+        return rules;
+    }
+
+    private final XMLSyntaxRule[] rules = {
+//            new ElementRule(Parameter.class, 0, Integer.MAX_VALUE),
+            AttributeRule.newBooleanRule(TRANSPOSE, true),
+            AttributeRule.newIntegerRule(COLUMNS, true),
+            AttributeRule.newIntegerRule(ROWS, true),
+
+    };
+
+
+    @Override
+    public String getParserDescription() {
+        return "Returns a blockUpperTriangularMatrixParameter which is a compoundParameter which forces the last element to be of full length, the second to last element to be of full length-1, etc.";  //To change body of implemented methods use File | Settings | File Templates.
+    }
+
+    @Override
+    public Class getReturnType() {
+        return BlockUpperTriangularMatrixParameter.class;
+    }
+
+    @Override
+    public String getParserName() {
+        return FAST_BUTMP;
+    }
+}
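A hypothetical usage sketch for the FastBUTMP element, using only the attributes declared in the syntax rules above (rows and columns default to 1, transpose to false); the id is an assumption:

    <FastBUTMP id="loadings.butmp" rows="3" columns="5" transpose="false"/>

Note that the diagonalRestriction attribute is read in parseXMLObject but, with the block-construction code commented out, it is currently unused and is not declared in the syntax rules.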
diff --git a/src/dr/inferencexml/model/ImmutableParameterParser.java b/src/dr/inferencexml/model/ImmutableParameterParser.java
index be8026f..099cd2a 100644
--- a/src/dr/inferencexml/model/ImmutableParameterParser.java
+++ b/src/dr/inferencexml/model/ImmutableParameterParser.java
@@ -31,6 +31,11 @@ public class ImmutableParameterParser extends AbstractXMLObjectParser {
             public void acceptValues() {
                 //do nothing
             }
+
+            public int getDimension() {
+                return statistic.getDimension();
+            }
+
             public void setParameterValue(int dim, double value) {
                 throw new RuntimeException("Forbidden call to ImmutableParameter.");
             }
diff --git a/src/dr/inferencexml/model/IndianBuffetProcessPriorParser.java b/src/dr/inferencexml/model/IndianBuffetProcessPriorParser.java
index 46e4261..5aad1ad 100644
--- a/src/dr/inferencexml/model/IndianBuffetProcessPriorParser.java
+++ b/src/dr/inferencexml/model/IndianBuffetProcessPriorParser.java
@@ -34,7 +34,7 @@ import dr.xml.*;
  * @author Max Tolkoff
  */
 public class IndianBuffetProcessPriorParser extends AbstractXMLObjectParser {
-    public static final String INDIAN_BUFFET_PROCESS="IndianBuffetProcess";
+    public static final String INDIAN_BUFFET_PROCESS="indianBuffetProcess";
     public static final String BETA="beta";
     public static final String ALPHA="alpha";
     public static final String DATA="data";
diff --git a/src/dr/inferencexml/model/LatentFactorModelParser.java b/src/dr/inferencexml/model/LatentFactorModelParser.java
index cf1a817..b78be8c 100644
--- a/src/dr/inferencexml/model/LatentFactorModelParser.java
+++ b/src/dr/inferencexml/model/LatentFactorModelParser.java
@@ -55,10 +55,17 @@ public class LatentFactorModelParser extends AbstractXMLObjectParser {
 
     public Object parseXMLObject(XMLObject xo) throws XMLParseException {
 
-        MatrixParameter factors = MatrixParameter.recast("name",
-                (CompoundParameter) xo.getChild(FACTORS).getChild(CompoundParameter.class));
-        MatrixParameter dataParameter = (MatrixParameter) xo.getChild(DATA).getChild(MatrixParameter.class);
-        MatrixParameter loadings = (MatrixParameter) xo.getChild(LOADINGS).getChild(MatrixParameter.class);
+        MatrixParameterInterface factors;
+        if (xo.getChild(FACTORS).getChild(FastMatrixParameter.class) == null)
+        {
+            CompoundParameter factorsTemp = (CompoundParameter) xo.getChild(FACTORS).getChild(CompoundParameter.class);
+            factors = MatrixParameter.recast(factorsTemp.getParameterName(), factorsTemp);
+        }
+        else {
+            factors = (MatrixParameterInterface) xo.getChild(FACTORS).getChild(MatrixParameterInterface.class);
+        }
+        MatrixParameterInterface dataParameter = (MatrixParameterInterface) xo.getChild(DATA).getChild(MatrixParameterInterface.class);
+        MatrixParameterInterface loadings = (MatrixParameterInterface) xo.getChild(LOADINGS).getChild(MatrixParameterInterface.class);
         DiagonalMatrix rowPrecision = (DiagonalMatrix) xo.getChild(ROW_PRECISION).getChild(MatrixParameter.class);
         DiagonalMatrix colPrecision = (DiagonalMatrix) xo.getChild(COLUMN_PRECISION).getChild(MatrixParameter.class);
         boolean newModel= xo.getAttribute(COMPUTE_RESIDUALS_FOR_DISCRETE, true);
@@ -82,7 +89,7 @@ public class LatentFactorModelParser extends AbstractXMLObjectParser {
 //        }
 
 
-        return new LatentFactorModel(dataParameter, factors, loadings, rowPrecision, colPrecision, scaleData, continuous, newModel,computeResiduals,computeFactors, computeLoadings);
+        return new LatentFactorModel(dataParameter, factors, loadings, rowPrecision, colPrecision, scaleData, continuous, newModel, computeResiduals, computeFactors, computeLoadings);
     }
 
     private static final XMLSyntaxRule[] rules = {
@@ -93,13 +100,13 @@ public class LatentFactorModelParser extends AbstractXMLObjectParser {
             AttributeRule.newBooleanRule(RECOMPUTE_RESIDUALS, true),
             AttributeRule.newBooleanRule(RECOMPUTE_LOADINGS,true),
             new ElementRule(DATA, new XMLSyntaxRule[]{
-                    new ElementRule(MatrixParameter.class),
+                    new ElementRule(MatrixParameterInterface.class),
             }),
             new ElementRule(FACTORS, new XMLSyntaxRule[]{
-                    new ElementRule(CompoundParameter.class),
+                    new ElementRule(CompoundParameter.class)
             }),
             new ElementRule(LOADINGS, new XMLSyntaxRule[]{
-                    new ElementRule(MatrixParameter.class)
+                    new ElementRule(MatrixParameterInterface.class)
             }),
             new ElementRule(ROW_PRECISION, new XMLSyntaxRule[]{
                     new ElementRule(DiagonalMatrix.class)
diff --git a/src/dr/inferencexml/operators/LoadingsGibbsOperatorParser.java b/src/dr/inferencexml/operators/LoadingsGibbsOperatorParser.java
index e41ccb0..cfb3b87 100644
--- a/src/dr/inferencexml/operators/LoadingsGibbsOperatorParser.java
+++ b/src/dr/inferencexml/operators/LoadingsGibbsOperatorParser.java
@@ -26,8 +26,11 @@
 package dr.inferencexml.operators;
 
 import dr.inference.distribution.DistributionLikelihood;
+import dr.inference.distribution.MomentDistributionModel;
 import dr.inference.model.LatentFactorModel;
+import dr.inference.model.MatrixParameterInterface;
 import dr.inference.operators.LoadingsGibbsOperator;
+import dr.inference.operators.LoadingsGibbsTruncatedOperator;
 import dr.xml.*;
 
 /**
@@ -49,9 +52,17 @@ public class LoadingsGibbsOperatorParser extends AbstractXMLObjectParser {
         double weight = Double.parseDouble(weightTemp);
         LatentFactorModel LFM = (LatentFactorModel) xo.getChild(LatentFactorModel.class);
         DistributionLikelihood prior = (DistributionLikelihood) xo.getChild(DistributionLikelihood.class);
+        MomentDistributionModel prior2 = (MomentDistributionModel) xo.getChild(MomentDistributionModel.class);
         boolean randomScan = xo.getAttribute(RANDOM_SCAN, true);
+        MatrixParameterInterface loadings=null;
+        if(xo.getChild(MatrixParameterInterface.class)!=null){
+            loadings=(MatrixParameterInterface) xo.getChild(MatrixParameterInterface.class);
+        }
 
+        if(prior!=null)
         return new LoadingsGibbsOperator(LFM, prior, weight, randomScan);  //To change body of implemented methods use File | Settings | File Templates.
+        else
+            return new LoadingsGibbsTruncatedOperator(LFM, prior2, weight, randomScan, loadings);
     }
 
     @Override
@@ -61,7 +72,11 @@ public class LoadingsGibbsOperatorParser extends AbstractXMLObjectParser {
 
     private XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
             new ElementRule(LatentFactorModel.class),
+
+            new XORRule(
             new ElementRule(DistributionLikelihood.class),
+            new ElementRule(MomentDistributionModel.class)
+            ),
 //            new ElementRule(CompoundParameter.class),
             AttributeRule.newDoubleRule(WEIGHT),
     };
diff --git a/src/dr/inferencexml/operators/MaskMoveOperatorParser.java b/src/dr/inferencexml/operators/MaskMoveOperatorParser.java
new file mode 100644
index 0000000..3476c89
--- /dev/null
+++ b/src/dr/inferencexml/operators/MaskMoveOperatorParser.java
@@ -0,0 +1,129 @@
+/*
+ * MaskMoveOperatorParser.java
+ *
+ * Copyright (c) 2002-2016 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ *
+ * This file is part of BEAST.
+ * See the NOTICE file distributed with this work for additional
+ * information regarding copyright ownership and licensing.
+ *
+ * BEAST is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ *  BEAST is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with BEAST; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA  02110-1301  USA
+ */
+
+package dr.inferencexml.operators;
+
+import dr.inference.model.Parameter;
+import dr.inference.operators.MCMCOperator;
+import dr.inference.operators.MaskMoveOperator;
+import dr.xml.*;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ *
+ */
+public class MaskMoveOperatorParser extends AbstractXMLObjectParser {
+
+    public static final String MASK_FLIP_OPERATOR = "maskMoveOperator";
+    public static final String CUT_POINT = "cutPoint";
+    public static final String CUT_MASK = "cutMask";
+    public static final String SELECT_BEFORE = "selectBefore";
+    public static final String SELECT_AFTER = "selectAfter";
+//    public static final String BEFORE_VALUE = "before";
+//    public static final String AFTER_VALUE = "after";
+
+    public String getParserName() {
+        return MASK_FLIP_OPERATOR;
+    }
+
+    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
+
+        double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
+
+//        Parameter parameter = (Parameter) xo.getChild(Parameter.class);
+
+        List<Parameter> masks = new ArrayList<Parameter>();
+        for (int i = 0; i < xo.getChildCount(); ++i) {
+            if (xo.getChild(i) instanceof Parameter) {
+                masks.add((Parameter) xo.getChild(i));
+            }
+        }
+
+        Parameter cutPoint = (Parameter) xo.getElementFirstChild(CUT_POINT);
+//        Parameter before = (Parameter) xo.getElementFirstChild(BEFORE_VALUE);
+//        Parameter after = (Parameter) xo.getElementFirstChild(AFTER_VALUE);
+
+        double[] before = xo.getChild(CUT_MASK).getDoubleArrayAttribute(SELECT_BEFORE);
+        double[] after = xo.getChild(CUT_MASK).getDoubleArrayAttribute(SELECT_AFTER);
+
+        int[] beforeList = new int[before.length];
+        for (int i = 0; i < before.length; ++i) {
+            beforeList[i] = ((int) (before[i] -1.0 + 0.5)); // Switch to 0-index
+        }
+
+        int[] afterList = new int[after.length];
+        for (int i = 0; i < after.length; ++i) {
+            afterList[i] = ((int) (after[i] -1.0 + 0.5));
+        }
+
+        if (beforeList.length != afterList.length) {
+            throw new XMLParseException("selectBefore length != selectAfter length");
+        }
+
+
+        if (!MaskMoveOperator.checkMaskValues(masks, cutPoint, beforeList, afterList)) {
+            String name = xo.hasId() ? xo.getId() : "null";
+            throw new XMLParseException("Bad initialization parameter values in " + name);
+        }
+
+        return new MaskMoveOperator(masks, cutPoint, beforeList, afterList, weight);
+    }
+
+    //************************************************************************
+    // AbstractXMLObjectParser implementation
+    //************************************************************************
+
+    public String getParserDescription() {
+        return "This element returns a mask-flip operator on a set of given parameters.";
+    }
+
+    public Class getReturnType() {
+        return MaskMoveOperator.class;
+    }
+
+    public XMLSyntaxRule[] getSyntaxRules() {
+        return rules;
+    }
+
+    private final XMLSyntaxRule[] rules = {
+            AttributeRule.newDoubleRule(MCMCOperator.WEIGHT),
+            new ElementRule(Parameter.class, 1, Integer.MAX_VALUE),
+            new ElementRule(CUT_POINT, new XMLSyntaxRule[] {
+                   new ElementRule(Parameter.class),
+            }),
+            new ElementRule(CUT_MASK, new XMLSyntaxRule[] {
+                    AttributeRule.newDoubleArrayRule(SELECT_BEFORE),
+                    AttributeRule.newDoubleArrayRule(SELECT_AFTER),
+            })
+//            new ElementRule(BEFORE_VALUE, new XMLSyntaxRule[] {
+//                   new ElementRule(Parameter.class),
+//            }),
+//            new ElementRule(AFTER_VALUE, new XMLSyntaxRule[] {
+//                   new ElementRule(Parameter.class),
+//            }),
+    };
+}
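A sketch of how the maskMoveOperator element might be configured, assembled from the syntax rules above (a weight attribute, one or more mask parameters, a cutPoint wrapping a parameter, and a cutMask carrying the selectBefore/selectAfter arrays); all ids and index values are hypothetical:

    <maskMoveOperator weight="1">
        <parameter idref="mask1"/>
        <parameter idref="mask2"/>
        <cutPoint>
            <parameter idref="cutPointParameter"/>
        </cutPoint>
        <cutMask selectBefore="1 2" selectAfter="3 4"/>
    </maskMoveOperator>

The selectBefore and selectAfter lists are 1-based in the XML and converted to 0-based indices by the parser, and they must have the same length or an XMLParseException is thrown.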
diff --git a/src/dr/math/distributions/CompoundGaussianProcess.java b/src/dr/math/distributions/CompoundGaussianProcess.java
index ec94247..f9717ed 100644
--- a/src/dr/math/distributions/CompoundGaussianProcess.java
+++ b/src/dr/math/distributions/CompoundGaussianProcess.java
@@ -28,38 +28,190 @@ package dr.math.distributions;
 import dr.inference.distribution.DistributionLikelihood;
 import dr.inference.model.CompoundLikelihood;
 import dr.inference.model.Likelihood;
+import dr.xml.Reportable;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.concurrent.*;
 
 /**
  * @author Marc A. Suchard
  */
 
-public class CompoundGaussianProcess implements GaussianProcessRandomGenerator {
+public class CompoundGaussianProcess implements GaussianProcessRandomGenerator, Reportable {
 
     private final List<GaussianProcessRandomGenerator> gpList;
     private final List<Integer> copyList;
     private final List<Likelihood> likelihoodList;
     private final CompoundLikelihood compoundLikelihood;
 
+    private final ExecutorService pool;
+    private final int threadCount;
+    private final List<Callable<DrawResult>> callers;
+
+    private static final boolean USE_POOL = false;
+
+    private int dimension = -1;
+
     public CompoundGaussianProcess(List<GaussianProcessRandomGenerator> gpList, List<Likelihood> likelihoodList,
                                    List<Integer> copyList) {
         this.gpList = gpList;
         this.copyList = copyList;
         this.likelihoodList = likelihoodList;
         compoundLikelihood = new CompoundLikelihood(likelihoodList);
+
+        if (USE_POOL) {
+            callers = createTasks();
+            threadCount = callers.size();
+            pool = Executors.newFixedThreadPool(threadCount);
+        } else {
+            callers = null;
+            threadCount = -1;
+            pool = null;
+        }
     }
 
     public boolean contains(Likelihood likelihood) {
         return likelihoodList.contains(likelihood);
     }
 
+    public int getDimension() {
+        if (dimension == -1) {
+            dimension = 0;
+            for (GaussianProcessRandomGenerator gp : gpList) {
+                dimension += gp.getDimension();
+            }
+        }
+        return dimension;
+    }
+
+    @Override
+    public double[][] getPrecisionMatrix() {
+        if (gpList.size() == 1) {
+            return gpList.get(0).getPrecisionMatrix();
+        } else {
+            final int dim = getDimension();
+            double[][] precision = new double[dim][dim];
+
+            int offset = 0;
+            for (GaussianProcessRandomGenerator gp : gpList) {
+                final int d = gp.getDimension();
+                double[][] p = gp.getPrecisionMatrix();
+
+                for (int i = 0; i < d; ++i) {
+                    System.arraycopy(p[i], 0, precision[offset + i], offset, d);
+                }
+
+                offset += d;
+            }
+
+            return precision;
+        }
+    }
+
     @Override
     public Likelihood getLikelihood() { return compoundLikelihood; }
 
     @Override
+    public String getReport() {
+        StringBuilder sb = new StringBuilder();
+        sb.append("compoundGP: " + getLikelihood().getLogLikelihood());
+        return sb.toString();
+    }
+
+    private class DrawResult {
+        final double[] result;
+        final int offset;
+
+        DrawResult(double[] result, int offset) {
+            this.result = result;
+            this.offset = offset;
+        }
+    }
+
+    private class DrawCaller implements Callable<DrawResult> {
+
+        public DrawCaller(GaussianProcessRandomGenerator gp, int copies, int offset, boolean isUnivariate) {
+            this.gp = gp;
+            this.copies = copies;
+            this.offset = offset;
+            this.isUnivariate = isUnivariate;
+        }
+
+        public DrawResult call() throws Exception {
+
+            final double[] vector;
+            if (isUnivariate) {
+                vector = new double[copies];
+                for (int i = 0; i < copies; ++i) {
+                    vector[i] = (Double) gp.nextRandom();
+                }
+            } else {
+                vector = (double[]) gp.nextRandom();
+            }
+
+            return new DrawResult(vector, offset);
+        }
+
+        private final GaussianProcessRandomGenerator gp;
+        private final int copies;
+        private final int offset;
+        private final boolean isUnivariate;
+    }
+
+    private List<Callable<DrawResult>> createTasks() {
+
+        List<Callable<DrawResult>> callers = new ArrayList<Callable<DrawResult>>();
+
+        int offset = 0;
+        int index = 0;
+        for (GaussianProcessRandomGenerator gp : gpList) {
+            final int copies = copyList.get(index);
+            if (likelihoodList.get(index) instanceof DistributionLikelihood) { // Univariate
+                callers.add(new DrawCaller(gp, copies, offset, true));
+                offset += copies;
+            } else {
+                for (int i = 0; i < copies; ++i) {
+                    callers.add(new DrawCaller(gp, 1, offset, false));
+                    offset += gp.getDimension();
+                }
+            }
+            ++index; // advance to the copy/likelihood entries for the next generator
+        }
+
+        return callers;
+    }
+
+    @Override
     public Object nextRandom() {
+        if (USE_POOL) {
+            return nextRandomParallel();
+        } else {
+            return nextRandomSerial();
+        }
+    }
+
+    private Object nextRandomParallel() {
+
+        double[] vector = new double[getDimension()];
+
+        try {
+            List<Future<DrawResult>> results = pool.invokeAll(callers);
+
+            for (Future<DrawResult> result : results) {
+                DrawResult dr = result.get();
+                System.arraycopy(dr.result, 0, vector, dr.offset, dr.result.length);
+            }
+
+        } catch (InterruptedException e) {
+            e.printStackTrace();
+        } catch (ExecutionException e) {
+            e.printStackTrace();
+        }
+
+        return vector;
+    }
+
+    private Object nextRandomSerial() {
 
         int size = 0;
         List<double[]> randomList = new ArrayList<double[]>();
diff --git a/src/dr/math/distributions/DirichletDistribution.java b/src/dr/math/distributions/DirichletDistribution.java
index 9477e66..c7cfd4e 100644
--- a/src/dr/math/distributions/DirichletDistribution.java
+++ b/src/dr/math/distributions/DirichletDistribution.java
@@ -36,6 +36,10 @@ public class DirichletDistribution implements MultivariateDistribution {
     public static final String TYPE = "dirichletDistribution";
     public static final boolean DEBUG = false;
 
+    //4.0 != 3.9999999999999996
+    //Other BEAST classes use 1E-8 or 1E-6
+    public static final double ACCURACY_THRESHOLD = 1E-12;
+
     private double[] counts;
     private double countSum = 0.0;
     private double countParameterSum;
@@ -82,22 +86,23 @@ public class DirichletDistribution implements MultivariateDistribution {
             parameterSum += x[i];
             if ((!sumToNumberOfElements && x[i] >= 1.0) || x[i] <= 0.0) {
                 if (DEBUG) {
-                    System.err.println("Invalid parameter value");
+                    System.out.println("Invalid parameter value");
                 }
                 logPDF = Double.NEGATIVE_INFINITY;
                 break;
             }
         }
-        if (parameterSum != countParameterSum) {
+        if (Math.abs(parameterSum - countParameterSum) > ACCURACY_THRESHOLD) {
             if (DEBUG) {
-                System.err.println("Parameters do not sum to " + countParameterSum);
+                System.out.println("Parameters do not sum to " + countParameterSum);
                 for (int i = 0; i < dim; i++) {
-                    System.err.println("x[" + i + "] = " + x[i]);
+                    System.out.println("x[" + i + "] = " + x[i]);
                 }
-                System.err.println("Current sum = " + parameterSum);
+                System.out.println("Current sum = " + parameterSum);
             }
             logPDF = Double.NEGATIVE_INFINITY;
         }
+
         return logPDF;
     }
 
@@ -146,6 +151,24 @@ public class DirichletDistribution implements MultivariateDistribution {
         parameterValues[2] = 1.0;
         System.out.println(dd.logPdf(parameterValues));
 
+        counts = new double[4];
+        counts[0] = 1.0;
+        counts[1] = 1.0;
+        counts[2] = 1.0;
+        counts[3] = 1.0;
+        dd = new DirichletDistribution(counts, true);
+        parameterValues = new double[4];
+        parameterValues[0] = 0.5;
+        parameterValues[1] = 1.2;
+        parameterValues[2] = 1.3;
+        parameterValues[3] = 1.0;
+        System.out.println(dd.logPdf(parameterValues));
+        parameterValues[0] = 1.0;
+        parameterValues[1] = 1.0;
+        parameterValues[2] = 1.0;
+        parameterValues[3] = 1.0;
+        System.out.println(dd.logPdf(parameterValues));
+
     }
 
 }
diff --git a/src/dr/math/distributions/GaussianProcessRandomGenerator.java b/src/dr/math/distributions/GaussianProcessRandomGenerator.java
index 187f1b7..918c8bd 100644
--- a/src/dr/math/distributions/GaussianProcessRandomGenerator.java
+++ b/src/dr/math/distributions/GaussianProcessRandomGenerator.java
@@ -35,4 +35,8 @@ import dr.inference.model.Likelihood;
 public interface GaussianProcessRandomGenerator extends RandomGenerator {
     // Only implemented by Gaussian processes
     Likelihood getLikelihood();
+
+    int getDimension();
+
+    double[][] getPrecisionMatrix();
 }
\ No newline at end of file
diff --git a/src/dr/math/distributions/MultivariateNormalDistribution.java b/src/dr/math/distributions/MultivariateNormalDistribution.java
index 9a06851..17d3020 100644
--- a/src/dr/math/distributions/MultivariateNormalDistribution.java
+++ b/src/dr/math/distributions/MultivariateNormalDistribution.java
@@ -344,7 +344,16 @@ public class MultivariateNormalDistribution implements MultivariateDistribution,
         return logPdf(v);
     }
 
+    @Override
     public Likelihood getLikelihood() {
         return null;
     }
+
+    @Override
+    public int getDimension() { return mean.length; }
+
+    @Override
+    public double[][] getPrecisionMatrix() {
+        return precision;
+    }
 }
diff --git a/src/dr/math/distributions/NormalDistribution.java b/src/dr/math/distributions/NormalDistribution.java
index e545974..7b12ef4 100644
--- a/src/dr/math/distributions/NormalDistribution.java
+++ b/src/dr/math/distributions/NormalDistribution.java
@@ -486,6 +486,7 @@ public class NormalDistribution implements Distribution, RandomGenerator {
         testTail(8.25, 0.0, 1.0);
         System.out.println();
         testTail(10, 0.0, 1.0);
+        System.out.println(NormalDistribution.standardCDF(2.0 / 0.5, true));
     }
 
     // RandomGenerator interface
diff --git a/src/dr/util/Citable.java b/src/dr/util/Citable.java
index 2b1c27c..c1e703a 100644
--- a/src/dr/util/Citable.java
+++ b/src/dr/util/Citable.java
@@ -25,7 +25,9 @@
 
 package dr.util;
 
+import java.util.Collection;
 import java.util.List;
+import java.util.Map;
 
 /**
  * Interface for associating a list of citations with an object
@@ -35,20 +37,22 @@ import java.util.List;
 
 public interface Citable {
 
+    Citation.Category getCategory();
+
+    String getDescription();
+
     /**
      * @return a list of citations associated with this object
      */
     List<Citation> getCitations();
 
-    public class Utils {
-
+    class Utils {
         public static String getCitationString(Citable citable, String prepend, String postpend) {
-            List<Citation> citations = citable.getCitations();
-            if (citations == null || citations.size() == 0) {
+            if (citable.getCitations().size() == 0) {
                 return null;
             }
             StringBuilder builder = new StringBuilder();
-            for (Citation citation : citations) {
+            for (Citation citation : citable.getCitations()) {
                 builder.append(prepend);
                 builder.append(citation.toString());
                 builder.append(postpend);
diff --git a/src/dr/util/Citation.java b/src/dr/util/Citation.java
index f4b9e30..d134410 100644
--- a/src/dr/util/Citation.java
+++ b/src/dr/util/Citation.java
@@ -25,26 +25,28 @@
 
 package dr.util;
 
+import java.util.Arrays;
+
 /**
  * @author Alexei Drummond
  * @author Marc A. Suchard
+ * @author Andrew Rambaut
  */
 public class Citation {
 
-    Author[] authors;
-    String title;
-    int year;
-    String journal;
-    int volume;
-    int startpage;
-    int endpage;
-    Status status;
-
-    public Citation() {
-    }
+    private final Author[] authors;
+    private final String title;
+    private final int year;
+    private final String journal;
+    private final String location; // alternative for eJournal
+    private final int volume;
+    private final int startpage;
+    private final int endpage;
+    private final Status status;
+    private final String DOI;
 
     public Citation(Author[] authors, Status status) {
-        this(authors, null, -1, null, -1, -1, -1, status);
+        this(authors, null, null, status);
         if (status != Status.IN_PREPARATION) {
             throw new CitationException("Only citations in preparation may not contain titles or journals");
         }
@@ -52,14 +54,28 @@ public class Citation {
 
     public Citation(Author[] authors, String title, String journal,
                    Status status) {
-        this(authors, title, -1, journal, -1, -1, -1, status);
+        this(authors, title, -1, journal, -1, -1, -1, null, status);
         if (status == Status.PUBLISHED) {
             throw new CitationException("Published citations must have years, volumes and pages");
         }
     }
 
     public Citation(Author[] authors, String title, int year, String journal, int volume, int startpage, int endpage,
-                   Status status) {
+                    Status status) {
+        this(authors, title, year, journal, volume, startpage, endpage, null, status);
+    }
+
+    public Citation(Author[] authors, String title, int year, String journal, int volume, int startpage, int endpage) {
+        this(authors, title, year, journal, volume, startpage, endpage, null, Status.PUBLISHED);
+    }
+
+    public Citation(Author[] authors, String title, int year, String journal, int volume, int startpage, int endpage,
+                   String DOI) {
+        this(authors, title, year, journal, volume, startpage, endpage, DOI, Status.PUBLISHED);
+    }
+
+    public Citation(Author[] authors, String title, int year, String journal, int volume, int startpage, int endpage,
+                    String DOI, Status status) {
         this.authors = authors;
         this.title = title;
         this.year = year;
@@ -67,9 +83,38 @@ public class Citation {
         this.volume = volume;
         this.startpage = startpage;
         this.endpage = endpage;
+        this.location = null;
+        this.DOI = DOI;
         this.status = status;
     }
 
+    public Citation(Author[] authors, String title, int year, String journal, int volume, String location) {
+        this(authors, title, year, journal, volume, location, null);
+    }
+
+    public Citation(Author[] authors, String title, int year, String journal, String location) {
+        this(authors, title, year, journal, -1, location, null);
+    }
+
+    public Citation(Author[] authors, String title, int year, String journal, int volume, String location,
+                    String DOI) {
+        this.authors = authors;
+        this.title = title;
+        this.year = year;
+        this.journal = journal;
+        this.location = location;
+        this.volume = volume;
+        this.startpage = -1;
+        this.endpage = -1;
+        this.DOI = DOI;
+        this.status = Status.PUBLISHED;
+    }
+
+    public Citation(Author[] authors, String title, int year, String journal, String location,
+                    String DOI) {
+        this(authors, title, year, journal, -1, location, DOI);
+    }
+
     public String toString() {
         StringBuilder builder = new StringBuilder();
         builder.append(authors[0].toString());
@@ -92,16 +137,23 @@ public class Citation {
         }
         if (status == Status.PUBLISHED) {
              builder.append(". ");
-            builder.append(volume);
-            builder.append(", ");
-            builder.append(startpage);
-            if (endpage > 0) builder.append("-").append(endpage);
+            if (location != null) {
+                builder.append(location);
+            } else {
+                builder.append(volume);
+                builder.append(", ");
+                builder.append(startpage);
+                if (endpage > 0) builder.append("-").append(endpage);
+            }
+
+            if (DOI != null) {
+                builder.append(". DOI:" + DOI);
+            }
         }
         return builder.toString();
     }
 
     public String toHTML() {
-
         StringBuilder builder = new StringBuilder();
         builder.append("<html>");
         builder.append(authors[0].toString());
@@ -112,14 +164,65 @@ public class Citation {
         builder.append(" (").append(year).append(") ");
         builder.append(title).append(". ");
         builder.append("<i>").append(journal).append("</i>");
-        builder.append(" <b>").append(volume).append("</b>:");
-        builder.append(startpage);
-        if (endpage > 0) builder.append("-").append(endpage);
+        if (location != null) {
+            builder.append(" ").append(location);
+        } else {
+            builder.append(" <b>").append(volume).append("</b>:");
+            builder.append(startpage);
+            if (endpage > 0) builder.append("-").append(endpage);
+        }
+        if (DOI != null) {
+            builder.append(" <a href=\"http://doi.org/").append(DOI).append("\">DOI:").append(DOI).append("</a>");
+        }
         builder.append("</html>");
 
         return builder.toString();
     }
 
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+
+        Citation citation = (Citation) o;
+
+        if (year != citation.year) return false;
+        if (volume != citation.volume) return false;
+        if (startpage != citation.startpage) return false;
+        if (endpage != citation.endpage) return false;
+        // Probably incorrect - comparing Object[] arrays with Arrays.equals
+        if (!Arrays.equals(authors, citation.authors)) return false;
+        if (!title.equals(citation.title)) return false;
+        if (journal != null ? !journal.equals(citation.journal) : citation.journal != null) return false;
+        if (location != null ? !location.equals(citation.location) : citation.location != null) return false;
+        if (status != citation.status) return false;
+        return DOI != null ? DOI.equals(citation.DOI) : citation.DOI == null;
+
+    }
+
+    @Override
+    public int hashCode() {
+        int result = Arrays.hashCode(authors);
+        result = 31 * result + title.hashCode();
+        result = 31 * result + year;
+        result = 31 * result + (journal != null ? journal.hashCode() : 0);
+        result = 31 * result + (location != null ? location.hashCode() : 0);
+        result = 31 * result + volume;
+        result = 31 * result + startpage;
+        result = 31 * result + endpage;
+        result = 31 * result + status.hashCode();
+        result = 31 * result + (DOI != null ? DOI.hashCode() : 0);
+        return result;
+    }
+
+    public Status getStatus() {
+        return status;
+    }
+
+    public String getDOI() {
+        return DOI;
+    }
+
     public enum Status {
         IN_PREPARATION("in preparation"),
         IN_SUBMISSION("in submission"),
@@ -138,8 +241,27 @@ public class Citation {
         private final String text;
     }
 
-    class CitationException extends RuntimeException {
+    public enum Category {
+        FRAMEWORK("Framework"),
+        SUBSTITUTION_MODELS("Substitution Models"),
+        PRIOR_MODELS("Prior Models"),
+        TRAIT_MODELS("Trait Models"),
+        DATA_MODELS("Data Models"),
+        SPECIES_MODELS("Species Models"),
+        COUNTING_PROCESSES("Counting Processes"), // TODO Decide where MarkovJumpsBTL goes (multiple categories?)
+        TREE_PRIORS("Tree Density Models"),
+        MOLECULAR_CLOCK("Molecular Clock Models"),
+        MISC("Misc"); // Try to avoid this category
+
 
+        Category(String text) { this.text = text; }
+
+        public String toString() { return text; }
+
+        private final String text;
+    }
+
+    class CitationException extends RuntimeException {
         CitationException(String message) {
             super(message);
         }
diff --git a/src/dr/util/CommonCitations.java b/src/dr/util/CommonCitations.java
index 67b66c0..1635e6d 100644
--- a/src/dr/util/CommonCitations.java
+++ b/src/dr/util/CommonCitations.java
@@ -33,7 +33,56 @@ package dr.util;
  */
 public class CommonCitations {
 
-    public static Citation LEMEY_2010 = new Citation(
+    public static Citation LEMEY_2012_RENAISSANCE = new Citation(
+            new Author[] {
+                    new Author("P", "Lemey"),
+                    new Author("VN", "Minin"),
+                    new Author("F", "Bielejec"),
+                    new Author("SL", "Kosakovsky-Pond"),
+                    new Author("MA", "Suchard"),
+            },
+            "A counting renaissance: combining stochastic mapping and empirical Bayes to quickly detect amino acid sites under positive selection",
+            2012,
+            "Bioinformatics",
+            28,
+            3248, 3256,
+            Citation.Status.PUBLISHED
+    );
+
+    public static Citation LEMEY_2009_BAYESIAN = new Citation(
+            new Author[] {
+                    new Author("P", "Lemey"),
+                    new Author("A", "Rambaut"),
+                    new Author("AJ", "Drummond"),
+            },
+            "Bayesian phylogeography finds its roots",
+            2009,
+            "PLoS Computational Biology",
+            5,
+            "e1000520"
+    );
+
+    public static Citation BEDFORD_2015_INTEGRATING = new Citation(
+            new Author[]{
+                    new Author("T", "Bedford"),
+                    new Author("MA", "Suchard"),
+                    new Author("P", "Lemey"),
+                    new Author("G", "Dudas"),
+                    new Author("V", "Gregory"),
+                    new Author("AJ", "Hay"),
+                    new Author("JW", "McCauley"),
+                    new Author("CA", "Russell"),
+                    new Author("DJ", "Smith"),
+                    new Author("A", "Rambaut")
+            },
+            "Integrating influenza antigenic dynamics with molecular evolution",
+            2015,
+            "eLife",
+            "e01914",
+            "10.7554/eLife.01914"
+    );
+
+    public static Citation LEMEY_2010_PHYLOGEOGRAPHY = new Citation(
             new Author[]{
                     new Author("P", "Lemey"),
                     new Author("A", "Rambaut"),
@@ -48,7 +97,7 @@ public class CommonCitations {
             Citation.Status.PUBLISHED
     );
 
-    public static Citation OBRIEN_2009 = new Citation(
+    public static Citation OBRIEN_2009_LEARNING = new Citation(
             new Author[]{
                     new Author("JB", "O'Brien"),
                     new Author("VN", "Minin"),
@@ -94,16 +143,82 @@ public class CommonCitations {
             Citation.Status.IN_PREPARATION
     );
 
-    public static Citation LEMEY_2012 = new Citation(
+    public static Citation CYBIS_2015_ASSESSING = new Citation(
             new Author[]{
-                    new Author("P", "Lemey"),
+                    new Author("GB", "Cybis"),
+                    new Author("JS", "Sinsheimer"),
                     new Author("T", "Bedford"),
+                    new Author("AE", "Mather"),
+                    new Author("P", "Lemey"),
+                    new Author("MA", "Suchard"),
+            },
+            "Assessing phenotypic correlation through the multivariate phylogenetic latent liability model",
+            2015,
+            "Annals of Applied Statistics",
+            9,
+            969, 991,
+            Citation.Status.PUBLISHED
+    );
+
+    public static Citation LEMEY_2014_UNIFYING = new Citation(
+            new Author[] {
+                    new Author("P", "Lemey"),
                     new Author("A", "Rambaut"),
+                    new Author("T", "Bedford"),
+                    new Author("C", "Thiemann"),
+                    new Author("D", "Grady"),
+                    new Author("F", "Bielejec"),
+                    new Author("G", "Baele"),
+                    new Author("C", "Russell"),
+                    new Author("D", "Smith"),
+                    new Author("D", "Brockman"),
                     new Author("MA", "Suchard"),
             },
-            Citation.Status.IN_PREPARATION
+            "Unifying viral genetics and human transportation data to predict the global transmission dynamics of human influenza H3N2",
+            2014,
+            "PLoS Pathogens",
+            10,
+            "e100392"
+    );
+
+    public static Citation MININ_2008_COUNTING = new Citation(
+            new Author[] {
+                    new Author("VN", "Minin"),
+                    new Author("MA", "Suchard"),
+            },
+            "Counting labeled transitions in continuous-time Markov models of evolution",
+            2008,
+            "Journal of Mathematical Biology",
+            56,
+            391, 412,
+            Citation.Status.PUBLISHED
+    );
+
+    public static Citation MININ_2008_FAST = new Citation(
+            new Author[]{
+                    new Author("VN", "Minin"),
+                    new Author("MA", "Suchard"),
+            },
+            "Fast, accurate and simulation-free stochastic mapping",
+            2008,
+            "Philos Trans R Soc Lond B Biol Sci",
+            363,
+            3985, 3995,
+            Citation.Status.PUBLISHED
     );
 
+//    Minin VN, Suchard MA (2008) . Philos Trans R Soc Lond B Biol Sci 363(1512):3985–3995.
+
+//    public static Citation LEMEY_2012 = new Citation(
+//            new Author[]{
+//                    new Author("P", "Lemey"),
+//                    new Author("T", "Bedford"),
+//                    new Author("A", "Rambaut"),
+//                    new Author("MA", "Suchard"),
+//            },
+//            Citation.Status.IN_PREPARATION
+//    );
+
     public static Citation LEMEY_MIXTURE_2012 = new Citation(
             new Author[]{
                     new Author("P", "Lemey"),
@@ -112,14 +227,22 @@ public class CommonCitations {
             Citation.Status.IN_PREPARATION
     );
 
-    public static Citation BLOOM_2012 = new Citation(
+    public static Citation BLOOM_2013_STABILITY = new Citation(
             new Author[]{
                     new Author("J", "Bloom"),
+                    new Author("LI", "Gong"),
                     new Author("MA", "Suchard"),
             },
-            Citation.Status.IN_PREPARATION
+            "Stability-mediated epistasis constrains the evolution of an influenza protein",
+            2013,
+            "eLife",
+            2,
+            "e00631"
     );
 
+
+//    Gong LI, Suchard MA, Bloom JD. Stability-mediated epistasis constrains the evolution of an influenza protein. eLife, 2, e00631, 2013.
+
     public static Citation SUCHARD_2012_LATENT = new Citation(
             new Author[]{
                     new Author("MA", "Suchard"),
@@ -127,4 +250,68 @@ public class CommonCitations {
             },
             Citation.Status.IN_PREPARATION
     );
+
+    public static Citation SUCHARD_GENERIC = new Citation(
+            new Author[]{
+                    new Author("MA", "Suchard"),
+            },
+            Citation.Status.IN_PREPARATION
+    );
+
+    public static Citation EDWARDS_2011_ANCIENT = new Citation(
+            new Author[] {
+                    new Author("CJ", "Edwards"),
+                    new Author("MA", "Suchard"),
+                    new Author("P", "Lemey"),
+                    new Author("JJ", "Welch"),
+                    new Author("I", "Barnes"),
+                    new Author("TL", "Fulton"),
+                    new Author("R", "Barnett"),
+                    new Author("TC", "O'Connell"),
+                    new Author("P", "Coxon"),
+                    new Author("N", "Monaghan"),
+                    new Author("CE", "Valdiosera"),
+                    new Author("ED", "Lorenzen"),
+                    new Author("E", "Willerslev"),
+                    new Author("GF", "Baryshnikov"),
+                    new Author("A", "Rambaut"),
+                    new Author("MG", "Thomas"),
+                    new Author("DG", "Bradley"),
+                    new Author("B", "Shapiro"),
+            },
+            "Ancient hybridization and an Irish origin for the modern polar bear matriline",
+            2011,
+            "Current Biology",
+            21,
+            1251, 1258,
+            Citation.Status.PUBLISHED
+    );
+
+    public static Citation AYRES_2012_BEAGLE = new Citation(
+            new Author[]{
+                    new Author("", "Ayres et al"),
+            },
+            "BEAGLE: a common application programming inferface and high-performance computing library for statistical phylogenetics",
+            2012,
+            "Syst Biol",
+            61, 170, 173,
+            "10.1093/sysbio/syr100");
+
+    public static Citation VRANCKEN_2015_SIMULTANEOUSLY = new Citation(
+            new Author[] {
+                    new Author("B", "Vrancken"),
+                    new Author("P", "Lemey"),
+                    new Author("B", "Longdon"),
+                    new Author("A", "Rambaut"),
+                    new Author("T", "Bedford"),
+                    new Author("H", "Gunthard"),
+                    new Author("MA", "Suchard"),
+            },
+            "Simultaneously estimating evolutionary history and repeated traits phylogenetic signal: applications to viral phenotypic evolution",
+            2015,
+            "Methods in Ecology and Evolution",
+            6,
+            67, 82,
+            Citation.Status.PUBLISHED
+    );
 }
diff --git a/src/dr/util/MessageLogHandler.java b/src/dr/util/MessageLogHandler.java
index 8be12c0..49024a4 100644
--- a/src/dr/util/MessageLogHandler.java
+++ b/src/dr/util/MessageLogHandler.java
@@ -25,12 +25,18 @@
 
 package dr.util;
 
+import java.io.OutputStream;
 import java.util.logging.*;
 
 public class MessageLogHandler extends StreamHandler {
 
 	public MessageLogHandler() {
-		setOutputStream(System.out);
+		this(System.out);
+		setFormatter(new MessageLogFormatter());
+	}
+
+	public MessageLogHandler(OutputStream stream) {
+		setOutputStream(stream);
 		setFormatter(new MessageLogFormatter());
 	}
 
diff --git a/src/dr/app/beauti/types/RelativeRatesType.java b/src/dr/util/Pair.java
similarity index 52%
rename from src/dr/app/beauti/types/RelativeRatesType.java
rename to src/dr/util/Pair.java
index 88a26ce..5ff78ef 100644
--- a/src/dr/app/beauti/types/RelativeRatesType.java
+++ b/src/dr/util/Pair.java
@@ -1,7 +1,7 @@
 /*
- * RelativeRatesType.java
+ * Pair.java
  *
- * Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
+ * Copyright (c) 2002-2016 Alexei Drummond, Andrew Rambaut and Marc Suchard
  *
  * This file is part of BEAST.
  * See the NOTICE file distributed with this work for additional
@@ -23,23 +23,36 @@
  * Boston, MA  02110-1301  USA
  */
 
-package dr.app.beauti.types;
+package dr.util;
 
 /**
- * @author Alexei Drummond
- * @author Walter Xie
+ * Date: 15/06/2016
+ * Time: 17:57
+ *
+ * @author rambaut
  */
-public enum RelativeRatesType {
-	MU_RELATIVE_RATES("Codon relative rates"),
-    CLOCK_RELATIVE_RATES("Clock relative rates");
+public class Pair<A, B> {
+    public final A fst;
+    public final B snd;
 
-	RelativeRatesType(String name) {
-        this.name = name;
+    public Pair(A var1, B var2) {
+        this.fst = var1;
+        this.snd = var2;
     }
 
     public String toString() {
-        return name;
+        return "Pair[" + this.fst + "," + this.snd + "]";
+    }
+
+    private boolean equals(Object var0, Object var1) {
+        return var0 == null && var1 == null || var0 != null && var0.equals(var1);
     }
 
-    private final String name;
+    public boolean equals(Object var1) {
+        return var1 instanceof Pair && equals(this.fst, ((Pair) var1).fst) && equals(this.snd, ((Pair) var1).snd);
+    }
+
+    public int hashCode() {
+        return this.fst == null ? (this.snd == null ? 0 : this.snd.hashCode() + 1) : (this.snd == null ? this.fst.hashCode() + 2 : this.fst.hashCode() * 17 + this.snd.hashCode());
+    }
 }
diff --git a/src/dr/util/Transform.java b/src/dr/util/Transform.java
index e9cecd1..8140383 100644
--- a/src/dr/util/Transform.java
+++ b/src/dr/util/Transform.java
@@ -90,7 +90,7 @@ public interface Transform {
     public double getLogJacobian(double[] values, int from, int to);
 
 
-    public static class LogTransform implements Transform, Citable {
+    public static class LogTransform implements Transform {
 
         public LogTransform() {
         }
@@ -122,22 +122,9 @@ public interface Transform {
         public double getLogJacobian(double[] values, int from, int to) {
             throw new RuntimeException("Transformation not permitted for this type of parameter, exiting ...");
         }
-
-        public List<Citation> getCitations() {
-            List<Citation> citations = new ArrayList<Citation>();
-            citations.add(new Citation(
-                    new Author[]{
-                            new Author("MA", "Suchard"),
-                            new Author("G", "Baele"),
-                            new Author("P", "Lemey"),
-                    },
-                    Citation.Status.IN_PREPARATION
-                    ));
-            return citations;
-        }
     }
 
-    public static class LogConstrainedSumTransform implements Transform, Citable {
+    public static class LogConstrainedSumTransform implements Transform {
 
         public LogConstrainedSumTransform() {
         }
@@ -193,19 +180,6 @@ public interface Transform {
             return sum;
         }
 
-        public List<Citation> getCitations() {
-            List<Citation> citations = new ArrayList<Citation>();
-            citations.add(new Citation(
-                    new Author[]{
-                            new Author("MA", "Suchard"),
-                            new Author("G", "Baele"),
-                            new Author("P", "Lemey"),
-                    },
-                    Citation.Status.IN_PREPARATION
-            ));
-            return citations;
-        }
-
         public static void main(String[] args) {
 
             //specify starting values
diff --git a/src/dr/xml/AbstractXMLObjectParser.java b/src/dr/xml/AbstractXMLObjectParser.java
index 672750e..fdc7978 100644
--- a/src/dr/xml/AbstractXMLObjectParser.java
+++ b/src/dr/xml/AbstractXMLObjectParser.java
@@ -32,10 +32,11 @@ import java.io.PrintWriter;
 import java.io.StringWriter;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 
 public abstract class AbstractXMLObjectParser implements XMLObjectParser {
 
-    public final Object parseXMLObject(XMLObject xo, String id, ObjectStore store, boolean strictXML)
+    public final Object parseXMLObject(XMLObject xo, String id, Map<String, XMLObject> store, boolean strictXML)
             throws XMLParseException {
 
         this.store = store;
@@ -197,7 +198,7 @@ public abstract class AbstractXMLObjectParser implements XMLObjectParser {
         return null;
     }
 
-    public final ObjectStore getStore() {
+    public final Map<String, XMLObject> getStore() {
         return store;
     }
 
@@ -309,5 +310,5 @@ public abstract class AbstractXMLObjectParser implements XMLObjectParser {
         return (rules != null && rules.length > 0);
     }
 
-    private ObjectStore store = null;
+    private Map<String, XMLObject>  store = null;
 }
diff --git a/src/dr/xml/XMLObjectParser.java b/src/dr/xml/XMLObjectParser.java
index cb3e2c3..17828e0 100644
--- a/src/dr/xml/XMLObjectParser.java
+++ b/src/dr/xml/XMLObjectParser.java
@@ -25,6 +25,8 @@
 
 package dr.xml;
 
+import java.util.Map;
+
 public interface XMLObjectParser {
 
     /**
@@ -36,7 +38,7 @@ public interface XMLObjectParser {
     /**
      * @param store contains all named objects that have already been parsed.
      */
-    Object parseXMLObject(XMLObject xo, String id, ObjectStore store, boolean strictXML) throws XMLParseException;
+    Object parseXMLObject(XMLObject xo, String id, Map<String, XMLObject> store, boolean strictXML) throws XMLParseException;
 
     /**
      *
diff --git a/src/dr/xml/XMLParser.java b/src/dr/xml/XMLParser.java
index ec4bc83..b870658 100644
--- a/src/dr/xml/XMLParser.java
+++ b/src/dr/xml/XMLParser.java
@@ -29,8 +29,7 @@ import dr.inference.model.Likelihood;
 import dr.inference.model.Model;
 import dr.inference.model.Parameter;
 import dr.inferencexml.loggers.LoggerParser;
-import dr.util.FileHelpers;
-import dr.util.Identifiable;
+import dr.util.*;
 import org.w3c.dom.*;
 import org.xml.sax.InputSource;
 import org.xml.sax.SAXException;
@@ -108,7 +107,7 @@ public class XMLParser {
 
         XMLObject xo = new XMLObject(null /*, objectStore*/);
         xo.setNativeObject(object);
-        store.put(name, xo);
+        objectStore.put(name, xo);
     }
 
     /**
@@ -147,7 +146,7 @@ public class XMLParser {
         }
     }
 
-    public ObjectStore parse(Reader reader, boolean run)
+    public Map<String, XMLObject> parse(Reader reader, boolean run)
             throws java.io.IOException,
             org.xml.sax.SAXException,
             dr.xml.XMLParseException,
@@ -218,7 +217,7 @@ public class XMLParser {
             }
 
 
-            XMLObject restoredXMLObject = (XMLObject) store.get(idref);
+            XMLObject restoredXMLObject = (XMLObject) objectStore.get(idref);
             if (index != -1) {
 
                 if (restoredXMLObject.getNativeObject() instanceof List) {
@@ -267,7 +266,7 @@ public class XMLParser {
                 repeats = Integer.parseInt(e.getAttribute("count"));
             }
 
-            XMLObject xo = new XMLObject(e /*, objectStore*/);
+            XMLObject xo = new XMLObject(e);
 
             final XMLObjectParser parser = doParse ? parserStore.get(xo.getName()) : null;
 
@@ -308,7 +307,7 @@ public class XMLParser {
                 id = e.getAttribute(ID);
             }
 
-            if ((id != null) && store.get(id) != null) {
+            if ((id != null) && objectStore.get(id) != null) {
                 throw new XMLParseException("Object with Id=" + id + " already exists");
             }
 
@@ -316,10 +315,14 @@ public class XMLParser {
             if (parser != null) {
                 obj = parser.parseXMLObject(xo, id, objectStore, strictXML);
 
-                if (id != null && obj instanceof Identifiable) {
+                if (obj instanceof Identifiable) {
                     ((Identifiable) obj).setId(id);
                 }
 
+                if (obj instanceof Citable) {
+                    addCitable((Citable)obj);
+                }
+
                 if (obj instanceof Likelihood) {
                     Likelihood.FULL_LIKELIHOOD_SET.add((Likelihood) obj);
                 } else if (obj instanceof Model) {
@@ -334,7 +337,7 @@ public class XMLParser {
             if (id != null) {
                 if (verbose) System.out.println("  Storing " + xo.getName() + " with id=" + id);
 
-                store.put(id, xo);
+                objectStore.put(id, xo);
             }
 
             if (run) {
@@ -353,6 +356,9 @@ public class XMLParser {
                         waitForThread((Thread) thread1);
                     }
                 } else if (obj instanceof Runnable && !concurrent) {
+
+                    executingRunnable();
+
                     if (obj instanceof Spawnable && !((Spawnable) obj).getSpawnable()) {
                         ((Spawnable) obj).run();
                     } else {
@@ -369,6 +375,14 @@ public class XMLParser {
         }
     }
 
+    protected void executingRunnable() {
+        // do nothing - for overriding by subclasses
+    }
+
+    public Map<Pair<String, String>, List<Citation>> getCitationStore() {
+        return citationStore;
+    }
+
     public static FileReader getFileReader(XMLObject xo, String attributeName) throws XMLParseException {
         if (xo.hasAttribute(attributeName)) {
             final File inFile = getFileHandle(xo, attributeName);
@@ -467,6 +481,9 @@ public class XMLParser {
         return logFile;
     }
 
+    public Map<String, XMLObject> getObjectStore() {
+        return objectStore;
+    }
 
     public class ArrayParser extends AbstractXMLObjectParser {
 
@@ -514,46 +531,62 @@ public class XMLParser {
         }
     }
 
-    //anonymous object store class
-    private final ObjectStore objectStore = new ObjectStore() {
-        public Object getObjectById(Object uid) throws ObjectNotFoundException {
-            XMLObject obj = (XMLObject) store.get(uid);
-            if (obj == null) throw new ObjectNotFoundException("Object with uid=" + uid + " not found in ObjectStore");
-            if (obj.hasNativeObject()) return obj.getNativeObject();
-            return obj;
-        }
-
-        public boolean hasObjectId(Object uid) {
-            Object obj = store.get(uid);
-            return (obj != null);
-        }
-
-        public Set getIdSet() {
-            return store.keySet();
-        }
-
-        public Collection getObjects() {
-            return store.values();
-        }
-
-        public void addIdentifiableObject(Identifiable obj, boolean force) {
-
-            String id = obj.getId();
-            if (id == null) throw new IllegalArgumentException("Object must have a non-null identifier.");
-
-            if (force) {
-                store.put(id, obj);
-            } else {
-                if (store.get(id) == null) {
-                    store.put(id, obj);
-                }
+//    //anonymous object store class
+//    private final ObjectStore objectStore = new ObjectStore() {
+//        public Object getObjectById(Object uid) throws ObjectNotFoundException {
+//            XMLObject obj = (XMLObject) store.get(uid);
+//            if (obj == null) throw new ObjectNotFoundException("Object with uid=" + uid + " not found in ObjectStore");
+//            if (obj.hasNativeObject()) return obj.getNativeObject();
+//            return obj;
+//        }
+//
+//        public boolean hasObjectId(Object uid) {
+//            Object obj = store.get(uid);
+//            return (obj != null);
+//        }
+//
+//        public Set getIdSet() {
+//            return store.keySet();
+//        }
+//
+//        public Collection getObjects() {
+//            return store.values();
+//        }
+//
+////        public void addIdentifiableObject(Identifiable obj, boolean force) {
+////
+////            String id = obj.getId();
+////            if (id == null) throw new IllegalArgumentException("Object must have a non-null identifier.");
+////
+////            if (force) {
+////                store.put(id, obj);
+////            } else {
+////                if (store.get(id) == null) {
+////                    store.put(id, obj);
+////                }
+////            }
+////        }
+//    };
+
+    public void addCitable(Citable citable) {
+        // remove 'In prep' citations
+        List<Citation> citationList = new LinkedList<Citation>();
+        for (Citation citation : citable.getCitations()) {
+            if (citation.getStatus() != Citation.Status.IN_PREPARATION) {
+                citationList.add(citation);
             }
         }
-    };
-
+        if (citationList.size() > 0) {
+            Pair<String, String> pair = new Pair<String, String>(citable.getCategory().toString(),
+                    citable.getDescription());
+            citationStore.put(pair, citationList);
+        }
+    }
 
-    private final Hashtable<String, Object> store = new Hashtable<String, Object>();
-    private final TreeMap<String, XMLObjectParser> parserStore = new TreeMap<String, XMLObjectParser>(new ParserComparator());
+    //    private final Hashtable<String, XMLObject> store = new Hashtable<String, XMLObject>();
+    private final Map<String, XMLObjectParser> parserStore = new TreeMap<String, XMLObjectParser>(new ParserComparator());
+    private final Map<String, XMLObject> objectStore = new LinkedHashMap<String, XMLObject>();
+    private final Map<Pair<String, String>, List<Citation>> citationStore = new LinkedHashMap<Pair<String, String>, List<Citation>>();
     private boolean concurrent = false;
     private XMLObject root = null;
 
@@ -596,6 +629,7 @@ public class XMLParser {
             return name1.compareTo(name2);
         }
     }
+
 }
 
 
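
Taken together, the XMLParser changes above collect publishable references as a side effect of parsing: addCitable() drops Citation.Status.IN_PREPARATION entries and files the rest under a (category, description) pair in citationStore, which is exposed through getCitationStore(). A hedged usage sketch, not part of the patch (CitationReport is a placeholder name, and Citation is only assumed to format itself readably):

    // Usage sketch, not from the patch: prints every citation collected while parsing.
    // Assumes an XMLParser instance that has already run parse(); relies only on the
    // getCitationStore() accessor added above.
    import dr.util.Citation;
    import dr.xml.XMLParser;
    import java.util.List;

    final class CitationReport {
        static void print(XMLParser parser) {
            for (List<Citation> citations : parser.getCitationStore().values()) {
                for (Citation citation : citations) {
                    System.out.println(citation);  // assumes Citation prints a readable reference
                }
            }
        }
    }
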
diff --git a/src/test/dr/app/beagle/AncestralStateBeagleTreeLikelihoodTest.java b/src/test/dr/app/beagle/AncestralStateBeagleTreeLikelihoodTest.java
index 0912c23..8613c59 100644
--- a/src/test/dr/app/beagle/AncestralStateBeagleTreeLikelihoodTest.java
+++ b/src/test/dr/app/beagle/AncestralStateBeagleTreeLikelihoodTest.java
@@ -102,6 +102,7 @@ public class AncestralStateBeagleTreeLikelihoodTest extends TraceCorrelationAsse
                 null,
                 false,
                 PartialsRescalingScheme.DEFAULT,
+                true,
                 null,
                 hky.getDataType(),
                 "stateTag",
diff --git a/src/test/dr/app/beagle/MarkovJumpsTest.java b/src/test/dr/app/beagle/MarkovJumpsTest.java
index 1908c98..1c6af9f 100644
--- a/src/test/dr/app/beagle/MarkovJumpsTest.java
+++ b/src/test/dr/app/beagle/MarkovJumpsTest.java
@@ -73,6 +73,7 @@ public class MarkovJumpsTest extends TraceCorrelationAssert {
                 null,
                 false,
                 PartialsRescalingScheme.AUTO,
+                true,
                 null,
                 hky.getDataType(),
                 "stateTag",

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/beast-mcmc.git


