[med-svn] [htsjdk] 01/10: Imported Upstream version 2.5.0

Andreas Tille tille at debian.org
Wed Jun 29 12:18:42 UTC 2016


This is an automated email from the git hooks/post-receive script.

tille pushed a commit to branch master
in repository htsjdk.

commit c9883b4e9700300c6702d78b53c141d6af0280a4
Author: Andreas Tille <tille at debian.org>
Date:   Wed Jun 29 09:25:26 2016 +0200

    Imported Upstream version 2.5.0
---
 .classpath                                         |   10 -
 .github/PULL_REQUEST_TEMPLATE.md                   |    2 +
 .gitignore                                         |   25 +-
 .idea/.name                                        |    1 -
 .idea/compiler.xml                                 |   23 -
 .idea/copyright/profiles_settings.xml              |    3 -
 .idea/encodings.xml                                |    4 -
 .idea/modules.xml                                  |    9 -
 .idea/modules/htsjdk-build.iml                     |   81 -
 .idea/modules/htsjdk.iml                           |   44 -
 .idea/sbt.xml                                      |   22 -
 .idea/scala_compiler.xml                           |    6 -
 .idea/scopes/scope_settings.xml                    |    5 -
 .idea/vcs.xml                                      |    7 -
 .project                                           |   17 -
 .travis.yml                                        |   23 +-
 README.md                                          |   67 +-
 build.gradle                                       |  226 +
 build.sbt                                          |  154 -
 build.xml                                          |  281 +-
 etc/test/junit-noframes.xsl                        |  581 ---
 etc/test/testng.css                                |   26 -
 gradle/wrapper/gradle-wrapper.jar                  |  Bin 0 -> 53556 bytes
 gradle/wrapper/gradle-wrapper.properties           |    6 +
 gradlew                                            |  164 +
 htsjdk.iml                                         |   66 -
 htsjdk.ipr                                         |  352 --
 project/plugins.sbt                                |    5 -
 {src/scripts => scripts}/explain_sam_flags.py      |    0
 {src/scripts => scripts}/release_picard.sh         |    0
 src/c/inteldeflater/IntelDeflater.c                |  392 --
 src/c/inteldeflater/README                         |    7 -
 src/c/inteldeflater/igzip_lib.h                    |   50 -
 src/c/inteldeflater/internal_state_size.h          |   32 -
 src/c/inteldeflater/types.h                        |   46 -
 src/java/htsjdk/samtools/BAMFileReader.java        |  989 ----
 src/java/htsjdk/samtools/BAMFileWriter.java        |  193 -
 src/java/htsjdk/samtools/CRAMFileReader.java       |  625 ---
 src/java/htsjdk/samtools/CRAMIterator.java         |  306 --
 src/java/htsjdk/samtools/Defaults.java             |  186 -
 src/java/htsjdk/samtools/SAMFileReader.java        |  751 ---
 src/java/htsjdk/samtools/SAMFileWriterFactory.java |  465 --
 src/java/htsjdk/samtools/SAMFlag.java              |  110 -
 src/java/htsjdk/samtools/SAMLineParser.java        |  469 --
 src/java/htsjdk/samtools/SAMRecordSetBuilder.java  |  577 ---
 src/java/htsjdk/samtools/SAMTextWriter.java        |  188 -
 src/java/htsjdk/samtools/SRAFileReader.java        |  306 --
 src/java/htsjdk/samtools/SRAIterator.java          |  248 -
 src/java/htsjdk/samtools/SamFileValidator.java     |  856 ----
 src/java/htsjdk/samtools/SamFiles.java             |   81 -
 src/java/htsjdk/samtools/SamInputResource.java     |  360 --
 src/java/htsjdk/samtools/SamReaderFactory.java     |  507 --
 src/java/htsjdk/samtools/SamStreams.java           |   99 -
 src/java/htsjdk/samtools/cram/CRAIEntry.java       |  177 -
 src/java/htsjdk/samtools/cram/CRAIIndex.java       |  161 -
 .../htsjdk/samtools/cram/common/CramVersions.java  |   11 -
 .../samtools/cram/io/ExternalCompression.java      |  151 -
 .../htsjdk/samtools/cram/ref/ReferenceSource.java  |  274 --
 .../samtools/cram/structure/ContainerHeaderIO.java |   88 -
 .../htsjdk/samtools/example/PrintReadsExample.java |  104 -
 .../htsjdk/samtools/fastq/FastqWriterFactory.java  |   31 -
 src/java/htsjdk/samtools/liftover/LiftOver.java    |  292 --
 src/java/htsjdk/samtools/metrics/MetricsFile.java  |  592 ---
 .../seekablestream/SeekableMemoryStream.java       |   64 -
 .../seekablestream/SeekableStreamFactory.java      |   95 -
 src/java/htsjdk/samtools/sra/ReferenceCache.java   |   79 -
 src/java/htsjdk/samtools/sra/SRAAccession.java     |  136 -
 .../htsjdk/samtools/sra/SRAAlignmentIterator.java  |  194 -
 .../samtools/sra/SRAIndexedSequenceFile.java       |  121 -
 .../samtools/util/BlockCompressedOutputStream.java |  336 --
 src/java/htsjdk/samtools/util/Histogram.java       |  480 --
 src/java/htsjdk/samtools/util/Objects.java         |   12 -
 src/java/htsjdk/samtools/util/OverlapDetector.java |  126 -
 .../htsjdk/samtools/util/SamLocusIterator.java     |  552 ---
 .../htsjdk/samtools/util/SortingCollection.java    |  532 ---
 .../htsjdk/samtools/util/zip/DeflaterFactory.java  |   78 -
 .../htsjdk/samtools/util/zip/IntelDeflater.java    |  567 ---
 src/java/htsjdk/samtools/util/zip/ZStreamRef.java  |   48 -
 src/java/htsjdk/tribble/AsciiFeatureCodec.java     |   97 -
 src/java/htsjdk/tribble/TabixFeatureReader.java    |  221 -
 .../tribble/readers/AsynchronousLineReader.java    |   98 -
 .../htsjdk/tribble/readers/LineReaderUtil.java     |   94 -
 src/java/htsjdk/variant/bcf2/BCF2Codec.java        |  494 --
 .../variant/example/PrintVariantsExample.java      |  105 -
 .../variantcontext/VariantContextBuilder.java      |  500 --
 .../writer/VariantContextWriterBuilder.java        |  525 --
 .../writer/VariantContextWriterFactory.java        |  282 --
 .../variant/vcf/VCFHeaderLineTranslator.java       |  157 -
 .../java/htsjdk/samtools/AbstractBAMFileIndex.java |    0
 .../htsjdk/samtools/AbstractSAMHeaderRecord.java   |    0
 .../java/htsjdk/samtools/AlignmentBlock.java       |    0
 .../java/htsjdk/samtools/AsyncSAMFileWriter.java   |    0
 .../java/htsjdk/samtools/BAMFileConstants.java     |    0
 src/main/java/htsjdk/samtools/BAMFileReader.java   |  913 ++++
 .../java/htsjdk/samtools/BAMFileSpan.java          |    0
 src/main/java/htsjdk/samtools/BAMFileWriter.java   |  200 +
 src/{ => main}/java/htsjdk/samtools/BAMIndex.java  |    0
 .../java/htsjdk/samtools/BAMIndexContent.java      |    0
 .../java/htsjdk/samtools/BAMIndexMetaData.java     |    0
 .../java/htsjdk/samtools/BAMIndexWriter.java       |    0
 .../java/htsjdk/samtools/BAMIndexer.java           |    0
 .../java/htsjdk/samtools/BAMIteratorFilter.java    |   26 +
 .../BAMQueryMultipleIntervalsIteratorFilter.java   |   59 +
 src/{ => main}/java/htsjdk/samtools/BAMRecord.java |    0
 .../java/htsjdk/samtools/BAMRecordCodec.java       |    0
 .../java/htsjdk/samtools/BamFileIoUtils.java       |    0
 .../java/htsjdk/samtools/BamIndexValidator.java    |    0
 src/{ => main}/java/htsjdk/samtools/Bin.java       |    0
 src/{ => main}/java/htsjdk/samtools/BinList.java   |    0
 .../java/htsjdk/samtools/BinaryBAMIndexWriter.java |    0
 .../java/htsjdk/samtools/BinaryCigarCodec.java     |    0
 .../java/htsjdk/samtools/BinaryTagCodec.java       |    0
 .../java/htsjdk/samtools/BinningIndexBuilder.java  |    0
 .../java/htsjdk/samtools/BinningIndexContent.java  |    0
 .../java/htsjdk/samtools/BrowseableBAMIndex.java   |    0
 .../java/htsjdk/samtools/CRAMBAIIndexer.java       |    0
 src/main/java/htsjdk/samtools/CRAMCRAIIndexer.java |  135 +
 .../htsjdk/samtools/CRAMContainerStreamWriter.java |    0
 src/main/java/htsjdk/samtools/CRAMFileReader.java  |  533 +++
 .../java/htsjdk/samtools/CRAMFileWriter.java       |    0
 src/main/java/htsjdk/samtools/CRAMIterator.java    |  306 ++
 .../java/htsjdk/samtools/CachingBAMFileIndex.java  |    0
 .../samtools/ChainedDownsamplingIterator.java      |    0
 src/{ => main}/java/htsjdk/samtools/Chunk.java     |    0
 src/{ => main}/java/htsjdk/samtools/Cigar.java     |    0
 .../java/htsjdk/samtools/CigarElement.java         |    0
 .../java/htsjdk/samtools/CigarOperator.java        |    0
 .../samtools/ComparableSamRecordIterator.java      |    0
 .../ConstantMemoryDownsamplingIterator.java        |    0
 .../samtools/CoordinateSortedPairInfoMap.java      |    0
 .../java/htsjdk/samtools/CustomReaderFactory.java  |    0
 .../htsjdk/samtools/DefaultSAMRecordFactory.java   |    0
 src/main/java/htsjdk/samtools/Defaults.java        |  174 +
 .../htsjdk/samtools/DiskBasedBAMFileIndex.java     |    0
 .../java/htsjdk/samtools/DownsamplingIterator.java |    0
 .../samtools/DownsamplingIteratorFactory.java      |    0
 .../htsjdk/samtools/DuplicateScoringStrategy.java  |    0
 .../java/htsjdk/samtools/DuplicateSet.java         |    0
 .../java/htsjdk/samtools/DuplicateSetIterator.java |    0
 .../htsjdk/samtools/FileTruncatedException.java    |    0
 .../java/htsjdk/samtools/FixBAMFile.java           |    0
 .../java/htsjdk/samtools/GenomicIndexUtil.java     |    0
 .../samtools/HighAccuracyDownsamplingIterator.java |    0
 .../java/htsjdk/samtools/LinearIndex.java          |    0
 .../htsjdk/samtools/MergingSamRecordIterator.java  |    0
 .../samtools/NotPrimarySkippingIterator.java       |    0
 .../java/htsjdk/samtools/QueryInterval.java        |    0
 .../java/htsjdk/samtools/ReservedTagConstants.java |    0
 .../SAMBinaryTagAndUnsignedArrayValue.java         |    0
 .../java/htsjdk/samtools/SAMBinaryTagAndValue.java |    0
 .../java/htsjdk/samtools/SAMException.java         |    0
 .../java/htsjdk/samtools/SAMFileHeader.java        |    0
 src/main/java/htsjdk/samtools/SAMFileReader.java   |  751 +++
 .../java/htsjdk/samtools/SAMFileSource.java        |    0
 .../java/htsjdk/samtools/SAMFileSpan.java          |    0
 .../htsjdk/samtools/SAMFileTruncatedReader.java    |    0
 .../java/htsjdk/samtools/SAMFileWriter.java        |    0
 .../java/htsjdk/samtools/SAMFileWriterFactory.java |  505 ++
 .../java/htsjdk/samtools/SAMFileWriterImpl.java    |    0
 src/main/java/htsjdk/samtools/SAMFlag.java         |  110 +
 .../java/htsjdk/samtools/SAMFormatException.java   |    0
 .../htsjdk/samtools/SAMHeaderRecordComparator.java |    0
 src/main/java/htsjdk/samtools/SAMLineParser.java   |  487 ++
 .../java/htsjdk/samtools/SAMProgramRecord.java     |    0
 .../java/htsjdk/samtools/SAMReadGroupRecord.java   |    0
 src/{ => main}/java/htsjdk/samtools/SAMRecord.java |    0
 .../java/htsjdk/samtools/SAMRecordComparator.java  |    0
 .../samtools/SAMRecordCoordinateComparator.java    |    0
 .../samtools/SAMRecordDuplicateComparator.java     |    0
 .../java/htsjdk/samtools/SAMRecordFactory.java     |    0
 .../java/htsjdk/samtools/SAMRecordIterator.java    |    0
 .../samtools/SAMRecordQueryHashComparator.java     |    0
 .../samtools/SAMRecordQueryNameComparator.java     |    0
 .../java/htsjdk/samtools/SAMRecordSetBuilder.java  |  576 +++
 .../java/htsjdk/samtools/SAMRecordUtil.java        |    0
 .../htsjdk/samtools/SAMSequenceDictionary.java     |    0
 .../java/htsjdk/samtools/SAMSequenceRecord.java    |    0
 .../java/htsjdk/samtools/SAMSortOrderChecker.java  |    0
 src/{ => main}/java/htsjdk/samtools/SAMTag.java    |    0
 .../java/htsjdk/samtools/SAMTagUtil.java           |    0
 .../java/htsjdk/samtools/SAMTestUtil.java          |    0
 .../java/htsjdk/samtools/SAMTextHeaderCodec.java   |    0
 .../java/htsjdk/samtools/SAMTextReader.java        |    0
 src/main/java/htsjdk/samtools/SAMTextWriter.java   |  221 +
 src/{ => main}/java/htsjdk/samtools/SAMTools.java  |    0
 src/{ => main}/java/htsjdk/samtools/SAMUtils.java  |    0
 .../java/htsjdk/samtools/SAMValidationError.java   |    0
 src/{ => main}/java/htsjdk/samtools/SQTagUtil.java |    0
 src/main/java/htsjdk/samtools/SRAFileReader.java   |  308 ++
 src/{ => main}/java/htsjdk/samtools/SRAIndex.java  |    0
 src/main/java/htsjdk/samtools/SRAIterator.java     |  256 +
 .../java/htsjdk/samtools/SamFileHeaderMerger.java  |    0
 .../java/htsjdk/samtools/SamFileValidator.java     |  856 ++++
 src/main/java/htsjdk/samtools/SamFiles.java        |   98 +
 src/main/java/htsjdk/samtools/SamFlagField.java    |  203 +
 .../java/htsjdk/samtools/SamIndexes.java           |    0
 .../java/htsjdk/samtools/SamInputResource.java     |  473 ++
 .../java/htsjdk/samtools/SamPairUtil.java          |    0
 src/{ => main}/java/htsjdk/samtools/SamReader.java |    0
 .../java/htsjdk/samtools/SamReaderFactory.java     |  528 +++
 src/main/java/htsjdk/samtools/SamStreams.java      |  164 +
 .../SecondaryOrSupplementarySkippingIterator.java  |    0
 .../StreamInflatingIndexingOutputStream.java       |    0
 .../samtools/TagValueAndUnsignedArrayFlag.java     |    0
 .../java/htsjdk/samtools/TextCigarCodec.java       |    0
 .../java/htsjdk/samtools/TextTagCodec.java         |    0
 .../htsjdk/samtools/TextualBAMIndexWriter.java     |    0
 .../java/htsjdk/samtools/ValidationStringency.java |    0
 .../java/htsjdk/samtools/apps/TimeChannel.java     |    0
 .../htsjdk/samtools/apps/TimeRandomAccessFile.java |    0
 src/main/java/htsjdk/samtools/cram/CRAIEntry.java  |  195 +
 src/main/java/htsjdk/samtools/cram/CRAIIndex.java  |  267 ++
 .../java/htsjdk/samtools/cram/CRAMException.java   |    0
 .../cram/build/CompressionHeaderFactory.java       |    0
 .../samtools/cram/build/ContainerFactory.java      |    0
 .../samtools/cram/build/ContainerParser.java       |    0
 .../samtools/cram/build/Cram2SamRecordFactory.java |    0
 .../samtools/cram/build/CramContainerIterator.java |    0
 .../java/htsjdk/samtools/cram/build/CramIO.java    |    0
 .../htsjdk/samtools/cram/build/CramNormalizer.java |    0
 .../cram/build/CramSpanContainerIterator.java      |    0
 .../samtools/cram/build/Sam2CramRecordFactory.java |    0
 .../java/htsjdk/samtools/cram/build/Utils.java     |    0
 .../samtools/cram/common/CramVersionPolicies.java  |    0
 .../htsjdk/samtools/cram/common/CramVersions.java  |   11 +
 .../htsjdk/samtools/cram/common/IntHashMap.java    |    0
 .../htsjdk/samtools/cram/common/MutableInt.java    |    0
 .../java/htsjdk/samtools/cram/common/Version.java  |    0
 .../samtools/cram/digest/AbstractSerialDigest.java |    0
 .../samtools/cram/digest/ByteSumCombine.java       |    0
 .../java/htsjdk/samtools/cram/digest/Combine.java  |    0
 .../samtools/cram/digest/ContentDigests.java       |    0
 .../htsjdk/samtools/cram/digest/Crc32Hasher.java   |    0
 .../samtools/cram/digest/IntegerSumCombine.java    |    0
 .../samtools/cram/digest/MessageDigestHasher.java  |    0
 .../java/htsjdk/samtools/cram/digest/SERIES.java   |    0
 .../samtools/cram/encoding/AbstractBitCodec.java   |    0
 .../samtools/cram/encoding/BetaIntegerCodec.java   |    0
 .../cram/encoding/BetaIntegerEncoding.java         |    0
 .../htsjdk/samtools/cram/encoding/BitCodec.java    |    0
 .../cram/encoding/ByteArrayLenEncoding.java        |    0
 .../cram/encoding/ByteArrayStopEncoding.java       |    0
 .../htsjdk/samtools/cram/encoding/DataSeries.java  |    0
 .../samtools/cram/encoding/DataSeriesMap.java      |    0
 .../samtools/cram/encoding/DataSeriesType.java     |    0
 .../htsjdk/samtools/cram/encoding/Encoding.java    |    0
 .../samtools/cram/encoding/EncodingFactory.java    |    0
 .../cram/encoding/ExternalByteArrayCodec.java      |    0
 .../cram/encoding/ExternalByteArrayEncoding.java   |    0
 .../samtools/cram/encoding/ExternalByteCodec.java  |    0
 .../cram/encoding/ExternalByteEncoding.java        |    0
 .../samtools/cram/encoding/ExternalCompressor.java |    0
 .../cram/encoding/ExternalIntegerCodec.java        |    0
 .../cram/encoding/ExternalIntegerEncoding.java     |    0
 .../samtools/cram/encoding/ExternalLongCodec.java  |    0
 .../cram/encoding/ExternalLongEncoding.java        |    0
 .../samtools/cram/encoding/GammaIntegerCodec.java  |    0
 .../cram/encoding/GammaIntegerEncoding.java        |    0
 .../samtools/cram/encoding/GolombIntegerCodec.java |    0
 .../cram/encoding/GolombIntegerEncoding.java       |    0
 .../samtools/cram/encoding/GolombLongCodec.java    |    0
 .../samtools/cram/encoding/GolombLongEncoding.java |    0
 .../cram/encoding/GolombRiceIntegerCodec.java      |    0
 .../cram/encoding/GolombRiceIntegerEncoding.java   |    0
 .../htsjdk/samtools/cram/encoding/NullCodec.java   |    0
 .../samtools/cram/encoding/NullEncoding.java       |    0
 .../cram/encoding/SubexponentialIntegerCodec.java  |    0
 .../encoding/SubexponentialIntegerEncoding.java    |    0
 .../cram/encoding/huffman/HuffmanCode.java         |    0
 .../cram/encoding/huffman/HuffmanLeaf.java         |    0
 .../cram/encoding/huffman/HuffmanNode.java         |    0
 .../cram/encoding/huffman/HuffmanTree.java         |    0
 .../huffman/codec/CanonicalHuffmanByteCodec.java   |    0
 .../codec/CanonicalHuffmanIntegerCodec.java        |    0
 .../encoding/huffman/codec/HuffmanBitCode.java     |    0
 .../huffman/codec/HuffmanByteEncoding.java         |    0
 .../encoding/huffman/codec/HuffmanByteHelper.java  |    0
 .../encoding/huffman/codec/HuffmanIntHelper.java   |    0
 .../huffman/codec/HuffmanIntegerEncoding.java      |    0
 .../huffman/codec/HuffmanParamsCalculator.java     |    0
 .../samtools/cram/encoding/rans/Constants.java     |    0
 .../htsjdk/samtools/cram/encoding/rans/D04.java    |    0
 .../htsjdk/samtools/cram/encoding/rans/D14.java    |    0
 .../samtools/cram/encoding/rans/Decoding.java      |    0
 .../htsjdk/samtools/cram/encoding/rans/E04.java    |    0
 .../htsjdk/samtools/cram/encoding/rans/E14.java    |    0
 .../samtools/cram/encoding/rans/Encoding.java      |    0
 .../samtools/cram/encoding/rans/Frequencies.java   |    0
 .../htsjdk/samtools/cram/encoding/rans/RANS.java   |    0
 .../htsjdk/samtools/cram/encoding/rans/Utils.java  |    0
 .../cram/encoding/reader/AbstractReader.java       |    0
 .../cram/encoding/reader/CramRecordReader.java     |    0
 .../samtools/cram/encoding/reader/DataReader.java  |    0
 .../cram/encoding/reader/DataReaderFactory.java    |    0
 .../cram/encoding/reader/RefSeqIdReader.java       |    0
 .../encoding/readfeatures/BaseQualityScore.java    |    0
 .../samtools/cram/encoding/readfeatures/Bases.java |    0
 .../cram/encoding/readfeatures/Deletion.java       |    0
 .../cram/encoding/readfeatures/HardClip.java       |    0
 .../cram/encoding/readfeatures/InsertBase.java     |    0
 .../cram/encoding/readfeatures/Insertion.java      |    0
 .../cram/encoding/readfeatures/Padding.java        |    0
 .../cram/encoding/readfeatures/ReadBase.java       |    0
 .../cram/encoding/readfeatures/ReadFeature.java    |    0
 .../cram/encoding/readfeatures/RefSkip.java        |    0
 .../cram/encoding/readfeatures/Scores.java         |    0
 .../cram/encoding/readfeatures/SoftClip.java       |    0
 .../cram/encoding/readfeatures/Substitution.java   |    0
 .../samtools/cram/encoding/writer/DataWriter.java  |    0
 .../cram/encoding/writer/DataWriterFactory.java    |    0
 .../samtools/cram/encoding/writer/Writer.java      |    0
 .../htsjdk/samtools/cram/io/BitInputStream.java    |    0
 .../htsjdk/samtools/cram/io/BitOutputStream.java   |    0
 .../htsjdk/samtools/cram/io/CRC32InputStream.java  |    0
 .../htsjdk/samtools/cram/io/CRC32OutputStream.java |    0
 .../samtools/cram/io/CountingInputStream.java      |    0
 .../java/htsjdk/samtools/cram/io/CramArray.java    |    0
 .../java/htsjdk/samtools/cram/io/CramInt.java      |    0
 .../samtools/cram/io/DefaultBitInputStream.java    |    0
 .../samtools/cram/io/DefaultBitOutputStream.java   |    0
 .../cram/io/ExposedByteArrayOutputStream.java      |    0
 .../samtools/cram/io/ExternalCompression.java      |  152 +
 .../java/htsjdk/samtools/cram/io/ITF8.java         |    0
 .../htsjdk/samtools/cram/io/InputStreamUtils.java  |    0
 .../java/htsjdk/samtools/cram/io/LTF8.java         |    0
 .../htsjdk/samtools/cram/lossy/BaseCategory.java   |    0
 .../samtools/cram/lossy/BaseCategoryType.java      |    0
 .../java/htsjdk/samtools/cram/lossy/Binning.java   |    0
 .../samtools/cram/lossy/PreservationPolicy.java    |    0
 .../cram/lossy/QualityScorePreservation.java       |    0
 .../samtools/cram/lossy/QualityScoreTreatment.java |    0
 .../cram/lossy/QualityScoreTreatmentType.java      |    0
 .../htsjdk/samtools/cram/lossy/ReadCategory.java   |    0
 .../samtools/cram/lossy/ReadCategoryType.java      |    0
 .../samtools/cram/ref/CRAMReferenceSource.java     |    0
 .../htsjdk/samtools/cram/ref/EnaRefService.java    |    0
 .../cram/ref/InMemoryReferenceSequenceFile.java    |    0
 .../htsjdk/samtools/cram/ref/ReferenceRegion.java  |    0
 .../htsjdk/samtools/cram/ref/ReferenceSource.java  |  274 ++
 .../htsjdk/samtools/cram/ref/ReferenceTracks.java  |    0
 .../samtools/cram/structure/AlignmentSpan.java     |    0
 .../java/htsjdk/samtools/cram/structure/Block.java |    0
 .../cram/structure/BlockCompressionMethod.java     |    0
 .../samtools/cram/structure/BlockContentType.java  |    0
 .../samtools/cram/structure/CompressionHeader.java |    0
 .../htsjdk/samtools/cram/structure/Container.java  |    0
 .../samtools/cram/structure/ContainerHeaderIO.java |   96 +
 .../samtools/cram/structure/ContainerIO.java       |    0
 .../cram/structure/CramCompressionRecord.java      |    0
 .../htsjdk/samtools/cram/structure/CramHeader.java |    0
 .../htsjdk/samtools/cram/structure/EncodingID.java |    0
 .../samtools/cram/structure/EncodingKey.java       |    0
 .../samtools/cram/structure/EncodingParams.java    |    0
 .../htsjdk/samtools/cram/structure/ReadTag.java    |    0
 .../java/htsjdk/samtools/cram/structure/Slice.java |    0
 .../htsjdk/samtools/cram/structure/SliceIO.java    |    0
 .../cram/structure/SubstitutionMatrix.java         |    0
 .../htsjdk/samtools/example/ExampleSamUsage.java   |    0
 .../htsjdk/samtools/example/PrintReadsExample.java |   98 +
 .../htsjdk/samtools/fastq/AsyncFastqWriter.java    |    0
 .../htsjdk/samtools/fastq/BasicFastqWriter.java    |    0
 .../java/htsjdk/samtools/fastq/FastqConstants.java |    0
 .../java/htsjdk/samtools/fastq/FastqReader.java    |    0
 .../java/htsjdk/samtools/fastq/FastqRecord.java    |    0
 .../java/htsjdk/samtools/fastq/FastqWriter.java    |    0
 .../htsjdk/samtools/fastq/FastqWriterFactory.java  |   31 +
 .../samtools/filter/AbstractJavascriptFilter.java  |    0
 .../htsjdk/samtools/filter/AggregateFilter.java    |    0
 .../java/htsjdk/samtools/filter/AlignedFilter.java |    0
 .../samtools/filter/DuplicateReadFilter.java       |    0
 .../filter/FailsVendorReadQualityFilter.java       |    0
 .../htsjdk/samtools/filter/FilteringIterator.java  |    0
 .../samtools/filter/FilteringSamIterator.java      |    0
 .../htsjdk/samtools/filter/InsertSizeFilter.java   |    0
 .../htsjdk/samtools/filter/IntervalFilter.java     |    0
 .../samtools/filter/JavascriptSamRecordFilter.java |    0
 .../samtools/filter/MappingQualityFilter.java      |    0
 .../samtools/filter/NotPrimaryAlignmentFilter.java |    0
 .../samtools/filter/OverclippedReadFilter.java     |    0
 .../htsjdk/samtools/filter/ReadNameFilter.java     |    0
 .../htsjdk/samtools/filter/SamRecordFilter.java    |    0
 .../samtools/filter/SecondaryAlignmentFilter.java  |    0
 .../filter/SecondaryOrSupplementaryFilter.java     |    0
 .../htsjdk/samtools/filter/SolexaNoiseFilter.java  |    0
 .../java/htsjdk/samtools/filter/TagFilter.java     |    0
 .../samtools/filter/WholeReadClippedFilter.java    |    0
 .../java/htsjdk/samtools/liftover/Chain.java       |    0
 .../java/htsjdk/samtools/liftover/LiftOver.java    |  319 ++
 .../java/htsjdk/samtools/metrics/Header.java       |    0
 .../java/htsjdk/samtools/metrics/MetricBase.java   |    0
 .../java/htsjdk/samtools/metrics/MetricsFile.java  |  594 +++
 .../java/htsjdk/samtools/metrics/StringHeader.java |    0
 .../htsjdk/samtools/metrics/VersionHeader.java     |    0
 .../reference/AbstractFastaSequenceFile.java       |    0
 .../samtools/reference/FastaSequenceFile.java      |    0
 .../samtools/reference/FastaSequenceIndex.java     |    0
 .../reference/IndexedFastaSequenceFile.java        |    0
 .../samtools/reference/ReferenceSequence.java      |    0
 .../samtools/reference/ReferenceSequenceFile.java  |    0
 .../reference/ReferenceSequenceFileFactory.java    |    0
 .../reference/ReferenceSequenceFileWalker.java     |    0
 .../seekablestream/ByteArraySeekableStream.java    |    0
 .../seekablestream/ISeekableStreamFactory.java     |    0
 .../seekablestream/SeekableBufferedStream.java     |    0
 .../samtools/seekablestream/SeekableFTPStream.java |    0
 .../seekablestream/SeekableFTPStreamHelper.java    |    0
 .../seekablestream/SeekableFileStream.java         |    0
 .../seekablestream/SeekableHTTPStream.java         |    0
 .../seekablestream/SeekableMemoryStream.java       |   64 +
 .../seekablestream/SeekablePathStream.java         |  121 +
 .../samtools/seekablestream/SeekableStream.java    |    0
 .../seekablestream/SeekableStreamFactory.java      |   97 +
 .../samtools/seekablestream/UserPasswordInput.java |    0
 .../java/htsjdk/samtools/sra/ReferenceCache.java   |   46 +
 .../java/htsjdk/samtools/sra/SRAAccession.java     |  174 +
 .../htsjdk/samtools/sra/SRAAlignmentIterator.java  |  209 +
 .../samtools/sra/SRAIndexedSequenceFile.java       |  112 +
 .../java/htsjdk/samtools/sra/SRALazyRecord.java    |    0
 .../samtools/sra/SRAUnalignmentIterator.java       |    0
 .../java/htsjdk/samtools/sra/SRAUtils.java         |    0
 .../htsjdk/samtools/util/AbstractAsyncWriter.java  |    0
 .../htsjdk/samtools/util/AbstractIterator.java     |    0
 .../samtools/util/AbstractProgressLogger.java      |    0
 .../java/htsjdk/samtools/util/AsciiWriter.java     |    0
 .../samtools/util/AsyncBufferedIterator.java       |    0
 .../java/htsjdk/samtools/util/BinaryCodec.java     |    0
 .../util/BlockCompressedFilePointerUtil.java       |    0
 .../samtools/util/BlockCompressedInputStream.java  |    0
 .../samtools/util/BlockCompressedOutputStream.java |  392 ++
 .../util/BlockCompressedStreamConstants.java       |    0
 .../java/htsjdk/samtools/util/BlockGunzipper.java  |    0
 .../htsjdk/samtools/util/BufferedLineReader.java   |    0
 .../java/htsjdk/samtools/util/CigarUtil.java       |    0
 .../htsjdk/samtools/util/CloseableIterator.java    |    0
 .../java/htsjdk/samtools/util/CloserUtil.java      |    0
 .../java/htsjdk/samtools/util/CodeUtil.java        |    0
 .../java/htsjdk/samtools/util/CollectionUtil.java  |    0
 .../java/htsjdk/samtools/util/ComparableTuple.java |    0
 .../java/htsjdk/samtools/util/CoordMath.java       |    0
 .../htsjdk/samtools/util/CoordSpanInputSteam.java  |    0
 .../java/htsjdk/samtools/util/DateParser.java      |    0
 .../htsjdk/samtools/util/DelegatingIterator.java   |    0
 .../java/htsjdk/samtools/util/DiskBackedQueue.java |    0
 .../java/htsjdk/samtools/util/FastLineReader.java  |    0
 .../htsjdk/samtools/util/FastqQualityFormat.java   |    0
 .../samtools/util/FileAppendStreamLRUCache.java    |    0
 .../java/htsjdk/samtools/util/FormatUtil.java      |    0
 src/main/java/htsjdk/samtools/util/Histogram.java  |  567 +++
 .../java/htsjdk/samtools/util/HttpUtils.java       |    0
 .../java/htsjdk/samtools/util/IOUtil.java          |    0
 .../java/htsjdk/samtools/util/Interval.java        |    0
 .../java/htsjdk/samtools/util/IntervalList.java    |    0
 .../util/IntervalListReferenceSequenceMask.java    |    0
 .../java/htsjdk/samtools/util/IntervalTree.java    |    0
 .../java/htsjdk/samtools/util/IntervalTreeMap.java |    0
 .../java/htsjdk/samtools/util/IntervalUtil.java    |    0
 .../java/htsjdk/samtools/util/Iso8601Date.java     |    0
 .../java/htsjdk/samtools/util/IterableAdapter.java |    0
 .../htsjdk/samtools/util/IterableOnceIterator.java |    0
 .../java/htsjdk/samtools/util/Iterables.java       |    0
 src/{ => main}/java/htsjdk/samtools/util/Lazy.java |    0
 .../java/htsjdk/samtools/util/LineReader.java      |    0
 .../java/htsjdk/samtools/util/ListMap.java         |    0
 .../java/htsjdk/samtools/util/Locatable.java       |    0
 .../java/htsjdk/samtools/util/LocationAware.java   |    0
 .../java/htsjdk/samtools/util/Locus.java           |    0
 .../java/htsjdk/samtools/util/LocusComparator.java |    0
 .../java/htsjdk/samtools/util/LocusImpl.java       |    0
 src/{ => main}/java/htsjdk/samtools/util/Log.java  |    0
 .../samtools/util/Md5CalculatingInputStream.java   |    0
 .../samtools/util/Md5CalculatingOutputStream.java  |    0
 .../java/htsjdk/samtools/util/MergingIterator.java |    0
 .../java/htsjdk/samtools/util/Murmur3.java         |    0
 .../java/htsjdk/samtools/util/OverlapDetector.java |  193 +
 .../java/htsjdk/samtools/util/PeekIterator.java    |    0
 .../htsjdk/samtools/util/PeekableIterator.java     |    0
 .../java/htsjdk/samtools/util/ProcessExecutor.java |    0
 .../java/htsjdk/samtools/util/ProgressLogger.java  |    0
 .../samtools/util/ProgressLoggerInterface.java     |    0
 .../samtools/util/QualityEncodingDetector.java     |    0
 .../java/htsjdk/samtools/util/QualityUtil.java     |    0
 .../samtools/util/ReferenceSequenceMask.java       |    0
 .../htsjdk/samtools/util/RelativeIso8601Date.java  |    0
 .../htsjdk/samtools/util/ResourceLimitedMap.java   |    0
 .../samtools/util/ResourceLimitedMapFunctor.java   |    0
 .../htsjdk/samtools/util/RuntimeEOFException.java  |    0
 .../htsjdk/samtools/util/RuntimeIOException.java   |    0
 .../samtools/util/RuntimeScriptException.java      |    0
 .../htsjdk/samtools/util/SamLocusIterator.java     |  679 +++
 .../util/SamRecordIntervalIteratorFactory.java     |    0
 .../samtools/util/SamRecordTrackingBuffer.java     |    0
 .../htsjdk/samtools/util/SamRecordWithOrdinal.java |    0
 .../java/htsjdk/samtools/util/SequenceUtil.java    |    0
 .../java/htsjdk/samtools/util/SnappyLoader.java    |    0
 .../samtools/util/SolexaQualityConverter.java      |    0
 .../htsjdk/samtools/util/SortingCollection.java    |  514 ++
 .../samtools/util/SortingLongCollection.java       |    0
 .../java/htsjdk/samtools/util/StopWatch.java       |    0
 .../htsjdk/samtools/util/StringLineReader.java     |    0
 .../java/htsjdk/samtools/util/StringUtil.java      |    0
 .../htsjdk/samtools/util/TempStreamFactory.java    |    0
 .../java/htsjdk/samtools/util/TestUtil.java        |    0
 .../java/htsjdk/samtools/util/TrimmingUtil.java    |    0
 .../java/htsjdk/samtools/util/Tuple.java           |    0
 .../util/WholeGenomeReferenceSequenceMask.java     |    0
 .../java/htsjdk/samtools/util/ftp/FTPClient.java   |    0
 .../java/htsjdk/samtools/util/ftp/FTPReply.java    |    0
 .../java/htsjdk/samtools/util/ftp/FTPStream.java   |    0
 .../java/htsjdk/samtools/util/ftp/FTPUtils.java    |    0
 .../htsjdk/samtools/util/zip/DeflaterFactory.java  |   48 +
 .../java/htsjdk/tribble/AbstractFeatureCodec.java  |    0
 .../java/htsjdk/tribble/AbstractFeatureReader.java |    0
 .../java/htsjdk/tribble/AsciiFeatureCodec.java     |   92 +
 .../java/htsjdk/tribble/BinaryFeatureCodec.java    |    0
 .../htsjdk/tribble/CloseableTribbleIterator.java   |    0
 src/{ => main}/java/htsjdk/tribble/Feature.java    |    0
 .../java/htsjdk/tribble/FeatureCodec.java          |    0
 .../java/htsjdk/tribble/FeatureCodecHeader.java    |    0
 .../java/htsjdk/tribble/FeatureReader.java         |    0
 .../java/htsjdk/tribble/NameAwareCodec.java        |    0
 .../java/htsjdk/tribble/SimpleFeature.java         |    0
 .../java/htsjdk/tribble/TabixFeatureReader.java    |  217 +
 src/{ => main}/java/htsjdk/tribble/Tribble.java    |    0
 .../java/htsjdk/tribble/TribbleException.java      |    0
 .../tribble/TribbleIndexedFeatureReader.java       |    0
 .../java/htsjdk/tribble/annotation/Strand.java     |    0
 .../java/htsjdk/tribble/bed/BEDCodec.java          |    0
 .../java/htsjdk/tribble/bed/BEDFeature.java        |    0
 .../java/htsjdk/tribble/bed/FullBEDFeature.java    |    0
 .../java/htsjdk/tribble/bed/SimpleBEDFeature.java  |    0
 .../java/htsjdk/tribble/example/CountRecords.java  |    0
 .../htsjdk/tribble/example/ExampleBinaryCodec.java |    0
 .../java/htsjdk/tribble/example/IndexToTable.java  |    0
 .../htsjdk/tribble/example/IndicesAreEqual.java    |    0
 .../tribble/example/ProfileIndexReading.java       |    0
 .../exception/CodecLineParsingException.java       |    0
 .../tribble/exception/UnsortedFileException.java   |    0
 .../htsjdk/tribble/gelitext/DiploidGenotype.java   |    0
 .../htsjdk/tribble/gelitext/GeliTextCodec.java     |    0
 .../htsjdk/tribble/gelitext/GeliTextFeature.java   |    0
 .../java/htsjdk/tribble/index/AbstractIndex.java   |    0
 .../java/htsjdk/tribble/index/Block.java           |    0
 .../java/htsjdk/tribble/index/ChrIndex.java        |    0
 .../htsjdk/tribble/index/DynamicIndexCreator.java  |    0
 .../java/htsjdk/tribble/index/Index.java           |    0
 .../java/htsjdk/tribble/index/IndexCreator.java    |    0
 .../java/htsjdk/tribble/index/IndexFactory.java    |    0
 .../java/htsjdk/tribble/index/MutableIndex.java    |    0
 .../htsjdk/tribble/index/TribbleIndexCreator.java  |    0
 .../htsjdk/tribble/index/interval/Interval.java    |    0
 .../index/interval/IntervalIndexCreator.java       |    0
 .../tribble/index/interval/IntervalTree.java       |    0
 .../tribble/index/interval/IntervalTreeIndex.java  |    0
 .../htsjdk/tribble/index/linear/LinearIndex.java   |    0
 .../tribble/index/linear/LinearIndexCreator.java   |    0
 .../htsjdk/tribble/index/tabix/TabixFormat.java    |    0
 .../htsjdk/tribble/index/tabix/TabixIndex.java     |    0
 .../tribble/index/tabix/TabixIndexCreator.java     |    0
 .../htsjdk/tribble/readers/AsciiLineReader.java    |    0
 .../tribble/readers/AsciiLineReaderIterator.java   |    0
 .../java/htsjdk/tribble/readers/LineIterator.java  |    0
 .../htsjdk/tribble/readers/LineIteratorImpl.java   |    0
 .../java/htsjdk/tribble/readers/LineReader.java    |    0
 .../htsjdk/tribble/readers/LineReaderUtil.java     |   56 +
 .../tribble/readers/LongLineBufferedReader.java    |    0
 .../java/htsjdk/tribble/readers/Positional.java    |    0
 .../tribble/readers/PositionalBufferedStream.java  |    0
 .../tribble/readers/SynchronousLineReader.java     |   61 +
 .../tribble/readers/TabixIteratorLineReader.java   |    0
 .../java/htsjdk/tribble/readers/TabixReader.java   |    0
 src/{ => main}/java/htsjdk/tribble/readme.txt      |    0
 .../java/htsjdk/tribble/util/FTPHelper.java        |    0
 .../java/htsjdk/tribble/util/HTTPHelper.java       |    0
 .../tribble/util/LittleEndianInputStream.java      |    0
 .../tribble/util/LittleEndianOutputStream.java     |    0
 .../java/htsjdk/tribble/util/MathUtils.java        |    0
 .../java/htsjdk/tribble/util/ParsingUtils.java     |    0
 .../java/htsjdk/tribble/util/RemoteURLHelper.java  |    0
 .../java/htsjdk/tribble/util/TabixUtils.java       |    0
 .../java/htsjdk/tribble/util/URLHelper.java        |    0
 .../java/htsjdk/tribble/util/ftp/.gitignore        |    0
 .../util/popgen/HardyWeinbergCalculation.java      |    0
 src/main/java/htsjdk/variant/bcf2/BCF2Codec.java   |  491 ++
 .../java/htsjdk/variant/bcf2/BCF2Decoder.java      |    0
 .../variant/bcf2/BCF2GenotypeFieldDecoders.java    |    0
 .../variant/bcf2/BCF2LazyGenotypesDecoder.java     |    0
 .../java/htsjdk/variant/bcf2/BCF2Type.java         |    0
 .../java/htsjdk/variant/bcf2/BCF2Utils.java        |    0
 .../java/htsjdk/variant/bcf2/BCFVersion.java       |    0
 .../variant/example/PrintVariantsExample.java      |  103 +
 .../java/htsjdk/variant/utils/GeneralUtils.java    |    0
 .../utils/SAMSequenceDictionaryExtractor.java      |    0
 .../java/htsjdk/variant/variantcontext/Allele.java |    0
 .../htsjdk/variant/variantcontext/CommonInfo.java  |    0
 .../variant/variantcontext/FastGenotype.java       |    0
 .../htsjdk/variant/variantcontext/Genotype.java    |    0
 .../variant/variantcontext/GenotypeBuilder.java    |    0
 .../variantcontext/GenotypeJEXLContext.java        |    0
 .../variantcontext/GenotypeLikelihoods.java        |    0
 .../variant/variantcontext/GenotypeType.java       |    0
 .../variant/variantcontext/GenotypesContext.java   |    0
 .../htsjdk/variant/variantcontext/JEXLMap.java     |    0
 .../variantcontext/LazyGenotypesContext.java       |    0
 .../variant/variantcontext/VariantContext.java     |    0
 .../variantcontext/VariantContextBuilder.java      |  498 ++
 .../variantcontext/VariantContextComparator.java   |    0
 .../variantcontext/VariantContextUtils.java        |    0
 .../variant/variantcontext/VariantJEXLContext.java |    0
 .../variantcontext/filter/CompoundFilter.java      |    0
 .../variantcontext/filter/FilteringIterator.java   |    0
 .../filter/FilteringVariantContextIterator.java    |    0
 .../filter/GenotypeQualityFilter.java              |    0
 .../filter/HeterozygosityFilter.java               |    0
 .../filter/JavascriptVariantFilter.java            |    0
 .../filter/PassingVariantFilter.java               |    0
 .../variant/variantcontext/filter/SnpFilter.java   |    0
 .../filter/VariantContextFilter.java               |    0
 .../writer/AsyncVariantContextWriter.java          |    0
 .../variant/variantcontext/writer/BCF2Encoder.java |    0
 .../variantcontext/writer/BCF2FieldEncoder.java    |    0
 .../variantcontext/writer/BCF2FieldWriter.java     |    0
 .../writer/BCF2FieldWriterManager.java             |    0
 .../variant/variantcontext/writer/BCF2Writer.java  |    0
 .../writer/IndexingVariantContextWriter.java       |    0
 .../writer/IntGenotypeFieldAccessors.java          |    0
 .../variant/variantcontext/writer/Options.java     |    0
 .../writer/SortingVariantContextWriter.java        |    0
 .../writer/SortingVariantContextWriterBase.java    |    0
 .../variant/variantcontext/writer/VCFWriter.java   |    0
 .../writer/VariantContextWriter.java               |    0
 .../writer/VariantContextWriterBuilder.java        |  537 +++
 .../writer/VariantContextWriterFactory.java        |  282 ++
 .../java/htsjdk/variant/vcf/AbstractVCFCodec.java  |    0
 .../java/htsjdk/variant/vcf/VCF3Codec.java         |    0
 .../java/htsjdk/variant/vcf/VCFCodec.java          |    0
 .../htsjdk/variant/vcf/VCFCompoundHeaderLine.java  |    0
 .../java/htsjdk/variant/vcf/VCFConstants.java      |    0
 .../htsjdk/variant/vcf/VCFContigHeaderLine.java    |    0
 .../java/htsjdk/variant/vcf/VCFEncoder.java        |    0
 .../java/htsjdk/variant/vcf/VCFFileReader.java     |    0
 .../htsjdk/variant/vcf/VCFFilterHeaderLine.java    |    0
 .../htsjdk/variant/vcf/VCFFormatHeaderLine.java    |    0
 .../java/htsjdk/variant/vcf/VCFHeader.java         |    0
 .../java/htsjdk/variant/vcf/VCFHeaderLine.java     |    0
 .../htsjdk/variant/vcf/VCFHeaderLineCount.java     |    0
 .../variant/vcf/VCFHeaderLineTranslator.java       |  183 +
 .../java/htsjdk/variant/vcf/VCFHeaderLineType.java |    0
 .../java/htsjdk/variant/vcf/VCFHeaderVersion.java  |    0
 .../java/htsjdk/variant/vcf/VCFIDHeaderLine.java   |    0
 .../java/htsjdk/variant/vcf/VCFInfoHeaderLine.java |    0
 .../java/htsjdk/variant/vcf/VCFRecordCodec.java    |    0
 .../htsjdk/variant/vcf/VCFSimpleHeaderLine.java    |    0
 .../htsjdk/variant/vcf/VCFStandardHeaderLines.java |    0
 .../java/htsjdk/variant/vcf/VCFUtils.java          |    0
 src/scripts/build_intel_deflater.sh                |   78 -
 .../htsjdk/cram/io/ExternalCompressionTest.java    |   30 +
 .../htsjdk/samtools/AbstractBAMFileIndexTest.java  |    0
 .../java/htsjdk/samtools/BAMCigarOverflowTest.java |   29 +
 .../java/htsjdk/samtools/BAMFileIndexTest.java     |  527 +++
 .../java/htsjdk/samtools/BAMFileWriterTest.java    |    0
 .../java/htsjdk/samtools/BAMIndexWriterTest.java   |  227 +
 src/test/java/htsjdk/samtools/BAMIteratorTest.java |   71 +
 ...AMQueryMultipleIntervalsIteratorFilterTest.java |  146 +
 .../java/htsjdk/samtools/BAMRemoteFileTest.java    |  326 ++
 .../java/htsjdk/samtools/BinTest.java              |    0
 .../java/htsjdk/samtools/CRAMBAIIndexerTest.java   |    0
 .../java/htsjdk/samtools/CRAMCRAIIndexerTest.java  |  178 +
 .../java/htsjdk/samtools/CRAMComplianceTest.java   |  150 +
 .../samtools/CRAMContainerStreamWriterTest.java    |  184 +
 .../java/htsjdk/samtools/CRAMEdgeCasesTest.java    |  158 +
 .../java/htsjdk/samtools/CRAMFileBAIIndexTest.java |  301 ++
 .../htsjdk/samtools/CRAMFileCRAIIndexTest.java     |  368 ++
 .../java/htsjdk/samtools/CRAMFileReaderTest.java   |  163 +
 .../java/htsjdk/samtools/CRAMFileWriterTest.java   |  282 ++
 .../samtools/CRAMFileWriterWithIndexTest.java      |  209 +
 .../java/htsjdk/samtools/CRAMIndexQueryTest.java   |  708 +++
 .../java/htsjdk/samtools/ChunkTest.java            |    0
 .../java/htsjdk/samtools/CigarCodecTest.java       |    0
 .../java/htsjdk/samtools/CigarTest.java            |    0
 .../htsjdk/samtools/DownsamplingIteratorTests.java |   87 +
 .../htsjdk/samtools/DuplicateSetIteratorTest.java  |    0
 .../java/htsjdk/samtools/GenomicIndexUtilTest.java |    0
 ...MergingSamRecordIteratorGroupCollisionTest.java |    0
 .../samtools/MergingSamRecordIteratorTest.java     |    0
 .../htsjdk/samtools/ProgramRecordChainingTest.java |    0
 .../samtools/SAMBinaryTagAndValueUnitTest.java     |    0
 .../java/htsjdk/samtools/SAMCloneTest.java         |    0
 .../java/htsjdk/samtools/SAMFileReaderTest.java    |  170 +
 .../htsjdk/samtools/SAMFileWriterFactoryTest.java  |  307 ++
 .../java/htsjdk/samtools/SAMFlagTest.java          |    0
 .../java/htsjdk/samtools/SAMIntegerTagTest.java    |  329 ++
 .../samtools/SAMRecordDuplicateComparatorTest.java |    0
 .../java/htsjdk/samtools/SAMRecordUnitTest.java    |  963 ++++
 .../htsjdk/samtools/SAMSequenceDictionaryTest.java |    0
 .../java/htsjdk/samtools/SAMTextReaderTest.java    |    0
 .../java/htsjdk/samtools/SAMTextWriterTest.java    |  130 +
 .../java/htsjdk/samtools/SAMUtilsTest.java         |    0
 .../htsjdk/samtools/SamFileHeaderMergerTest.java   |  265 ++
 src/test/java/htsjdk/samtools/SamFilesTest.java    |   91 +
 .../java/htsjdk/samtools/SamFlagFieldTest.java     |  150 +
 .../samtools/SamHeaderRecordComparatorTest.java    |    0
 src/test/java/htsjdk/samtools/SamIndexesTest.java  |  193 +
 .../java/htsjdk/samtools/SamPairUtilTest.java      |    0
 .../java/htsjdk/samtools/SamReaderFactoryTest.java |  437 ++
 .../java/htsjdk/samtools/SamReaderSortTest.java    |  102 +
 src/test/java/htsjdk/samtools/SamSpecIntTest.java  |   97 +
 src/test/java/htsjdk/samtools/SamStreamsTest.java  |  124 +
 .../SequenceNameTruncationAndValidationTest.java   |   98 +
 .../java/htsjdk/samtools/ValidateSamFileTest.java  |  471 ++
 .../java/htsjdk/samtools/apps/.gitignore           |    0
 .../java/htsjdk/samtools/cram/CRAIEntryTest.java   |    0
 .../java/htsjdk/samtools/cram/CRAIIndexTest.java   |  220 +
 .../samtools/cram/LosslessRoundTripTest.java       |    0
 .../java/htsjdk/samtools/cram/VersionTest.java     |  106 +
 .../cram/build/CompressionHeaderFactoryTest.java   |    0
 .../samtools/cram/build/ContainerFactoryTest.java  |    0
 .../samtools/cram/build/ContainerParserTest.java   |    0
 .../htsjdk/samtools/cram/build/CramIOTest.java     |    0
 .../cram/encoding/huffman/codec/HuffmanTest.java   |    0
 .../samtools/cram/encoding/rans/RansTest.java      |    0
 .../java/htsjdk/samtools/cram/io/ITF8Test.java     |    0
 .../java/htsjdk/samtools/cram/io/LTF8Test.java     |    0
 .../cram/lossy/QualityScorePreservationTest.java   |    0
 .../samtools/cram/ref/EnaRefServiceTest.java       |    0
 .../cram/structure/CramCompressionRecordTest.java  |    0
 .../samtools/cram/structure/ReadTagTest.java       |    0
 .../htsjdk/samtools/cram/structure/SliceTests.java |    0
 .../cram/structure/SubstitutionMatrixTest.java     |    0
 .../java/htsjdk/samtools/fastq/.gitignore          |    0
 .../htsjdk/samtools/fastq/FastqRecordTest.java     |  204 +
 .../htsjdk/samtools/fastq/FastqWriterTest.java     |   73 +
 .../filter/FailsVendorReadQualityFilterTest.java   |    0
 .../samtools/filter/InsertSizeFilterTest.java      |    0
 .../filter/JavascriptSamRecordFilterTest.java      |   74 +
 .../samtools/filter/MappingQualityFilterTest.java  |    0
 .../samtools/filter/OverclippedReadFilterTest.java |    0
 .../samtools/filter/SolexaNoiseFilterTest.java     |    0
 .../java/htsjdk/samtools/filter/TagFilterTest.java |    0
 .../htsjdk/samtools/liftover/LiftOverTest.java     |  468 ++
 .../htsjdk/samtools/metrics/MetricBaseTest.java    |    0
 .../htsjdk/samtools/metrics/MetricsFileTest.java   |  213 +
 .../htsjdk/samtools/metrics/StringHeaderTest.java  |    0
 .../htsjdk/samtools/metrics/VersionHeaderTest.java |    0
 .../reference/FakeReferenceSequenceFile.java       |    0
 .../samtools/reference/FastaSequenceFileTest.java  |   85 +
 .../samtools/reference/FastaSequenceIndexTest.java |  256 +
 .../reference/InMemoryReferenceSequenceFile.java   |    0
 .../reference/IndexedFastaSequenceFileTest.java    |  308 ++
 .../ReferenceSequenceFileFactoryTests.java         |   39 +
 .../reference/ReferenceSequenceFileWalkerTest.java |   73 +
 .../samtools/reference/ReferenceSequenceTests.java |    0
 .../seekablestream/SeekableBufferedStreamTest.java |  162 +
 .../seekablestream/SeekableFTPStreamTest.java      |    0
 .../seekablestream/SeekableFileStreamTest.java     |   52 +
 .../seekablestream/SeekableMemoryStreamTest.java   |   93 +
 .../seekablestream/SeekablePathStreamTest.java     |   51 +
 .../seekablestream/SeekableStreamFactoryTest.java  |   44 +
 .../java/htsjdk/samtools/sra/AbstractSRATest.java  |    0
 .../java/htsjdk/samtools/sra/SRAAccessionTest.java |   29 +
 .../java/htsjdk/samtools/sra/SRAIndexTest.java     |    0
 .../htsjdk/samtools/sra/SRALazyRecordTest.java     |    0
 .../java/htsjdk/samtools/sra/SRAQueryTest.java     |    0
 .../java/htsjdk/samtools/sra/SRAReferenceTest.java |   95 +
 src/test/java/htsjdk/samtools/sra/SRATest.java     |  432 ++
 .../samtools/util/AsyncBufferedIteratorTest.java   |    0
 .../java/htsjdk/samtools/util/BinaryCodecTest.java |    0
 .../util/BlockCompressedFilePointerUtilTest.java   |    0
 .../util/BlockCompressedOutputStreamTest.java      |  165 +
 .../util/BlockCompressedTerminatorTest.java        |   59 +
 .../java/htsjdk/samtools/util/CigarUtilTest.java   |    0
 .../samtools/util/CloseableIteratorTest.java       |    0
 .../java/htsjdk/samtools/util/CodeUtilTest.java    |    0
 .../htsjdk/samtools/util/ComparableTupleTest.java  |    0
 .../samtools/util/CoordSpanInputSteamTest.java     |    0
 .../htsjdk/samtools/util/DiskBackedQueueTest.java  |    0
 .../java/htsjdk/samtools/util/HistogramTest.java   |  366 ++
 .../htsjdk/samtools/util/IntervalListTest.java     |  520 ++
 .../htsjdk/samtools/util/IntervalTreeMapTest.java  |    0
 .../htsjdk/samtools/util/IntervalTreeTest.java     |    0
 src/test/java/htsjdk/samtools/util/IoUtilTest.java |  190 +
 .../java/htsjdk/samtools/util/Iso8601DateTest.java |    0
 .../java/htsjdk/samtools/util/IupacTest.java       |    0
 .../htsjdk/samtools/util/MergingIteratorTest.java  |    0
 .../htsjdk/samtools/util/OverlapDetectorTest.java  |  240 +
 .../samtools/util/QualityEncodingDetectorTest.java |  109 +
 .../samtools/util/RelativeIso8601DateTest.java     |    0
 .../htsjdk/samtools/util/SamLocusIteratorTest.java |  676 +++
 .../htsjdk/samtools/util/SequenceUtilTest.java     |    0
 .../samtools/util/SolexaQualityConverterTest.java  |    0
 .../samtools/util/SortingCollectionTest.java       |    0
 .../samtools/util/SortingLongCollectionTest.java   |    0
 .../htsjdk/samtools/util/StringLineReaderTest.java |    0
 .../java/htsjdk/samtools/util/StringUtilTest.java  |    0
 .../htsjdk/samtools/util/TrimmingUtilTest.java     |    0
 .../java/htsjdk/samtools/util/TupleTest.java       |    0
 .../htsjdk/tribble/AbstractFeatureReaderTest.java  |    0
 .../java/htsjdk/tribble/BinaryFeaturesTest.java    |    0
 .../java/htsjdk/tribble/FeatureReaderTest.java     |    0
 src/test/java/htsjdk/tribble/TestUtils.java        |   28 +
 .../tribble/TribbleIndexFeatureReaderTest.java     |    0
 .../java/htsjdk/tribble/TribbleTest.java           |    0
 .../java/htsjdk/tribble/bed/BEDCodecTest.java      |    0
 .../java/htsjdk/tribble/gelitext/GeliTextTest.java |    0
 .../htsjdk/tribble/index/IndexFactoryTest.java     |  115 +
 .../java/htsjdk/tribble/index/IndexTest.java       |    0
 .../tribble/index/interval/IntervalTreeTest.java   |    0
 .../tribble/index/linear/LinearIndexTest.java      |    0
 .../htsjdk/tribble/index/tabix/TabixIndexTest.java |  143 +
 .../tribble/readers/AsciiLineReaderTest.java       |    0
 .../readers/LongLineBufferedReaderTest.java        |    0
 .../readers/PositionalBufferedStreamTest.java      |    0
 .../java/htsjdk/tribble/readers/ReaderTest.java    |    0
 .../readers/SynchronousLineReaderUnitTest.java     |   39 +
 .../htsjdk/tribble/readers/TabixReaderTest.java    |    0
 .../java/htsjdk/tribble/source/.gitignore          |    0
 .../java/htsjdk/tribble/util/ParsingUtilsTest.java |    0
 .../htsjdk/tribble/util/ftp/FTPClientTest.java     |    0
 .../java/htsjdk/tribble/util/ftp/FTPUtilsTest.java |    0
 .../util/popgen/HardyWeinbergCalculationTest.java  |    0
 .../htsjdk/variant/PrintVariantsExampleTest.java   |   65 +
 src/test/java/htsjdk/variant/VariantBaseTest.java  |  278 ++
 .../variant/bcf2/BCF2EncoderDecoderUnitTest.java   |    0
 .../htsjdk/variant/bcf2/BCF2UtilsUnitTest.java     |    0
 .../utils/SAMSequenceDictionaryExtractorTest.java  |   65 +
 .../variant/variantcontext/AlleleUnitTest.java     |    0
 .../GenotypeLikelihoodsUnitTest.java               |  337 ++
 .../variant/variantcontext/GenotypeUnitTest.java   |    0
 .../variantcontext/GenotypesContextUnitTest.java   |    0
 .../variantcontext/VariantContextBuilderTest.java  |    0
 .../variantcontext/VariantContextTestProvider.java | 1014 ++++
 .../variantcontext/VariantContextUnitTest.java     | 1450 ++++++
 .../variantcontext/VariantJEXLContextUnitTest.java |    0
 .../variantcontext/filter/AllFailFilter.java       |    0
 .../variantcontext/filter/AllPassFilter.java       |    0
 .../variantcontext/filter/CompoundFilterTest.java  |    0
 .../FilteringVariantContextIteratorTest.java       |   88 +
 .../filter/GenotypeQualityFilterTest.java          |    0
 .../filter/HeterozygosityFilterTest.java           |    0
 .../filter/JavascriptVariantFilterTest.java        |   72 +
 .../filter/PassingVariantFilterTest.java           |    0
 .../variantcontext/filter/SnpFilterTest.java       |    0
 .../writer/TabixOnTheFlyIndexCreationTest.java     |   66 +
 .../variantcontext/writer/VCFWriterUnitTest.java   |  280 ++
 .../VariantContextWriterBuilderUnitTest.java       |  399 ++
 .../writer/VariantContextWritersUnitTest.java      |    0
 .../htsjdk/variant/vcf/AbstractVCFCodecTest.java   |   53 +
 .../htsjdk/variant/vcf/IndexFactoryUnitTest.java   |    0
 .../variant/vcf/VCFCompoundHeaderLineUnitTest.java |    0
 .../java/htsjdk/variant/vcf/VCFEncoderTest.java    |    0
 .../vcf/VCFHeaderLineTranslatorUnitTest.java       |   62 +
 .../htsjdk/variant/vcf/VCFHeaderLineUnitTest.java  |    0
 .../java/htsjdk/variant/vcf/VCFHeaderUnitTest.java |  561 +++
 .../vcf/VCFStandardHeaderLinesUnitTest.java        |    0
 .../BAMCigarOverflowTest/CigarOverflowTest.bam     |  Bin
 .../samtools/BAMFileIndexTest/index_test.bam       |  Bin
 .../samtools/BAMFileIndexTest/index_test.bam.bai   |  Bin
 .../BAMFileIndexTest/symlink_with_index.bam        |    0
 .../BAMFileIndexTest/symlink_with_index.bam.bai    |    0
 .../symlink_without_linked_index.bam               |    0
 .../SAMIntegerTagTest/variousAttributes.bam        |  Bin
 .../SAMIntegerTagTest/variousAttributes.sam        |    0
 .../SamFileHeaderMergerTest/Chromosome1to10.bam    |  Bin
 .../SamFileHeaderMergerTest/Chromosome5to9.bam     |  Bin
 .../case1/chr11sub_file1.sam                       |    0
 .../case1/chr11sub_file2.sam                       |    0
 .../case1/expected_output.sam                      |    0
 .../case2/chr11sub_file1.sam                       |    0
 .../case2/chr11sub_file2.sam                       |    0
 .../case2/chr11sub_file3.sam                       |    0
 .../case2/chr11sub_file4.sam                       |    0
 .../case2/expected_output.sam                      |    0
 .../SamFileHeaderMergerTest/unsorted_input/1.sam   |    0
 .../SamFileHeaderMergerTest/unsorted_input/2.sam   |    0
 .../samtools/ValidateSamFileTest/bad_index.bai     |  Bin
 .../samtools/ValidateSamFileTest/bad_index.bam     |  Bin
 .../samtools/ValidateSamFileTest/buggyHeader.sam   |    0
 .../samtools/ValidateSamFileTest/duplicate_rg.sam  |    0
 .../ValidateSamFileTest/duplicated_reads.sam       |    0
 .../duplicated_reads_out_of_order.sam              |    0
 .../samtools/ValidateSamFileTest/hanging_tab.sam   |    0
 .../inappropriate_mate_cigar_string.sam            |    0
 .../invalid_coord_sort_order.sam                   |    0
 .../invalid_mate_cigar_string.sam                  |    0
 .../ValidateSamFileTest/invalid_platform_unit.sam  |    0
 .../invalid_queryname_sort_order.sam               |    0
 .../ValidateSamFileTest/mate_cigar_and_no_oqs.sam  |    0
 .../ValidateSamFileTest/mate_cigar_and_oqs.sam     |    0
 .../ValidateSamFileTest/missing_fields.sam         |    0
 .../ValidateSamFileTest/missing_platform_unit.sam  |    0
 .../test_samfile_version_1pt5.bam                  |  Bin
 .../samtools/ValidateSamFileTest/truncated.bam     |  Bin
 .../ValidateSamFileTest/truncated_quals.sam        |    0
 .../samtools/ValidateSamFileTest/truncated_tag.sam |    0
 .../htsjdk/samtools/ValidateSamFileTest/valid.sam  |    0
 .../ValidateSamFileTest/zero_length_read.sam       |    0
 .../htsjdk/samtools/block_compressed.sam.gz        |  Bin
 .../test/resources}/htsjdk/samtools/compressed.bam |  Bin
 .../resources}/htsjdk/samtools/compressed.sam.gz   |  Bin
 .../htsjdk/samtools/coordinate_sorted.sam          |    0
 .../cram/CRAMException/testContigNotInRef.cram     |  Bin
 .../cram/CRAMException/testContigNotInRef.cram.bai |  Bin
 .../cram/CRAMException/testContigNotInRef.dict     |    0
 .../cram/CRAMException/testContigNotInRef.fa       |    0
 .../cram/CRAMException/testContigNotInRef.fa.fai   |    0
 .../cram/CRAMException/testContigNotInRef.fasta    |    0
 ...78.20.21.1-100.100-SeqsPerSlice.0-unMapped.cram |  Bin 0 -> 47259 bytes
 ....21.1-100.100-SeqsPerSlice.0-unMapped.cram.crai |  Bin 0 -> 81 bytes
 ...78.20.21.1-100.100-SeqsPerSlice.1-unMapped.cram |  Bin 0 -> 47796 bytes
 ....21.1-100.100-SeqsPerSlice.1-unMapped.cram.crai |  Bin 0 -> 94 bytes
 ....20.21.1-100.100-SeqsPerSlice.500-unMapped.cram |  Bin 0 -> 77664 bytes
 ...1.1-100.100-SeqsPerSlice.500-unMapped.cram.crai |  Bin 0 -> 124 bytes
 .../htsjdk/samtools/cram/auxf#values.2.1.cram      |  Bin
 .../htsjdk/samtools/cram/auxf#values.3.0.cram      |  Bin
 .../htsjdk/samtools/cram/auxf#values.sam           |    0
 .../test/resources}/htsjdk/samtools/cram/auxf.fa   |    0
 .../resources}/htsjdk/samtools/cram/auxf.fa.fai    |    0
 .../resources}/htsjdk/samtools/cram/auxf.fasta     |    0
 .../htsjdk/samtools/cram/c1#bounds.2.1.cram        |  Bin
 .../htsjdk/samtools/cram/c1#bounds.3.0.cram        |  Bin
 .../resources}/htsjdk/samtools/cram/c1#bounds.sam  |    0
 .../htsjdk/samtools/cram/c1#clip.2.1.cram          |  Bin
 .../htsjdk/samtools/cram/c1#clip.3.0.cram          |  Bin
 .../resources}/htsjdk/samtools/cram/c1#clip.sam    |    0
 .../htsjdk/samtools/cram/c1#noseq.2.1.cram         |  Bin
 .../htsjdk/samtools/cram/c1#noseq.3.0.cram         |  Bin
 .../resources}/htsjdk/samtools/cram/c1#noseq.sam   |    0
 .../htsjdk/samtools/cram/c1#pad1.2.1.cram          |  Bin
 .../htsjdk/samtools/cram/c1#pad1.3.0.cram          |  Bin
 .../resources}/htsjdk/samtools/cram/c1#pad1.sam    |    0
 .../htsjdk/samtools/cram/c1#pad2.2.1.cram          |  Bin
 .../htsjdk/samtools/cram/c1#pad2.3.0.cram          |  Bin
 .../resources}/htsjdk/samtools/cram/c1#pad2.sam    |    0
 .../htsjdk/samtools/cram/c1#pad3.2.1.cram          |  Bin
 .../htsjdk/samtools/cram/c1#pad3.3.0.cram          |  Bin
 .../resources}/htsjdk/samtools/cram/c1#pad3.sam    |    0
 .../htsjdk/samtools/cram/c1#unknown.2.1.cram       |  Bin
 .../htsjdk/samtools/cram/c1#unknown.3.0.cram       |  Bin
 .../resources}/htsjdk/samtools/cram/c1#unknown.sam |    0
 .../test/resources}/htsjdk/samtools/cram/c1.fa     |    0
 .../test/resources}/htsjdk/samtools/cram/c1.fa.fai |    0
 .../htsjdk/samtools/cram/c2#pad.2.1.cram           |  Bin
 .../htsjdk/samtools/cram/c2#pad.3.0.cram           |  Bin
 .../resources}/htsjdk/samtools/cram/ce#1.2.1.cram  |  Bin
 .../resources}/htsjdk/samtools/cram/ce#1.3.0.cram  |  Bin
 .../test/resources}/htsjdk/samtools/cram/ce#1.sam  |    0
 .../resources}/htsjdk/samtools/cram/ce#2.2.1.cram  |  Bin
 .../resources}/htsjdk/samtools/cram/ce#2.3.0.cram  |  Bin
 .../test/resources}/htsjdk/samtools/cram/ce#2.sam  |    0
 .../resources}/htsjdk/samtools/cram/ce#5.2.1.cram  |  Bin
 .../resources}/htsjdk/samtools/cram/ce#5.3.0.cram  |  Bin
 .../test/resources}/htsjdk/samtools/cram/ce#5.sam  |    0
 .../resources}/htsjdk/samtools/cram/ce#5b.2.1.cram |  Bin
 .../resources}/htsjdk/samtools/cram/ce#5b.3.0.cram |  Bin
 .../test/resources}/htsjdk/samtools/cram/ce#5b.sam |    0
 .../htsjdk/samtools/cram/ce#large_seq.2.1.cram     |  Bin
 .../htsjdk/samtools/cram/ce#large_seq.3.0.cram     |  Bin
 .../htsjdk/samtools/cram/ce#large_seq.sam          |    0
 .../htsjdk/samtools/cram/ce#supp.2.1.cram          |  Bin
 .../htsjdk/samtools/cram/ce#supp.3.0.cram          |  Bin
 .../resources}/htsjdk/samtools/cram/ce#supp.sam    |    0
 .../htsjdk/samtools/cram/ce#tag_depadded.2.1.cram  |  Bin
 .../htsjdk/samtools/cram/ce#tag_depadded.3.0.cram  |  Bin
 .../htsjdk/samtools/cram/ce#tag_depadded.sam       |    0
 .../htsjdk/samtools/cram/ce#tag_padded.2.1.cram    |  Bin
 .../htsjdk/samtools/cram/ce#tag_padded.3.0.cram    |  Bin
 .../htsjdk/samtools/cram/ce#tag_padded.sam         |    0
 .../htsjdk/samtools/cram/ce#unmap.2.1.cram         |  Bin
 .../htsjdk/samtools/cram/ce#unmap.3.0.cram         |  Bin
 .../resources}/htsjdk/samtools/cram/ce#unmap.sam   |    0
 .../htsjdk/samtools/cram/ce#unmap1.2.1.cram        |  Bin
 .../htsjdk/samtools/cram/ce#unmap1.3.0.cram        |  Bin
 .../resources}/htsjdk/samtools/cram/ce#unmap1.sam  |    0
 .../htsjdk/samtools/cram/ce#unmap2.2.1.cram        |  Bin
 .../htsjdk/samtools/cram/ce#unmap2.3.0.cram        |  Bin
 .../resources}/htsjdk/samtools/cram/ce#unmap2.sam  |    0
 .../test/resources}/htsjdk/samtools/cram/ce.fa     |    0
 .../test/resources}/htsjdk/samtools/cram/ce.fa.fai |    0
 .../htsjdk/samtools/cram/cramQueryTest.cram        |  Bin 0 -> 2863 bytes
 .../htsjdk/samtools/cram/cramQueryTest.cram.bai    |  Bin 0 -> 336 bytes
 .../htsjdk/samtools/cram/cramQueryTestEmpty.cram   |  Bin 0 -> 2183 bytes
 .../samtools/cram/cramQueryTestEmpty.cram.bai      |  Bin 0 -> 48 bytes
 .../htsjdk/samtools/cram/cramQueryWithBAI.cram     |  Bin 0 -> 8947 bytes
 .../htsjdk/samtools/cram/cramQueryWithBAI.cram.bai |  Bin 0 -> 152 bytes
 .../htsjdk/samtools/cram/cramQueryWithCRAI.cram    |  Bin 0 -> 4131 bytes
 .../samtools/cram/cramQueryWithCRAI.cram.crai      |  Bin 0 -> 59 bytes
 .../resources}/htsjdk/samtools/cram/fieldarith.sam |    0
 .../samtools/cram/human_g1k_v37.20.21.1-100.dict   |    3 +
 .../samtools/cram/human_g1k_v37.20.21.1-100.fasta  |   11 +
 .../cram/human_g1k_v37.20.21.1-100.fasta.fai       |    2 +
 .../cram/human_g1k_v37.20.21.10M-10M200k.dict      |    3 +
 .../cram/human_g1k_v37.20.21.10M-10M200k.fasta     | 5002 ++++++++++++++++++++
 .../cram/human_g1k_v37.20.21.10M-10M200k.fasta.fai |    2 +
 .../htsjdk/samtools/cram/io/bzip2-test.bz2         |  Bin 0 -> 56 bytes
 .../test/resources}/htsjdk/samtools/cram/test.cram |  Bin
 .../resources}/htsjdk/samtools/cram/test.cram.bai  |  Bin
 .../test/resources}/htsjdk/samtools/cram/test.dict |    0
 .../test/resources}/htsjdk/samtools/cram/test.fa   |    0
 .../resources}/htsjdk/samtools/cram/test.fa.fai    |    0
 .../resources}/htsjdk/samtools/cram/test.fasta     |    0
 .../resources}/htsjdk/samtools/cram/test2.cram     |  Bin
 .../resources}/htsjdk/samtools/cram/test2.dict     |    0
 .../test/resources}/htsjdk/samtools/cram/test2.fa  |    0
 .../resources}/htsjdk/samtools/cram/test2.fa.fai   |    0
 .../resources}/htsjdk/samtools/cram/test2.fasta    |    0
 .../htsjdk/samtools/cram/xx#blank.2.1.cram         |  Bin
 .../htsjdk/samtools/cram/xx#blank.3.0.cram         |  Bin
 .../resources}/htsjdk/samtools/cram/xx#blank.sam   |    0
 .../htsjdk/samtools/cram/xx#large_aux.2.1.cram     |  Bin
 .../htsjdk/samtools/cram/xx#large_aux.3.0.cram     |  Bin
 .../htsjdk/samtools/cram/xx#large_aux.sam          |    0
 .../htsjdk/samtools/cram/xx#large_aux2.2.1.cram    |  Bin
 .../htsjdk/samtools/cram/xx#large_aux2.3.0.cram    |  Bin
 .../htsjdk/samtools/cram/xx#large_aux2.sam         |    0
 .../htsjdk/samtools/cram/xx#minimal.2.1.cram       |  Bin
 .../htsjdk/samtools/cram/xx#minimal.3.0.cram       |  Bin
 .../resources}/htsjdk/samtools/cram/xx#minimal.sam |    0
 .../htsjdk/samtools/cram/xx#pair.2.1.cram          |  Bin
 .../htsjdk/samtools/cram/xx#pair.3.0.cram          |  Bin
 .../resources}/htsjdk/samtools/cram/xx#pair.sam    |    0
 .../resources}/htsjdk/samtools/cram/xx#rg.2.1.cram |  Bin
 .../resources}/htsjdk/samtools/cram/xx#rg.3.0.cram |  Bin
 .../test/resources}/htsjdk/samtools/cram/xx#rg.sam |    0
 .../htsjdk/samtools/cram/xx#tlen.2.1.cram          |  Bin
 .../htsjdk/samtools/cram/xx#tlen.3.0.cram          |  Bin
 .../htsjdk/samtools/cram/xx#tlen2.2.1.cram         |  Bin
 .../htsjdk/samtools/cram/xx#tlen2.3.0.cram         |  Bin
 .../htsjdk/samtools/cram/xx#triplet.2.1.cram       |  Bin
 .../htsjdk/samtools/cram/xx#triplet.3.0.cram       |  Bin
 .../resources}/htsjdk/samtools/cram/xx#triplet.sam |    0
 .../htsjdk/samtools/cram/xx#unsorted.2.1.cram      |  Bin
 .../htsjdk/samtools/cram/xx#unsorted.3.0.cram      |  Bin
 .../htsjdk/samtools/cram/xx#unsorted.sam           |    0
 .../test/resources}/htsjdk/samtools/cram/xx.fa     |    0
 .../test/resources}/htsjdk/samtools/cram/xx.fa.fai |    0
 .../htsjdk/samtools/cram_query_sorted.cram         |  Bin
 .../htsjdk/samtools/cram_query_sorted.fasta        |    0
 .../htsjdk/samtools/cram_query_sorted.fasta.fai    |    0
 .../resources}/htsjdk/samtools/cram_tlen.fasta     |    0
 .../resources}/htsjdk/samtools/cram_tlen.fasta.fai |    0
 .../htsjdk/samtools/cram_tlen_reads.sorted.sam     |    0
 .../htsjdk/samtools/cram_with_bai_index.cram       |  Bin
 .../htsjdk/samtools/cram_with_bai_index.cram.bai   |  Bin
 .../htsjdk/samtools/cram_with_crai_index.cram      |  Bin
 .../htsjdk/samtools/cram_with_crai_index.cram.crai |  Bin
 .../test/resources}/htsjdk/samtools/empty.bai      |  Bin
 .../test/resources}/htsjdk/samtools/empty.bam      |  Bin
 .../htsjdk/samtools/empty_no_empty_gzip_block.bai  |  Bin
 .../htsjdk/samtools/empty_no_empty_gzip_block.bam  |  Bin
 src/test/resources/htsjdk/samtools/hg19mini.dict   |    5 +
 .../test/resources}/htsjdk/samtools/hg19mini.fasta |    0
 .../resources}/htsjdk/samtools/hg19mini.fasta.fai  |    0
 .../intervallist/IntervalListFromVCFTest.vcf       |    0
 .../IntervalListFromVCFTestComp.interval_list      |    0
 ...ntervalListFromVCFTestCompInverse.interval_list |    0
 ...lListFromVCFTestCompInverseManual.interval_list |    0
 .../intervallist/IntervalListFromVCFTestManual.vcf |    0
 ...IntervalListFromVCFTestManualComp.interval_list |    0
 .../IntervalListchr123_empty.interval_list         |    0
 .../test/resources}/htsjdk/samtools/inttest.bam    |  Bin
 .../test/resources}/htsjdk/samtools/inttest.sam    |    0
 .../resources}/htsjdk/samtools/io/5newline5.txt    |    0
 .../test/resources}/htsjdk/samtools/io/empty.txt   |    0
 .../resources}/htsjdk/samtools/io/slurptest.txt    |    0
 .../test/resources}/htsjdk/samtools/issue76.bam    |  Bin
 .../resources}/htsjdk/samtools/issue76.bam.bai     |  Bin
 .../htsjdk/samtools/liftover/hg18ToHg19.over.chain |    0
 .../htsjdk/samtools/metrics/metricsOne.metrics     |    0
 .../htsjdk/samtools/metrics/metricsOneCopy.metrics |    0
 .../metrics/metricsOneModifiedHistogram.metrics    |    0
 .../metrics/metricsOneModifiedMetrics.metrics      |    0
 .../htsjdk/samtools/queryname_sorted.sam           |    0
 .../htsjdk/samtools/readWithBadRname.sam           |    0
 .../reference/Homo_sapiens_assembly18.fasta.fai    |    0
 .../reference/Homo_sapiens_assembly18.trimmed.dict |    0
 .../Homo_sapiens_assembly18.trimmed.fasta          |    0
 .../Homo_sapiens_assembly18.trimmed.fasta.fai      |    0
 .../Homo_sapiens_assembly18.trimmed.nodict.fasta   |    0
 ...omo_sapiens_assembly18.trimmed.nodict.fasta.fai |    0
 .../Homo_sapiens_assembly18.trimmed.noindex.fasta  |    0
 .../reference_with_trailing_whitespace.dict        |    0
 .../reference_with_trailing_whitespace.fasta       |    0
 .../htsjdk/samtools/reference/testing.fai          |    0
 .../test/resources}/htsjdk/samtools/roundtrip.sam  |    0
 .../test/resources}/htsjdk/samtools/samFilter01.js |    0
 .../test/resources}/htsjdk/samtools/samFilter02.js |    0
 .../samtools/seekablestream/megabyteZeros.dat      |  Bin
 .../htsjdk/samtools/seekablestream/seekTest.txt    |    0
 .../htsjdk/samtools/sequenceWithSpace.bam          |  Bin
 .../htsjdk/samtools/sequenceWithSpace.sam          |    0
 .../htsjdk/samtools/serialization_test.bam         |  Bin
 .../htsjdk/samtools/serialization_test.bam.bai     |  Bin
 .../htsjdk/samtools/serialization_test.sam         |    0
 .../htsjdk/samtools/sra/test_archive.sra           |  Bin
 .../resources}/htsjdk/samtools/uncompressed.sam    |    0
 .../test/resources}/htsjdk/samtools/unsorted.sam   |    0
 .../5k-30BB2AAXX.3.aligned.sam.fastq               |    0
 .../illumina-as-standard.bam                       |  Bin
 .../QualityEncodingDetectorTest/s_1_sequence.txt   |    0
 .../solexa-as-standard.bam                         |  Bin
 .../solexa_full_range_as_solexa.fastq              |    0
 .../util/QualityEncodingDetectorTest/unmapped.sam  |    0
 .../htsjdk/samtools/util/defective_bgzf.bam        |  Bin
 .../htsjdk/samtools/util/no_bgzf_terminator.bam    |  Bin
 .../htsjdk/samtools/util/whitespace_text_file.txt  |    0
 .../test/resources}/htsjdk/tribble/Tb.vcf.idx      |  Bin
 .../resources}/htsjdk/tribble/basicDbSNP.dbsnp     |    0
 ....deletions.10kbp.het.gq99.hand_curated.hg19.bed |    0
 ...ions.10kbp.het.gq99.hand_curated.hg19_fixed.bed |    0
 .../htsjdk/tribble/bed/Unigene.sample.bed          |    0
 .../htsjdk/tribble/bed/disconcontigs.bed           |    0
 .../resources}/htsjdk/tribble/bed/unsorted.bed     |    0
 .../resources}/htsjdk/tribble/complexExample.vcf   |    0
 .../test/resources}/htsjdk/tribble/fakeBed.bed     |    0
 .../test/resources}/htsjdk/tribble/fakeVCF.vcf.gz  |  Bin
 .../resources}/htsjdk/tribble/fakeVCF.vcf.gz.tbi   |  Bin
 .../resources}/htsjdk/tribble/gwas/smallp.gwas     |    0
 .../htsjdk/tribble/index/chrY_Y4_small.bed         |    0
 .../test/resources}/htsjdk/tribble/large.txt       |    0
 .../test/resources}/htsjdk/tribble/sites.bed       |    0
 .../test/resources}/htsjdk/tribble/soapsnp.txt     |    0
 .../resources}/htsjdk/tribble/split/test.bin.part1 |  Bin
 .../resources}/htsjdk/tribble/split/test.bin.part2 |  Bin
 .../resources}/htsjdk/tribble/split/test.bin.part3 |  Bin
 .../resources}/htsjdk/tribble/split/test.bin.split |    0
 .../tabix/YRI.trio.2010_07.indel.sites.vcf.gz      |  Bin
 .../tabix/YRI.trio.2010_07.indel.sites.vcf.gz.tbi  |  Bin
 .../htsjdk/tribble/tabix/bigger.vcf.gz.tbi         |  Bin
 .../htsjdk/tribble/tabix/testTabixIndex.vcf        |    0
 .../htsjdk/tribble/tabix/testTabixIndex.vcf.gz     |  Bin
 .../resources}/htsjdk/tribble/tabix/trioDup.vcf    |    0
 .../resources}/htsjdk/tribble/tabix/trioDup.vcf.gz |  Bin
 .../htsjdk/tribble/tabix/trioDup.vcf.gz.tbi        |  Bin
 .../test/resources}/htsjdk/tribble/test.bed        |    0
 .../test/resources}/htsjdk/tribble/test.gff        |    0
 .../resources}/htsjdk/tribble/test.tabix.bed.gz    |  Bin
 .../htsjdk/tribble/test.tabix.bed.gz.tbi           |  Bin
 .../test/resources}/htsjdk/tribble/test.vcf        |    0
 .../test/resources}/htsjdk/tribble/test.vcf.gz     |  Bin
 .../resources}/htsjdk/tribble/testGeliText.txt     |    0
 .../htsjdk/tribble/testIntervalList.list           |    0
 .../test/resources}/htsjdk/tribble/tmp/.gitignore  |    0
 .../test/resources}/htsjdk/tribble/trio.vcf        |    0
 .../test/resources}/htsjdk/tribble/trio.vcf.idx    |  Bin
 .../test/resources}/htsjdk/tribble/trioDup.vcf     |    0
 .../test/resources}/htsjdk/tribble/vcfexample.vcf  |    0
 .../resources}/htsjdk/tribble/vcfexample.vcf.gz    |  Bin
 .../htsjdk/tribble/vcfexample.vcf.gz.tbi           |  Bin
 .../test/resources}/htsjdk/variant/HiSeq.10000.vcf |    0
 ...baseline.20111114.both.exome.genotypes.1000.vcf |    0
 .../resources/htsjdk/variant/VCF4HeaderTest.vcf    |   42 +
 .../resources}/htsjdk/variant/VQSR.mixedTest.recal |    0
 .../test/resources}/htsjdk/variant/breakpoint.vcf  |    0
 .../htsjdk/variant/dbsnp_135.b37.1000.vcf          |    0
 .../htsjdk/variant/diagnosis_targets_testfile.vcf  |    0
 .../test/resources}/htsjdk/variant/ex2.bgzf.bcf    |  Bin
 .../resources}/htsjdk/variant/ex2.bgzf.bcf.csi     |  Bin
 .../resources}/htsjdk/variant/ex2.uncompressed.bcf |  Bin
 .../test/resources}/htsjdk/variant/ex2.vcf         |    0
 .../htsjdk/variant/serialization_test.bcf          |  Bin
 .../htsjdk/variant/serialization_test.bcf.idx      |  Bin
 .../Homo_sapiens_assembly18.trimmed.dict           |    0
 .../Homo_sapiens_assembly18.trimmed.fasta          |    0
 .../Homo_sapiens_assembly18.trimmed.fasta.fai      |    0
 .../ScreenSamReads.100.input.sam                   |    0
 .../empty.interval_list                            |    0
 .../SamSequenceDictionaryExtractor/test1.dict      |    0
 .../utils/SamSequenceDictionaryExtractor/test1.vcf |    0
 .../SamSequenceDictionaryExtractor/test1.vcf.idx   |  Bin
 .../test1_comp.interval_list                       |    0
 .../test2_comp.interval_list                       |    0
 .../test3_comp.interval_list                       |    0
 .../test4_comp.interval_list                       |    0
 .../resources}/htsjdk/variant/variantFilter01.js   |    0
 .../resources}/htsjdk/variant/variantFilter02.js   |    0
 .../Intensities/BaseCalls/L001/C1.1/s_1_1101.bcl   |  Bin
 .../Intensities/BaseCalls/L001/C10.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C11.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C12.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C13.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C14.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C15.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C16.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C17.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C18.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C19.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C2.1/s_1_1101.bcl   |  Bin
 .../Intensities/BaseCalls/L001/C20.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C21.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C22.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C23.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C24.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C25.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C26.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C27.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C28.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C29.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C3.1/s_1_1101.bcl   |  Bin
 .../Intensities/BaseCalls/L001/C30.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C31.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C32.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C33.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C34.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C35.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C36.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C37.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C38.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C39.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C4.1/s_1_1101.bcl   |  Bin
 .../Intensities/BaseCalls/L001/C40.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C41.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C42.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C43.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C44.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C45.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C46.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C47.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C48.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C49.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C5.1/s_1_1101.bcl   |  Bin
 .../Intensities/BaseCalls/L001/C50.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C51.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C52.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C53.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C54.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C55.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C56.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C57.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C58.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C59.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C6.1/s_1_1101.bcl   |  Bin
 .../Intensities/BaseCalls/L001/C60.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C61.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C62.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C63.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C64.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C65.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C66.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C7.1/s_1_1101.bcl   |  Bin
 .../Intensities/BaseCalls/L001/C8.1/s_1_1101.bcl   |  Bin
 .../Intensities/BaseCalls/L001/C9.1/s_1_1101.bcl   |  Bin
 .../Intensities/BaseCalls/L001/s_1_1101.filter     |  Bin
 .../125T125T/Data/Intensities/L001/s_1_1101.clocs  |  Bin
 .../Intensities/BaseCalls/L001/C1.1/s_1_1101.bcl   |  Bin
 .../Intensities/BaseCalls/L001/C10.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C11.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C12.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C13.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C14.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C15.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C16.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C17.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C18.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C19.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C2.1/s_1_1101.bcl   |  Bin
 .../Intensities/BaseCalls/L001/C20.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C21.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C22.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C23.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C24.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C25.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C26.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C27.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C28.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C29.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C3.1/s_1_1101.bcl   |  Bin
 .../Intensities/BaseCalls/L001/C30.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C31.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C32.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C33.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C34.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C35.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C36.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C37.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C38.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C39.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C4.1/s_1_1101.bcl   |  Bin
 .../Intensities/BaseCalls/L001/C40.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C41.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C42.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C43.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C44.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C45.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C46.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C47.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C48.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C49.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C5.1/s_1_1101.bcl   |  Bin
 .../Intensities/BaseCalls/L001/C50.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C51.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C52.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C53.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C54.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C55.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C56.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C57.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C58.1/s_1_1101.bcl  |  Bin
 .../Intensities/BaseCalls/L001/C6.1/s_1_1101.bcl   |  Bin
 .../Intensities/BaseCalls/L001/C7.1/s_1_1101.bcl   |  Bin
 .../Intensities/BaseCalls/L001/C8.1/s_1_1101.bcl   |  Bin
 .../Intensities/BaseCalls/L001/C9.1/s_1_1101.bcl   |  Bin
 .../Intensities/BaseCalls/L001/s_1_1101.filter     |  Bin
 .../sf/picard/illumina/25T8B25T/sams/ACAGGTAT.sam  |    0
 .../sf/picard/illumina/25T8B25T/sams/ACAGTTGA.sam  |    0
 .../sf/picard/illumina/25T8B25T/sams/ACTAAGAC.sam  |    0
 .../sf/picard/illumina/25T8B25T/sams/AGGTCGCA.sam  |    0
 .../sf/picard/illumina/25T8B25T/sams/ATTATCAA.sam  |    0
 .../sf/picard/illumina/25T8B25T/sams/CAATAGTC.sam  |    0
 .../sf/picard/illumina/25T8B25T/sams/CAGCGGTA.sam  |    0
 .../sf/picard/illumina/25T8B25T/sams/CGCTATGT.sam  |    0
 .../sf/picard/illumina/25T8B25T/sams/CTGTAATC.sam  |    0
 .../sf/picard/illumina/25T8B25T/sams/GACCAGGA.sam  |    0
 .../sf/picard/illumina/25T8B25T/sams/GCCGTCGA.sam  |    0
 .../sf/picard/illumina/25T8B25T/sams/GCCTAGCC.sam  |    0
 .../net/sf/picard/illumina/25T8B25T/sams/N.sam     |    0
 .../sf/picard/illumina/25T8B25T/sams/TGCAAGTA.sam  |    0
 .../sf/picard/illumina/25T8B25T/sams/TGTAATCA.sam  |    0
 src/{tests => test}/resources/testng.xml           |    0
 .../test/resources}/tribble/tmp/.gitignore         |    0
 .../java/htsjdk/samtools/BAMCigarOverflowTest.java |   29 -
 .../java/htsjdk/samtools/BAMFileIndexTest.java     |  527 ---
 .../java/htsjdk/samtools/BAMIndexWriterTest.java   |  227 -
 .../java/htsjdk/samtools/BAMIteratorTest.java      |   71 -
 .../java/htsjdk/samtools/BAMRemoteFileTest.java    |  326 --
 .../java/htsjdk/samtools/CRAMComplianceTest.java   |  150 -
 .../samtools/CRAMContainerStreamWriterTest.java    |  184 -
 .../java/htsjdk/samtools/CRAMEdgeCasesTest.java    |  158 -
 .../java/htsjdk/samtools/CRAMFileBAIIndexTest.java |  294 --
 .../java/htsjdk/samtools/CRAMFileReaderTest.java   |  163 -
 .../java/htsjdk/samtools/CRAMFileWriterTest.java   |  282 --
 .../samtools/CRAMFileWriterWithIndexTest.java      |  203 -
 .../htsjdk/samtools/DownsamplingIteratorTests.java |   82 -
 .../java/htsjdk/samtools/SAMFileReaderTest.java    |  170 -
 .../htsjdk/samtools/SAMFileWriterFactoryTest.java  |  317 --
 .../java/htsjdk/samtools/SAMIntegerTagTest.java    |  329 --
 .../java/htsjdk/samtools/SAMRecordUnitTest.java    |  963 ----
 .../java/htsjdk/samtools/SAMTextWriterTest.java    |  107 -
 .../htsjdk/samtools/SamFileHeaderMergerTest.java   |  265 --
 src/tests/java/htsjdk/samtools/SamFilesTest.java   |   81 -
 src/tests/java/htsjdk/samtools/SamIndexesTest.java |  192 -
 .../java/htsjdk/samtools/SamReaderFactoryTest.java |  293 --
 .../java/htsjdk/samtools/SamReaderSortTest.java    |  102 -
 src/tests/java/htsjdk/samtools/SamSpecIntTest.java |   97 -
 src/tests/java/htsjdk/samtools/SamStreamsTest.java |   60 -
 .../SequenceNameTruncationAndValidationTest.java   |   98 -
 .../java/htsjdk/samtools/ValidateSamFileTest.java  |  471 --
 .../java/htsjdk/samtools/cram/CRAIIndexTest.java   |  213 -
 .../htsjdk/samtools/fastq/FastqWriterTest.java     |   73 -
 .../filter/JavascriptSamRecordFilterTest.java      |   74 -
 .../htsjdk/samtools/liftover/LiftOverTest.java     |  458 --
 .../htsjdk/samtools/metrics/MetricsFileTest.java   |  213 -
 .../samtools/reference/FastaSequenceFileTest.java  |   85 -
 .../samtools/reference/FastaSequenceIndexTest.java |  256 -
 .../reference/IndexedFastaSequenceFileTest.java    |  308 --
 .../ReferenceSequenceFileFactoryTests.java         |   39 -
 .../reference/ReferenceSequenceFileWalkerTest.java |   73 -
 .../seekablestream/SeekableBufferedStreamTest.java |  162 -
 .../seekablestream/SeekableFileStreamTest.java     |   52 -
 .../seekablestream/SeekableStreamFactoryTest.java  |   15 -
 .../java/htsjdk/samtools/sra/SRAAccessionTest.java |   29 -
 .../java/htsjdk/samtools/sra/SRAReferenceTest.java |   23 -
 src/tests/java/htsjdk/samtools/sra/SRATest.java    |  431 --
 .../util/BlockCompressedOutputStreamTest.java      |  112 -
 .../util/BlockCompressedTerminatorTest.java        |   59 -
 .../java/htsjdk/samtools/util/HistogramTest.java   |   97 -
 .../htsjdk/samtools/util/IntelDeflaterTest.java    |  130 -
 .../htsjdk/samtools/util/IntervalListTest.java     |  520 --
 .../java/htsjdk/samtools/util/IoUtilTest.java      |  190 -
 .../samtools/util/QualityEncodingDetectorTest.java |  109 -
 .../htsjdk/samtools/util/SamLocusIteratorTest.java |  231 -
 src/tests/java/htsjdk/tribble/TestUtils.java       |   28 -
 .../htsjdk/tribble/index/IndexFactoryTest.java     |  115 -
 .../htsjdk/tribble/index/tabix/TabixIndexTest.java |  143 -
 .../readers/AsynchronousLineReaderTest.java        |   34 -
 .../htsjdk/tribble/readers/LineReaderUtilTest.java |   27 -
 .../htsjdk/variant/PrintVariantsExampleTest.java   |   65 -
 src/tests/java/htsjdk/variant/VariantBaseTest.java |  278 --
 .../utils/SAMSequenceDictionaryExtractorTest.java  |   65 -
 .../GenotypeLikelihoodsUnitTest.java               |  336 --
 .../variantcontext/VariantContextTestProvider.java | 1017 ----
 .../variantcontext/VariantContextUnitTest.java     | 1450 ------
 .../FilteringVariantContextIteratorTest.java       |   88 -
 .../filter/JavascriptVariantFilterTest.java        |   72 -
 .../writer/TabixOnTheFlyIndexCreationTest.java     |   66 -
 .../variantcontext/writer/VCFWriterUnitTest.java   |  280 --
 .../VariantContextWriterBuilderUnitTest.java       |  377 --
 .../htsjdk/variant/vcf/AbstractVCFCodecTest.java   |   53 -
 .../java/htsjdk/variant/vcf/VCFHeaderUnitTest.java |  491 --
 1387 files changed, 39826 insertions(+), 32670 deletions(-)

diff --git a/.classpath b/.classpath
deleted file mode 100644
index 35d723d..0000000
--- a/.classpath
+++ /dev/null
@@ -1,10 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" path="src/java"/>
-	<classpathentry kind="src" path="src/tests/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
-	<classpathentry kind="lib" path="lib/testng/testng-5.5-jdk15.jar"/>
-	<classpathentry kind="lib" path="lib/commons-jexl-2.1.1.jar"/>
-	<classpathentry kind="lib" path="lib/snappy-java-1.0.3-rc3.jar"/>
-	<classpathentry kind="output" path="bin"/>
-</classpath>
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index 2475bc9..300af57 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -9,3 +9,5 @@ Explain the **motivation** for making this change. What existing problem does th
 - [ ] New tests covering changes and new functionality
 - [ ] All tests passing
 - [ ] Extended the README / documentation, if necessary
+- [ ] Is not backward compatible (breaks binary or source compatibility)
+
diff --git a/.gitignore b/.gitignore
index c1f7e90..a20c2c9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,16 +1,19 @@
 htsjdk.iws
 .command_tmp
-classes
-testclasses
-javadoc
-dist
-contracts
 atlassian-ide-plugin.xml
-intellij.testclasses
-intellij.classes
 /htsjdk.version.properties
-/bin
 /test-output
-target
-.idea/libraries
-.idea/workspace.xml
\ No newline at end of file
+
+#intellij
+.idea/
+src/htsjdk.iml
+*.iml
+*.ipr
+*.iws
+
+
+
+
+#gradle stuff
+.gradle/
+build/
diff --git a/.idea/.name b/.idea/.name
deleted file mode 100644
index 9ca710b..0000000
--- a/.idea/.name
+++ /dev/null
@@ -1 +0,0 @@
-htsjdk
\ No newline at end of file
diff --git a/.idea/compiler.xml b/.idea/compiler.xml
deleted file mode 100644
index a852314..0000000
--- a/.idea/compiler.xml
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project version="4">
-  <component name="CompilerConfiguration">
-    <option name="DEFAULT_COMPILER" value="Javac" />
-    <resourceExtensions />
-    <wildcardResourcePatterns>
-      <entry name="!?*.java" />
-      <entry name="!?*.form" />
-      <entry name="!?*.class" />
-      <entry name="!?*.groovy" />
-      <entry name="!?*.scala" />
-      <entry name="!?*.flex" />
-      <entry name="!?*.kt" />
-      <entry name="!?*.clj" />
-      <entry name="!?*.aj" />
-    </wildcardResourcePatterns>
-    <annotationProcessing>
-      <profile default="true" name="Default" enabled="false">
-        <processorPath useClasspath="true" />
-      </profile>
-    </annotationProcessing>
-  </component>
-</project>
\ No newline at end of file
diff --git a/.idea/copyright/profiles_settings.xml b/.idea/copyright/profiles_settings.xml
deleted file mode 100644
index e7bedf3..0000000
--- a/.idea/copyright/profiles_settings.xml
+++ /dev/null
@@ -1,3 +0,0 @@
-<component name="CopyrightManager">
-  <settings default="" />
-</component>
\ No newline at end of file
diff --git a/.idea/encodings.xml b/.idea/encodings.xml
deleted file mode 100644
index d821048..0000000
--- a/.idea/encodings.xml
+++ /dev/null
@@ -1,4 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project version="4">
-  <component name="Encoding" useUTFGuessing="true" native2AsciiForPropertiesFiles="false" />
-</project>
\ No newline at end of file
diff --git a/.idea/modules.xml b/.idea/modules.xml
deleted file mode 100644
index 258602b..0000000
--- a/.idea/modules.xml
+++ /dev/null
@@ -1,9 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project version="4">
-  <component name="ProjectModuleManager">
-    <modules>
-      <module fileurl="file://$PROJECT_DIR$/.idea/modules/htsjdk.iml" filepath="$PROJECT_DIR$/.idea/modules/htsjdk.iml" />
-      <module fileurl="file://$PROJECT_DIR$/.idea/modules/htsjdk-build.iml" filepath="$PROJECT_DIR$/.idea/modules/htsjdk-build.iml" />
-    </modules>
-  </component>
-</project>
\ No newline at end of file
diff --git a/.idea/modules/htsjdk-build.iml b/.idea/modules/htsjdk-build.iml
deleted file mode 100644
index 5ebb09a..0000000
--- a/.idea/modules/htsjdk-build.iml
+++ /dev/null
@@ -1,81 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<module external.linked.project.id="htsjdk-build" external.linked.project.path="$MODULE_DIR$/../../project" external.root.project.path="$MODULE_DIR$/../.." external.system.id="SBT" sbt.imports="sbt._, Keys._, _root_.de.johoop.testngplugin.TestNGPlugin._, _root_.sbtassembly.Plugin._, _root_.com.typesafe.sbt.SbtPgp.autoImport._, _root_.sbt.plugins.IvyPlugin, _root_.sbt.plugins.JvmPlugin, _root_.sbt.plugins.CorePlugin, _root_.sbt.plugins.JUnitXmlReportPlugin, _root_.com.typesafe.sbt.SbtPgp" [...]
-  <component name="NewModuleRootManager" inherit-compiler-output="false">
-    <output url="file://$MODULE_DIR$/../../project/target/idea-classes" />
-    <output-test url="file://$MODULE_DIR$/../../project/target/idea-test-classes" />
-    <exclude-output />
-    <content url="file://$MODULE_DIR$/../../project">
-      <sourceFolder url="file://$MODULE_DIR$/../../project" isTestSource="false" />
-      <excludeFolder url="file://$MODULE_DIR$/../../project/project/target" />
-      <excludeFolder url="file://$MODULE_DIR$/../../project/target" />
-    </content>
-    <orderEntry type="inheritedJdk" />
-    <orderEntry type="sourceFolder" forTests="false" />
-    <orderEntry type="module-library">
-      <library name="SBT: sbt-and-plugins">
-        <CLASSES>
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/compiler-ivy-integration-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.ivy2/cache/net.databinder/dispatch-futures_2.10/jars/dispatch-futures_2.10-0.8.10.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/lib/scala-reflect.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/relation-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/apply-macro-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/jline-2.11.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/lib/jansi.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/actions-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/tasks-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.ivy2/cache/commons-codec/commons-codec/jars/commons-codec-1.4.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/testing-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/completion-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/cache-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/test-interface-1.0.jar!/" />
-          <root url="jar://$USER_HOME$/.ivy2/cache/org.apache.httpcomponents/httpclient/jars/httpclient-4.1.3.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/classpath-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/compile-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/cross-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.ivy2/cache/org.scala-lang/scala-actors/jars/scala-actors-2.10.2.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/sbinary_2.10-0.4.2.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/persist-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/compiler-integration-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/xsbti/interface-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.ivy2/cache/commons-logging/commons-logging/jars/commons-logging-1.1.1.jar!/" />
-          <root url="jar://$USER_HOME$/.ivy2/cache/org.bouncycastle/bcprov-jdk15on/jars/bcprov-jdk15on-1.51.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/sbt-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.ivy2/cache/com.jsuereth/pgp-library_2.10/jars/pgp-library_2.10-1.0.0.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/process-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/jsch-0.1.46.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/logic-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/lib/scala-library.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/lib/jline.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/classfile-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/control-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/main-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.ivy2/cache/scala_2.10/sbt_0.13/com.jsuereth/sbt-pgp/jars/sbt-pgp-1.0.0.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/io-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/incremental-compiler-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/run-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/collections-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/scala-reflect-2.10.4.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/task-system-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.ivy2/cache/net.databinder/dispatch-http_2.10/jars/dispatch-http_2.10-0.8.10.jar!/" />
-          <root url="jar://$USER_HOME$/.ivy2/cache/net.databinder/dispatch-core_2.10/jars/dispatch-core_2.10-0.8.10.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/api-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/lib/scala-compiler.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/ivy-2.3.0.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/tracking-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/launcher-interface-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.ivy2/cache/org.bouncycastle/bcpg-jdk15on/jars/bcpg-jdk15on-1.51.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/ivy-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.ivy2/cache/org.apache.httpcomponents/httpcore/jars/httpcore-4.1.4.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/test-agent-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/command-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/logging-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.ivy2/cache/scala_2.10/sbt_0.13/com.eed3si9n/sbt-assembly/jars/sbt-assembly-0.11.2.jar!/" />
-          <root url="jar://$USER_HOME$/.sbt/boot/scala-2.10.4/org.scala-sbt/sbt/0.13.5/main-settings-0.13.5.jar!/" />
-          <root url="jar://$USER_HOME$/.ivy2/cache/scala_2.10/sbt_0.13/de.johoop/sbt-testng-plugin/jars/sbt-testng-plugin-3.0.2.jar!/" />
-        </CLASSES>
-        <JAVADOC />
-        <SOURCES />
-      </library>
-    </orderEntry>
-  </component>
-</module>
\ No newline at end of file
diff --git a/.idea/modules/htsjdk.iml b/.idea/modules/htsjdk.iml
deleted file mode 100644
index 57d35da..0000000
--- a/.idea/modules/htsjdk.iml
+++ /dev/null
@@ -1,44 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<module external.linked.project.id="htsjdk" external.linked.project.path="$MODULE_DIR$/../.." external.root.project.path="$MODULE_DIR$/../.." external.system.id="SBT" type="JAVA_MODULE" version="4">
-  <component name="NewModuleRootManager" inherit-compiler-output="false">
-    <output url="file://$MODULE_DIR$/../../target/classes" />
-    <output-test url="file://$MODULE_DIR$/../../target/test-classes" />
-    <exclude-output />
-    <content url="file://$MODULE_DIR$/../..">
-      <sourceFolder url="file://$MODULE_DIR$/../../src/java" isTestSource="false" />
-      <sourceFolder url="file://$MODULE_DIR$/../../src/main/scala" isTestSource="false" />
-      <sourceFolder url="file://$MODULE_DIR$/../../target/src_managed/main" isTestSource="false" />
-      <sourceFolder url="file://$MODULE_DIR$/../../src/test/scala" isTestSource="true" />
-      <sourceFolder url="file://$MODULE_DIR$/../../src/tests" isTestSource="true" />
-      <sourceFolder url="file://$MODULE_DIR$/../../target/src_managed/test" isTestSource="true" />
-      <sourceFolder url="file://$MODULE_DIR$/../../src/main/resources" type="java-resource" />
-      <sourceFolder url="file://$MODULE_DIR$/../../target/resource_managed/main" type="java-resource" />
-      <sourceFolder url="file://$MODULE_DIR$/../../src/test/resources" type="java-test-resource" />
-      <sourceFolder url="file://$MODULE_DIR$/../../target/resource_managed/test" type="java-test-resource" />
-      <excludeFolder url="file://$MODULE_DIR$/../../target" />
-    </content>
-    <orderEntry type="inheritedJdk" />
-    <orderEntry type="sourceFolder" forTests="false" />
-    <orderEntry type="library" name="SBT: com.beust:jcommander:1.27:jar" level="project" />
-    <orderEntry type="library" name="SBT: commons-logging:commons-logging:1.2:jar" level="project" />
-    <orderEntry type="library" scope="TEST" name="SBT: de.johoop:sbt-testng-interface_2.11:3.0.2:jar" level="project" />
-    <orderEntry type="library" name="SBT: org.apache.commons:commons-jexl:2.1.1:jar" level="project" />
-    <orderEntry type="library" name="SBT: org.beanshell:bsh:2.0b4:jar" level="project" />
-    <orderEntry type="library" name="SBT: org.scala-lang:scala-library:2.11.4:jar" level="project" />
-    <orderEntry type="library" name="SBT: org.testng:testng:6.8.8:jar" level="project" />
-    <orderEntry type="library" name="SBT: org.xerial.snappy:snappy-java:1.1.1.3:jar" level="project" />
-    <orderEntry type="library" scope="TEST" name="SBT: org.yaml:snakeyaml:1.12:jar" level="project" />
-    <orderEntry type="module-library">
-      <library name="SBT: unmanaged-jars">
-        <CLASSES>
-          <root url="jar://$MODULE_DIR$/../../lib/commons-logging-1.1.1.jar!/" />
-          <root url="jar://$MODULE_DIR$/../../lib/snappy-java-1.0.3-rc3.jar!/" />
-          <root url="jar://$MODULE_DIR$/../../lib/commons-jexl-2.1.1.jar!/" />
-          <root url="jar://$MODULE_DIR$/../../lib/ngs-java-1.2.2.jar!/" />
-        </CLASSES>
-        <JAVADOC />
-        <SOURCES />
-      </library>
-    </orderEntry>
-  </component>
-</module>
diff --git a/.idea/sbt.xml b/.idea/sbt.xml
deleted file mode 100644
index 1ed7562..0000000
--- a/.idea/sbt.xml
+++ /dev/null
@@ -1,22 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project version="4">
-  <component name="ScalaSbtSettings">
-    <option name="linkedExternalProjectsSettings">
-      <SbtProjectSettings>
-        <option name="externalProjectPath" value="$PROJECT_DIR$" />
-        <option name="jdk" value="1.6" />
-        <option name="modules">
-          <set>
-            <option value="$PROJECT_DIR$" />
-            <option value="$PROJECT_DIR$/project" />
-          </set>
-        </option>
-        <option name="sbtVersion" value="0.13.5" />
-      </SbtProjectSettings>
-      <SbtProjectSettings>
-        <option name="externalProjectPath" value="C:\workspace\htsjdk" />
-        <option name="useAutoImport" value="true" />
-      </SbtProjectSettings>
-    </option>
-  </component>
-</project>
\ No newline at end of file
diff --git a/.idea/scala_compiler.xml b/.idea/scala_compiler.xml
deleted file mode 100644
index 90bcca7..0000000
--- a/.idea/scala_compiler.xml
+++ /dev/null
@@ -1,6 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project version="4">
-  <component name="ScalaCompilerConfiguration">
-    <profile name="SBT 1" modules="htsjdk" />
-  </component>
-</project>
\ No newline at end of file
diff --git a/.idea/scopes/scope_settings.xml b/.idea/scopes/scope_settings.xml
deleted file mode 100644
index 922003b..0000000
--- a/.idea/scopes/scope_settings.xml
+++ /dev/null
@@ -1,5 +0,0 @@
-<component name="DependencyValidationManager">
-  <state>
-    <option name="SKIP_IMPORT_STATEMENTS" value="false" />
-  </state>
-</component>
\ No newline at end of file
diff --git a/.idea/vcs.xml b/.idea/vcs.xml
deleted file mode 100644
index 8306744..0000000
--- a/.idea/vcs.xml
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project version="4">
-  <component name="VcsDirectoryMappings">
-    <mapping directory="" vcs="Git" />
-    <mapping directory="$PROJECT_DIR$" vcs="Git" />
-  </component>
-</project>
\ No newline at end of file
diff --git a/.project b/.project
deleted file mode 100644
index d179654..0000000
--- a/.project
+++ /dev/null
@@ -1,17 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>htsjdk</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-	</natures>
-</projectDescription>
diff --git a/.travis.yml b/.travis.yml
index 1f5abbe..1e92599 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,19 +1,20 @@
 language: java
+dist: trusty
 sudo: true
+before_cache:
+  - rm -f $HOME/.gradle/caches/modules-2/modules-2.lock
+cache:
+  directories:
+    - $HOME/.gradle/caches/
+    - $HOME/.gradle/wrapper/
+    - $HOME/.m2
 jdk:
   - oraclejdk8
-install: ant
-script: ant all test sra-test
+script: ./gradlew jacocoTestReport testSRA;
 after_success:
   - echo "TRAVIS_BRANCH='$TRAVIS_BRANCH'";
     echo "JAVA_HOME='$JAVA_HOME'";
-    if [ "$TRAVIS_BRANCH" == "master" ] && [ "$JAVA_HOME" == "/usr/lib/jvm/java-8-oracle" ]; then
-      sbt \
-        'set buildSnapshot := true' \
-        'set javacOptions in (Compile, doc) ++= Seq("-quiet")' \
-        'set test in publish := {}' \
-        'set resolvers += Resolver.url("bintray-sbt-plugin-releases", url("http://dl.bintray.com/content/sbt/sbt-plugin-releases"))(Resolver.ivyStylePatterns)' \
-        'set publishTo := Option("artifactory-snapshots-publish" at "https://artifactory.broadinstitute.org/artifactory/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)' \
-        "set credentials += Credentials(\"Artifactory Realm\", \"artifactory.broadinstitute.org\", \"${ARTIFACTORY_USERNAME}\", \"${ARTIFACTORY_PASSWORD}\")" \
-        publish;
+    ./gradlew coveralls;
+    if [ "$TRAVIS_BRANCH" == "master" ]; then
+       ./gradlew uploadArchives;
     fi
diff --git a/README.md b/README.md
index 7d8f2e3..284fa9a 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,8 @@
-Status of master branch build: [![Build Status](https://travis-ci.org/samtools/htsjdk.svg?branch=master)](https://travis-ci.org/samtools/htsjdk)
+[![Coverage Status](https://coveralls.io/repos/github/samtools/htsjdk/badge.svg?branch=master)](https://coveralls.io/github/samtools/htsjdk?branch=master)
+[![Build Status](https://travis-ci.org/samtools/htsjdk.svg?branch=master)](https://travis-ci.org/samtools/htsjdk)
+[![Maven Central](https://maven-badges.herokuapp.com/maven-central/com.github.samtools/htsjdk/badge.svg)](http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22com.github.samtools%22%20AND%20a%3A%22htsjdk%22)
+[![License](http://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/samtools/htsjdk)
+[![Language](http://img.shields.io/badge/language-java-brightgreen.svg)](https://www.java.com/)
 
 Status of downstream projects automatically built on top of the current htsjdk master branch. See [gatk-jenkins](https://gatk-jenkins.broadinstitute.org/view/HTSJDK%20Release%20Tests/) for detailed logs. Failure may indicate problems in htsjdk, but may also be due to expected incompatibilities between versions, or unrelated failures in downstream projects.
 - [Picard](https://github.com/broadinstitute/picard):  [![Build Status](https://gatk-jenkins.broadinstitute.org/buildStatus/icon?job=picard-on-htsjdk-master)](https://gatk-jenkins.broadinstitute.org/job/picard-on-htsjdk-master/)
@@ -15,6 +19,67 @@ Please see the [HTSJDK Documentation](http://samtools.github.io/htsjdk) for more
 
 > **NOTE: _HTSJDK does not currently support the latest Variant Call Format Specification (VCFv4.3 and BCFv2.2)._**
 
+#### Building HTSJDK
+
+HTSJDK is now built using [gradle](http://gradle.org/).
+
+A wrapper script (`gradlew`) is included which will download the appropriate version of gradle on the first invocation.
+
+Example gradle usage from the htsjdk root directory:
+ - compile and build a jar 
+ ```
+ ./gradlew
+ ```
+ or
+ ```
+ ./gradlew jar
+ ```
+ The jar will be in build/libs/htsjdk-\<version\>.jar where version is based on the current git commit.
+
+ - run tests, a specific test class, or run a test and wait for the debugger to connect
+ ```
+ ./gradlew test
+
+ ./gradlew test --tests htsjdk.variant.variantcontext.AlleleUnitTest
+ ./gradlew test --tests "*AlleleUnitTest"
+
+ ./gradlew test --tests "*AlleleUnitTest" --debug-jvm
+ ```
+
+- run tests and collect coverage information (report will be in `build/reports/jacoco/test/html/index.html`)
+```
+./gradlew jacocoTestReport
+```
+
+ - clean the project directory
+ ```
+ ./gradlew clean
+ ```
+
+ - build a monolithic jar that includes all of htsjdk's dependencies
+ ```
+ ./gradlew shadowJar
+ ```
+ 
+ - create a snapshot and install it into your local maven repository
+ ```
+ ./gradlew install
+ ```
+
+ - for an exhaustive list of all available targets
+ ```
+ ./gradlew tasks
+ ```
+
+#### Create an HTSJDK project in IntelliJ
+To create an htsjdk project in the IntelliJ IDE, do the following:
+
+1. Select from the menu: `File -> New -> Project from Existing Sources`
+2. In the resulting dialog, choose `Import from existing model`, select `Gradle` and click `Next`
+3. Choose the `default gradle wrapper` and click `Finish`.
+
+From time to time, if dependencies change in htsjdk, you may need to refresh the project from the `View -> Gradle` menu.
+
 #### Licensing Information
 
 Not all sub-packages of htsjdk are subject to the same license, so a license notice is included in each source file or sub-package as appropriate. Please check the relevant license notice whenever you start working with a part of htsjdk that you have not previously worked with to avoid any surprises. 
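A note on the `htsjdk-<version>.jar` naming mentioned above: the `build.gradle` added below derives the version string from the `com.palantir.git-version` plugin. The snippet that follows is a hypothetical, standalone restatement of that logic (the example describe string "2.5.0-9-gabc1234.dirty" is invented for illustration and is not taken from the patch):

```java
// Sketch of the version derivation in the build.gradle below, assuming the
// git-version plugin reported "2.5.0-9-gabc1234.dirty" for a dirty working tree.
public class VersionExample {
    public static void main(String[] args) {
        boolean isRelease = Boolean.getBoolean("release");               // true only if -Drelease=true is set
        String gitVersion = "2.5.0-9-gabc1234.dirty".replaceAll(".dirty", "");
        String version = isRelease ? gitVersion : gitVersion + "-SNAPSHOT";
        System.out.println(version);                                     // 2.5.0-9-gabc1234-SNAPSHOT
    }
}
```

So an ordinary `./gradlew` build on such a commit would produce `build/libs/htsjdk-2.5.0-9-gabc1234-SNAPSHOT.jar`, while a build with the `release` system property set drops the `-SNAPSHOT` suffix.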
diff --git a/build.gradle b/build.gradle
new file mode 100644
index 0000000..174b702
--- /dev/null
+++ b/build.gradle
@@ -0,0 +1,226 @@
+buildscript {
+    repositories {
+        mavenCentral()
+    }
+}
+
+plugins {
+    id "java"
+    id 'maven'
+    id 'signing'
+    id 'jacoco'
+    id 'com.palantir.git-version' version '0.5.1'
+    id 'com.github.johnrengelman.shadow' version '1.2.3'
+    id "com.github.kt3k.coveralls" version "2.6.3"
+}
+
+repositories {
+    mavenCentral()
+}
+
+jacocoTestReport {
+    dependsOn test
+    group = "Reporting"
+    description = "Generate Jacoco coverage reports after running tests."
+    additionalSourceDirs = files(sourceSets.main.allJava.srcDirs)
+
+    reports {
+        xml.enabled = true // coveralls plugin depends on xml format report
+        html.enabled = true
+    }
+}
+
+jacoco {
+    toolVersion = "0.7.5.201505241946"
+}
+
+dependencies {
+    compile "org.apache.commons:commons-jexl:2.1.1"
+    compile "commons-logging:commons-logging:1.1.1"
+    compile "org.xerial.snappy:snappy-java:1.0.3-rc3"
+    compile "org.apache.commons:commons-compress:1.4.1"
+    compile "org.tukaani:xz:1.5"
+    compile "gov.nih.nlm.ncbi:ngs-java:1.2.4"
+
+    testCompile "org.testng:testng:6.9.9"
+}
+
+sourceCompatibility = 1.8
+targetCompatibility = 1.8
+
+final isRelease = Boolean.getBoolean("release")
+final gitVersion = gitVersion().replaceAll(".dirty", "")
+version = isRelease ? gitVersion : gitVersion + "-SNAPSHOT"
+
+logger.info("build for version:" + version)
+group = 'com.github.samtools'
+
+defaultTasks 'jar'
+
+jar {
+    manifest {
+        attributes 'Implementation-Title': 'HTSJDK',
+                'Implementation-Vendor' : 'Samtools Organization',
+                'Implementation-Version': version
+    }
+}
+
+import org.gradle.internal.os.OperatingSystem;
+
+tasks.withType(Test) {
+    outputs.upToDateWhen { false } // tests will always rerun
+    useTestNG()
+
+    // set heap size for the test JVM(s)
+    minHeapSize = "1G"
+    maxHeapSize = "2G"
+
+    jvmArgs '-Djava.awt.headless=true'  //this prevents awt from displaying a java icon while the tests are running
+
+    if (System.env.CI == "true") {  // if running under CI, output less to the logs
+        int count = 0
+
+        beforeTest { descriptor ->
+            count++
+            if( count % 100 == 0) {
+                logger.lifecycle("Finished "+ Integer.toString(count++) + " tests")
+            }
+        }
+    } else {
+        // show standard out and standard error of the test JVM(s) on the console
+        testLogging.showStandardStreams = true
+        beforeTest { descriptor ->
+            logger.lifecycle("Running Test: " + descriptor)
+        }
+
+        // listen to standard out and standard error of the test JVM(s)
+        onOutput { descriptor, event ->
+            logger.lifecycle("Test: " + descriptor + " produced standard out/err: " + event.message )
+        }
+    }
+
+    testLogging {
+        testLogging {
+            events "skipped", "failed"
+            exceptionFormat = "full"
+        }
+        afterSuite { desc, result ->
+            if (!desc.parent) { // will match the outermost suite
+                println "Results: ${result.resultType} (${result.testCount} tests, ${result.successfulTestCount} successes, ${result.failedTestCount} failures, ${result.skippedTestCount} skipped)"
+            }
+        }
+    }
+}
+
+test {
+    description = "Runs the unit tests other than the SRA tests"
+
+    useTestNG {
+        if( OperatingSystem.current().isUnix() ){
+            excludeGroups "slow", "broken", "sra"
+        } else {
+            excludeGroups "slow", "broken", "unix", "sra"
+        }
+    }
+}
+
+task testSRA(type: Test) {
+    jvmArgs '-Dsamjdk.sra_libraries_download=true'
+
+    description "Run the SRA tests"
+    useTestNG {
+        includeGroups "sra"
+    }
+}
+
+task wrapper(type: Wrapper) {
+    description = "Regenerate the gradle wrapper"
+    gradleVersion = '2.13'
+}
+
+// This is a hack to disable the java 8 default javadoc lint until we fix the html formatting
+if (JavaVersion.current().isJava8Compatible()) {
+    tasks.withType(Javadoc) {
+        options.addStringOption('Xdoclint:none', '-quiet')
+    }
+}
+
+task javadocJar(type: Jar, dependsOn: javadoc) {
+    classifier = 'javadoc'
+    from 'build/docs/javadoc'
+}
+
+task sourcesJar(type: Jar) {
+    from sourceSets.main.allSource
+    classifier = 'sources'
+}
+
+/**
+ * This specifies what artifacts will be built and uploaded when performing a maven upload.
+ */
+artifacts {
+    archives jar
+    archives javadocJar
+    archives sourcesJar
+}
+
+/**
+ * Sign non-snapshot releases with our secret key.  This should never need to be invoked directly.
+ */
+signing {
+    required { isRelease && gradle.taskGraph.hasTask("uploadArchives") }
+    sign configurations.archives
+}
+
+/**
+ * Upload a release to sonatype.  You must be an authorized uploader and have your sonatype
+ * username and password information in your gradle properties file.  See the readme for more info.
+ *
+ * For releasing to your local maven repo, use gradle install
+ */
+uploadArchives {
+    repositories {
+        mavenDeployer {
+            beforeDeployment { MavenDeployment deployment -> signing.signPom(deployment) }
+
+            repository(url: "https://oss.sonatype.org/service/local/staging/deploy/maven2/") {
+                authentication(userName: project.findProperty("sonatypeUsername"), password: project.findProperty("sonatypePassword"))
+            }
+
+            snapshotRepository(url: "https://artifactory.broadinstitute.org/artifactory/libs-snapshot-local/") {
+                authentication(userName: System.env.ARTIFACTORY_USERNAME, password: System.env.ARTIFACTORY_PASSWORD)
+            }
+
+            pom.project {
+                name 'HTSJDK'
+                packaging 'jar'
+                description 'A Java API for high-throughput sequencing data (HTS) formats'
+                url 'http://samtools.github.io/htsjdk/'
+
+                developers {
+                    developer {
+                        id 'picard'
+                        name 'Picard Team'
+                        url 'http://broadinstitute.github.io/picard'
+                    }
+                }
+
+                scm {
+                    url 'git@github.com:samtools/htsjdk.git'
+                    connection 'scm:git:git@github.com:samtools/htsjdk.git'
+                }
+
+                licenses {
+                    license {
+                        name 'MIT License'
+                        url 'http://opensource.org/licenses/MIT'
+                        distribution 'repo'
+                    }
+                }
+            }
+        }
+    }
+    doFirst{
+        System.out.println("Uploading version $version")
+    }
+}
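The `test` and `testSRA` tasks above select tests by TestNG group (`excludeGroups`/`includeGroups`). As a rough sketch of how a test opts into the `sra` group (the class and method names below are invented for illustration and do not appear in the patch):

```java
import org.testng.annotations.Test;

// Hypothetical test class, shown only to illustrate the group-based selection above.
public class ExampleSraTest {
    // Tagged "sra": skipped by the default `test` task (which excludes the sra group)
    // and run by `./gradlew testSRA` (which includes only that group and also passes
    // -Dsamjdk.sra_libraries_download=true so the native SRA libraries can be fetched).
    @Test(groups = "sra")
    public void queriesRemoteAccession() {
        // test body would exercise the SRA reader classes
    }
}
```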
diff --git a/build.sbt b/build.sbt
deleted file mode 100644
index ebcce1b..0000000
--- a/build.sbt
+++ /dev/null
@@ -1,154 +0,0 @@
-import com.typesafe.sbt.SbtGit._
-import de.johoop.testngplugin.TestNGPlugin._
-import sbt.Package.ManifestAttributes
-
-//added as a workaround for #https://github.com/samtools/htsjdk/issues/573
-resolvers += Resolver.sbtPluginRepo("releases")
-
-name := "htsjdk"
-
-val buildVersion = "2.3.0"
-
-organization := "com.github.samtools"
-
-libraryDependencies += "gov.nih.nlm.ncbi" % "ngs-java" % "1.2.2"
-
-libraryDependencies += "org.apache.commons" % "commons-jexl" % "2.1.1"
-
-libraryDependencies += "commons-logging" % "commons-logging" % "1.1.1"
-
-libraryDependencies += "org.xerial.snappy" % "snappy-java" % "1.0.3-rc3"
-
-libraryDependencies += "org.apache.commons" % "commons-compress" % "1.4.1"
-
-libraryDependencies += "org.tukaani" % "xz" % "1.5"
- 
-libraryDependencies += "org.apache.ant" % "ant" % "1.8.2"
-
-libraryDependencies += "org.testng" % "testng" % "6.8.8"
-
-unmanagedBase := baseDirectory.value
-
-mappings in (Compile, packageBin) ++= Seq(
-  (baseDirectory.value / "lib/jni/libIntelDeflater.so") -> "lib/jni/libIntelDeflater.so"
-)
-
-javaSource in Compile := baseDirectory.value / "src/java"
-
-javaSource in Test := baseDirectory.value / "src/tests"
-
-testNGSettings
-
-testNGSuites := Seq("src/tests/resources/testng.xml")
-
-autoScalaLibrary := false
-
-publishMavenStyle := true
-
-publishArtifact in Test := false
-
-pomIncludeRepository := { _ => false}
-
-val gitVersion = settingKey[String]("The head commit git hash.")
-
-gitVersion := git.gitHeadCommit.value.get
-
-val gitBranch = settingKey[String]("The git branch.")
-
-gitBranch := git.gitCurrentBranch.value
-
-val buildSnapshot = settingKey[Boolean]("Is this build a snapshot.")
-
-buildSnapshot := false
-
-version := {
-  if (buildSnapshot.value) {
-    s"$buildVersion-${gitVersion.value.substring(0, 7)}-SNAPSHOT"
-  } else {
-    s"$buildVersion"
-  }
-}
-
-val implementationVersion = settingKey[String]("Implementation version.")
-
-implementationVersion := {
-  if (buildSnapshot.value)
-    s"$buildVersion(${gitVersion.value})(SNAPSHOT)"
-  else
-    s"$buildVersion(${gitVersion.value})"
-}
-
-publishTo := {
-  val nexus = "https://oss.sonatype.org/"
-  if (isSnapshot.value)
-    Some("snapshots" at nexus + "content/repositories/snapshots")
-  else
-    Some("releases" at nexus + "service/local/staging/deploy/maven2")
-}
-
-artifactName := { (sv: ScalaVersion, module: ModuleID, artifact: Artifact) =>
-  val classifierStr = artifact.classifier match {
-    case None => "";
-    case Some(c) => "-" + c
-  }
-  artifact.name + "-" + module.revision + classifierStr + "." + artifact.extension
-}
-
-crossPaths := false
-
-javacOptions in (Compile,doc) ++= Seq("-Xdoclint:none")
-
-packageOptions := Seq(ManifestAttributes(
-  ("Implementation-Version", s"${implementationVersion.value}"),
-  ("Implementation-Vendor", "Broad Institute")
-))
-
-assemblyJarName := s"${name.value}-${version.value}.jar"
-
-assemblyMergeStrategy in assembly := {
-  case x if Assembly.isConfigFile(x) =>
-    MergeStrategy.concat
-  case PathList(ps @ _*) if Assembly.isReadme(ps.last) || Assembly.isLicenseFile(ps.last) =>
-    MergeStrategy.rename
-  case PathList("META-INF", path at _*) =>
-    path map {
-      _.toLowerCase
-    } match {
-      case ("manifest.mf" :: Nil) | ("index.list" :: Nil) | ("dependencies" :: Nil) =>
-        MergeStrategy.discard
-      case ps@(x :: xs) if ps.last.endsWith(".sf") || ps.last.endsWith(".dsa") =>
-        MergeStrategy.discard
-      case "plexus" :: xs =>
-        MergeStrategy.discard
-      case "spring.tooling" :: xs =>
-        MergeStrategy.discard
-      case "services" :: xs =>
-        MergeStrategy.filterDistinctLines
-      case ("spring.schemas" :: Nil) | ("spring.handlers" :: Nil) =>
-        MergeStrategy.filterDistinctLines
-      case _ => MergeStrategy.deduplicate
-    }
-  case "asm-license.txt" | "overview.html" =>
-    MergeStrategy.discard
-  case _ => MergeStrategy.deduplicate
-}
-
-pomExtra := <url>http://samtools.github.io/htsjdk/</url>
-  <licenses>
-    <license>
-      <name>MIT License</name>
-      <url>http://opensource.org/licenses/MIT</url>
-      <distribution>repo</distribution>
-    </license>
-  </licenses>
-  <scm>
-    <url>git@github.com:samtools/htsjdk.git</url>
-    <connection>scm:git:git@github.com:samtools/htsjdk.git</connection>
-  </scm>
-  <developers>
-    <developer>
-      <id>picard</id>
-      <name>Picard Team</name>
-      <url>http://broadinstitute.github.io/picard/</url>
-    </developer>
-  </developers>
diff --git a/build.xml b/build.xml
index 9a5fb16..59bebde 100755
--- a/build.xml
+++ b/build.xml
@@ -25,273 +25,38 @@
 
 <project name="htsjdk" basedir="." default="all">
 
-    <property name="src" value="src/java"/>
-    <property name="src.test" value="src/tests"/>
-    <property name="src.test.java" value="${src.test}/java"/>
-    <property name="lib" value="lib"/>
-    <property name="dist" value="dist"/>
-    <property name="classes" value="classes"/>
-    <property name="classes.test" value="testclasses"/>
-    <property name="scripts" value="src/scripts"/>
-    <property name="test.output" value="dist/test"/>
-    <property name="intelDeflator" value="${lib}/jni/libIntelDeflater.so"/>
 
-    <property name="javac.target" value="1.8"/>
-    <property name="javac.debug" value="true"/>
+    <property name="gradle.executable" location="gradlew"/>
 
-    <!-- Get GIT hash, if available, otherwise leave it blank.  -->
+    <echo>
+        ANT IS DEPRECATED FOR BUILDING HTSJDK
 
-    <property name="repository.revision" value=""/>
-    <property name="htsjdk-version" value="2.3.0"/>
-    <property name="htsjdk-version-file" value="htsjdk.version.properties"/>
-    <property name="testng.verbosity" value="2"/>
-    <property name="test.debug.port" value="5005" />  <!-- override on the command line if desired -->
+        Please switch to using gradlew
 
-    <condition property="isUnix">
-        <and>
-            <os family="unix"/>
-            <not><os family="mac"/></not>
-        </and>
-    </condition>
-    <target name="set_excluded_test_groups_unix" if="isUnix">
-        <property name="excludedTestGroups" value="slow, broken"/>
-    </target>
-    <target name="set_excluded_test_groups_non_unix" unless="isUnix">
-        <property name="excludedTestGroups" value="slow, broken, unix, intel"/>
-    </target>
-    <target name="set_excluded_test_groups" depends="set_excluded_test_groups_unix,set_excluded_test_groups_non_unix"/>
+        Examples:
+        compile htsjdk or its tests
+          ./gradlew compileJava
+          ./gradlew compileTest
 
-    <!-- VERSION PROPERTY --> 
-    <target name="write-version-property">
-        <propertyfile
-            file="${htsjdk-version-file}"
-            comment="htsjdk version">
-            <entry  key="htsjdk-version" value="${htsjdk-version}"/>
-        </propertyfile>
-    </target>
+        build a jar
+          ./gradlew jar
 
-    <!-- INIT -->
-    <target name="init" depends="write-version-property">
-        <path id="classpath">
-            <fileset dir="${lib}">
-                <include name="**/*.jar"/>
-            </fileset>
-        </path>
-    </target>
+        build a jar, along with source and document jars
+          ./gradlew build
 
-    <!-- CLEAN -->
-    <target name="clean">
-        <delete dir="${classes}"/>
-        <delete dir="${classes.test}"/>
-        <delete dir="${test.output}"/>
-        <delete dir="${dist}"/>
-        <delete dir="javadoc"/>
-        <delete file="${htsjdk-version-file}"/>
-    </target>
+        build a jar that packages all of htsjdk's dependencies in a single jar
+          ./gradlew shadowJar
 
-    <!-- COMPILE -->
-    <target name="compile" depends="compile-src, compile-tests"
-            description="Compile files without cleaning">
-    </target>
+        run tests, or a single test, or run a test and wait for the debugger
+          ./gradlew test
+          ./gradlew test --tests "*AlleleUnitTest"
+          ./gradlew test --tests "*AlleleUnitTest" --debug-jvm
 
-    <target name="compile-src" depends="compile-samtools, compile-tribble, compile-variant" description="Compile files without cleaning"/>
+        clean the project directory
+          ./gradlew clean
 
-    <target name="compile-samtools" depends="init" description="Compile sam-samtools files without cleaning">
-        <compile-src includes="htsjdk/samtools/**/*.*"/>
-    </target>
+        see an exhaustive list of all available targets
+          ./gradlew tasks
+    </echo>
 
-    <target name="compile-tribble" depends="init, compile-samtools" description="Compile tribble files without cleaning">
-        <compile-src includes="htsjdk/tribble/**/*.*"/>
-    </target>
-
-    <target name="compile-variant" depends="init, compile-tribble" description="Compile variant files without cleaning">
-		<compile-src includes="htsjdk/variant/**/*.*"/>
-    </target>
-
-    <target name="compile-tests" depends="compile-samtools-tests, compile-tribble-tests, compile-variant-tests" description="Compile test files without cleaning"/>
-
-    <target name="compile-samtools-tests" depends="init" description="Compile samtools test files without cleaning">
-        <compile-tests includes="htsjdk/samtools/**/*.*"/>
-    </target>
-
-    <target name="compile-tribble-tests" depends="init" description="Compile tribble test files without cleaning">
-        <compile-tests includes="htsjdk/tribble/**/*.*"/>
-    </target>
-
-    <target name="compile-variant-tests" depends="init" description="Compile variant test files without cleaning">
-        <compile-tests includes="htsjdk/variant/**/*.*"/>
-    </target>
-
-    <!-- TEST -->
-    <macrodef name="run-test">
-        <attribute name="excludedTestGroups" default=""/>
-        <attribute name="includedTestGroups" default=""/>
-        <attribute name="additionalJVMarg" default=""/>
-
-        <sequential>
-                <taskdef resource="testngtasks" classpathref="classpath"/>
-                <testng suitename="htsjdk-tests" classpathref="classpath" outputdir="${test.output}"
-                        failureproperty="tests.failed" excludedgroups="@{excludedTestGroups}" groups="@{includedTestGroups}"
-                        workingDir="${basedir}"
-                        verbose="${testng.verbosity}">
-                    <classpath>
-                        <pathelement path="${classes}"/>
-                        <pathelement path="${classes.test}"/>
-                        <pathelement path="${scripts}"/>
-                    </classpath>
-                    <classfileset dir="${classes.test}">
-                        <include name="**/Test*.class"/>
-                        <include name="**/*Test.class"/>
-                    </classfileset>
-                    <jvmarg line="-Xmx2G @{additionalJVMarg}"/>
-                </testng>
-                <junitreport todir="${dist}/test" >
-                    <fileset dir="${test.output}">
-                        <include name="*.xml"/>
-                    </fileset>
-                    <report format="noframes" todir="${dist}/test" styledir="etc/test"/>
-                </junitreport>
-                <copy file="etc/test/testng.css" todir="${dist}/test" overwrite="true"/>
-                <fail if="tests.failed" message="There were failed unit tests"/>
-        </sequential>
-    </macrodef>
-
-
-    <target name="test" depends="compile, set_excluded_test_groups, intel-test" description="Run unit tests">
-            <run-test excludedTestGroups="${excludedTestGroups}, sra, intel"/>
-    </target>
-
-    <target name="sra-test" depends="compile, set_excluded_test_groups" description="Run SRA unit tests">
-        <run-test includedTestGroups="sra" excludedTestGroups="${excludedTestGroups}"/>
-    </target>
-
-    <!-- needs to be in a separate target since jvm has different argument -->
-    <target name="intel-test" depends="compile, set_excluded_test_groups" description="Run Intel unit tests" if="isUnix">
-        <echo message="intelDeflator = ${intelDeflator}"/>
-        <run-test includedTestGroups="intel" additionalJVMarg="-Dsamjdk.intel_deflater_so_path=${intelDeflator}"/>
-    </target>
-
-    <target name="single-test"
-            depends="compile, compile-tests"
-            description="Compile and run a single test.">
-        <taskdef resource="testngtasks" classpathref="classpath"/>
-        <fail unless="name" message="Please provide input test: -Dname=..." />
-
-        <condition property="debug.jvm.args" value="-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=${test.debug.port}" else="">
-          <isset property="test.debug" />
-        </condition>
-
-        <testng suitename="htsjdk-single-test" classpathref="classpath" outputdir="${test.output}"
-                verbose="${testng.verbosity}">
-            <jvmarg line="-Xmx512m ${debug.jvm.args}"/>
-            <classpath>
-                <pathelement path="${classes}"/>
-                <pathelement path="${classes.test}"/>
-                <pathelement path="${scripts}"/>
-            </classpath>
-            <classfileset dir="${classes.test}">
-                <include name="**/${name}.class"/>
-            </classfileset>
-        </testng>
-    </target>
-
-    <target name="htsjdk-jar" depends="compile-samtools, compile-tribble, compile-variant"
-            description="Builds htsjdk-${htsjdk-version}.jar for inclusion in other projects">
-        <mkdir dir="${dist}"/>
-        <jar destfile="${dist}/htsjdk-${htsjdk-version}.jar" compress="no">
-            <fileset dir="${classes}" includes ="htsjdk/samtools/**/*.*"/>
-            <fileset dir="${classes}" includes="htsjdk/tribble/**/*.*"/>
-            <fileset dir="${classes}" includes="htsjdk/variant/**/*.*"/>
-            <manifest>
-                <attribute name="Implementation-Version" value="${htsjdk-version}(${repository.revision})"/>
-                <attribute name="Implementation-Vendor" value="Broad Institute"/>
-            </manifest>
-        </jar>
-        <copy todir="${dist}">
-            <fileset dir="lib" includes="*.jar"/>
-        </copy>
-    </target>
-
-    <target name="javadoc" depends="init" description="Generates the project javadoc.">
-        <javadoc
-                sourcepath="${src}"
-                destdir="javadoc"
-                packagenames="htsjdk.samtools.*, htsjdk.variant.*, htsjdk.tribble.*"
-                windowtitle="HTS JDK API Documentation"
-                doctitle="<h1>HTS JDK API Documentation</h1>"
-                author="true"
-                protected="true"
-                use="true"
-                version="true"
-                additionalparam="-Xdoclint:none -notimestamp"
-                failonerror="true">
-            <classpath>
-                <pathelement location="${java.home}/../lib/tools.jar" />
-                <fileset dir="${lib}">
-                    <include name="**/*.jar" />
-                </fileset>
-            </classpath>
-            <link href="http://java.sun.com/j2se/1.5.0/docs/api/"/>
-        </javadoc>
-    </target>
-
-    <!-- ALL -->
-    <target name="all" depends="compile, htsjdk-jar" description="Default build target">
-    </target>
-
-    <!-- ************************************************************************************** -->
-    <!-- ************************************************************************************** -->
-    <!-- Beginning of taskdefs that are used elsewhere in the build file                        -->
-    <!-- ************************************************************************************** -->
-    <!-- ************************************************************************************** -->
-
-    <!-- Compile source files specified by includes, from source root. Can specifically
-    include or exclude-->
-    <macrodef name="compile-src">
-        <attribute name="includes" default=""/>
-        <attribute name="excludes" default=""/>
-        <attribute name="destdir" default="${classes}"/>
-        <attribute name="compile.classpath" default="classpath"/>
-        <attribute name="compiler.args" default=""/>
-        <sequential>
-        <mkdir dir="${classes}"/>
-            <!-- unset the sourcepath attribute in order to compile only files explicitly specified and disable javac's default searching mechanism -->
-            <javac destdir="@{destdir}"
-                   optimize="${javac.opt}"
-                   debug="${javac.debug}"
-                   sourcepath=""
-                   srcdir="${src}"
-                   includes="@{includes}"
-                   excludes="@{excludes}"
-                   source="${javac.target}"
-                   target="${javac.target}">
-                <classpath refid="@{compile.classpath}"/>
-                <compilerarg line="@{compiler.args}" />
-            </javac>
-        </sequential>
-    </macrodef>
-
-    <macrodef name="compile-tests">
-        <attribute name="includes" default=""/>
-        <attribute name="excludes" default=""/>
-        <attribute name="compiler.args" default=""/>
-
-        <sequential>
-            <mkdir dir="${classes.test}"/>
-            <javac destdir="${classes.test}"
-                   optimize="${javac.opt}"
-                   debug="${javac.debug}"
-                   srcdir="${src.test.java}"
-                   includes="@{includes}"
-                   excludes="@{excludes}"
-                   source="${javac.target}"
-                   target="${javac.target}">
-                <classpath>
-                    <path refid="classpath"/>
-                    <pathelement location="${classes}"/>
-                </classpath>
-                <compilerarg line="@{compiler.args}"/>
-            </javac>
-        </sequential>
-    </macrodef>
 </project>
diff --git a/etc/test/junit-noframes.xsl b/etc/test/junit-noframes.xsl
deleted file mode 100644
index ffbf68b..0000000
--- a/etc/test/junit-noframes.xsl
+++ /dev/null
@@ -1,581 +0,0 @@
-<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0"
-                xmlns:lxslt="http://xml.apache.org/xslt"
-                xmlns:stringutils="xalan://org.apache.tools.ant.util.StringUtils"
-                xmlns:Date="http://www.saxon.com/java/java.util.Date">
-    <xsl:output method="html" indent="yes" encoding="US-ASCII"
-                doctype-public="-//W3C//DTD HTML 4.01 Transitional//EN"/>
-    <xsl:decimal-format decimal-separator="." grouping-separator=","/>
-
-    <!--
-      Copyright 2001-2004 The Apache Software Foundation
-
-      Licensed under the Apache License, Version 2.0 (the "License");
-      you may not use this file except in compliance with the License.
-      You may obtain a copy of the License at
-
-          http://www.apache.org/licenses/LICENSE-2.0
-
-      Unless required by applicable law or agreed to in writing, software
-      distributed under the License is distributed on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-      See the License for the specific language governing permissions and
-      limitations under the License.
-    -->
-
-    <!--
-
-     Sample stylesheet to be used with Ant JUnitReport output.
-
-     It creates a non-framed report that can be useful to send via
-     e-mail or such.
-
-     @author Stephane Bailliez <a href="mailto:sbailliez at apache.org"/>
-     @author Erik Hatcher <a href="mailto:ehatcher at apache.org"/>
-
-    -->
-    <xsl:template match="testsuites">
-        <html>
-            <head>
-                <title>Unit Test Results</title>
-                <style type="text/css">
-                    body {
-                    font:normal 68% verdana,arial,helvetica;
-                    color:#000000;
-                    }
-                    table tr td, table tr th {
-                    font-size: 68%;
-                    }
-                    table.details tr th{
-                    font-weight: bold;
-                    text-align:left;
-                    background:#a6caf0;
-                    }
-                    table.details tr td{
-                    background:#eeeee0;
-                    }
-
-                    p {
-                    line-height:1.5em;
-                    margin-top:0.5em; margin-bottom:1.0em;
-                    }
-                    h1 {
-                    margin: 0px 0px 5px; font: 165% verdana,arial,helvetica
-                    }
-                    h2 {
-                    margin-top: 1em; margin-bottom: 0.5em; font: bold 125% verdana,arial,helvetica
-                    }
-                    h3 {
-                    margin-bottom: 0.5em; font: bold 115% verdana,arial,helvetica
-                    }
-                    h4 {
-                    margin-bottom: 0.5em; font: bold 100% verdana,arial,helvetica
-                    }
-                    h5 {
-                    margin-bottom: 0.5em; font: bold 100% verdana,arial,helvetica
-                    }
-                    h6 {
-                    margin-bottom: 0.5em; font: bold 100% verdana,arial,helvetica
-                    }
-                    .Error {
-                    font-weight:bold; color:red;
-                    }
-                    .Failure {
-                    font-weight:bold; color:#8b0000;
-                    }
-                    .Properties {
-                    text-align:right;
-                    }
-                </style>
-                <script type="text/javascript" language="JavaScript">
-                    var TestCases = new Array();
-                    var cur;
-                    <xsl:for-each select="./testsuite">
-                        <xsl:apply-templates select="properties"/>
-                    </xsl:for-each>
-
-                </script>
-                <script type="text/javascript" language="JavaScript"><![CDATA[
-        function displayProperties (name) {
-          var win = window.open('','JUnitSystemProperties','scrollbars=1,resizable=1');
-          var doc = win.document.open();
-          doc.write("<html><head><title>Properties of " + name + "</title>");
-          doc.write("<style>")
-          doc.write("body {font:normal 68% verdana,arial,helvetica; color:#000000; }");
-          doc.write("table tr td, table tr th { font-size: 68%; }");
-          doc.write("table.properties { border-collapse:collapse; border-left:solid 1 #cccccc; border-top:solid 1 #cccccc; padding:5px; }");
-          doc.write("table.properties th { text-align:left; border-right:solid 1 #cccccc; border-bottom:solid 1 #cccccc; background-color:#eeeeee; }");
-          doc.write("table.properties td { font:normal; text-align:left; border-right:solid 1 #cccccc; border-bottom:solid 1 #cccccc; background-color:#fffffff; }");
-          doc.write("h3 { margin-bottom: 0.5em; font: bold 115% verdana,arial,helvetica }");
-          doc.write("</style>");
-          doc.write("</head><body>");
-          doc.write("<h3>Properties of " + name + "</h3>");
-          doc.write("<div align=\"right\"><a href=\"javascript:window.close();\">Close</a></div>");
-          doc.write("<table class='properties'>");
-          doc.write("<tr><th>Name</th><th>Value</th></tr>");
-          for (prop in TestCases[name]) {
-            doc.write("<tr><th>" + prop + "</th><td>" + TestCases[name][prop] + "</td></tr>");
-          }
-          doc.write("</table>");
-          doc.write("</body></html>");
-          doc.close();
-          win.focus();
-        }
-      ]]>
-                </script>
-            </head>
-            <body>
-                <a name="top"></a>
-                <xsl:call-template name="pageHeader"/>
-
-                Last Modified:
-                <xsl:value-of select="Date:toString(Date:new())"/>
-
-                <!-- Summary part -->
-                <xsl:call-template name="summary"/>
-                <hr size="1" width="95%" align="left"/>
-
-                <!-- Package List part -->
-                <xsl:call-template name="packagelist"/>
-                <hr size="1" width="95%" align="left"/>
-
-                <!-- For each package create its part -->
-                <xsl:call-template name="packages"/>
-                <hr size="1" width="95%" align="left"/>
-
-                <!-- For each class create the  part -->
-                <xsl:call-template name="classes"/>
-
-            </body>
-        </html>
-    </xsl:template>
-
-
-    <!-- ================================================================== -->
-    <!-- Write a list of all packages with an hyperlink to the anchor of    -->
-    <!-- of the package name.                                               -->
-    <!-- ================================================================== -->
-    <xsl:template name="packagelist">
-        <h2>Packages</h2>
-        Note: package statistics are not computed recursively, they only sum up all of its testsuites numbers.
-        <table class="details" border="0" cellpadding="5" cellspacing="2" width="95%">
-            <xsl:call-template name="testsuite.test.header"/>
-            <!-- list all packages recursively -->
-            <xsl:for-each select="./testsuite[not(./@package = preceding-sibling::testsuite/@package)]">
-                <xsl:sort select="@package"/>
-                <xsl:variable name="testsuites-in-package"
-                              select="/testsuites/testsuite[./@package = current()/@package]"/>
-                <xsl:variable name="testCount" select="sum($testsuites-in-package/@tests)"/>
-                <xsl:variable name="errorCount" select="sum($testsuites-in-package/@errors)"/>
-                <xsl:variable name="failureCount" select="sum($testsuites-in-package/@failures)"/>
-                <xsl:variable name="timeCount" select="sum($testsuites-in-package/@time)"/>
-
-                <!-- write a summary for the package -->
-                <tr valign="top">
-                    <!-- set a nice color depending if there is an error/failure -->
-                    <xsl:attribute name="class">
-                        <xsl:choose>
-                            <xsl:when test="$failureCount > 0">Failure</xsl:when>
-                            <xsl:when test="$errorCount > 0">Error</xsl:when>
-                        </xsl:choose>
-                    </xsl:attribute>
-                    <td>
-                        <a href="#{@package}">
-                            <xsl:value-of select="@package"/>
-                        </a>
-                    </td>
-                    <td>
-                        <xsl:value-of select="$testCount"/>
-                    </td>
-                    <td>
-                        <xsl:value-of select="$errorCount"/>
-                    </td>
-                    <td>
-                        <xsl:value-of select="$failureCount"/>
-                    </td>
-                    <td>
-                        <xsl:call-template name="display-time">
-                            <xsl:with-param name="value" select="$timeCount"/>
-                        </xsl:call-template>
-                    </td>
-                </tr>
-            </xsl:for-each>
-        </table>
-    </xsl:template>
-
-
-    <!-- ================================================================== -->
-    <!-- Write a package level report                                       -->
-    <!-- It creates a table with values from the document:                  -->
-    <!-- Name | Tests | Errors | Failures | Time                            -->
-    <!-- ================================================================== -->
-    <xsl:template name="packages">
-        <!-- create an anchor to this package name -->
-        <xsl:for-each select="/testsuites/testsuite[not(./@package = preceding-sibling::testsuite/@package)]">
-            <xsl:sort select="@package"/>
-            <a name="{@package}"></a>
-            <h3>Package
-                <xsl:value-of select="@package"/>
-            </h3>
-
-            <table class="details" border="0" cellpadding="5" cellspacing="2" width="95%">
-                <xsl:call-template name="testsuite.test.header"/>
-
-                <!-- match the testsuites of this package -->
-                <xsl:apply-templates select="/testsuites/testsuite[./@package = current()/@package]" mode="print.test"/>
-            </table>
-            <a href="#top">Back to top</a>
-            <p/>
-            <p/>
-        </xsl:for-each>
-    </xsl:template>
-
-    <xsl:template name="classes">
-        <xsl:for-each select="testsuite">
-            <xsl:sort select="@name"/>
-            <!-- create an anchor to this class name -->
-            <a name="{@name}"></a>
-            <h3>TestCase
-                <xsl:value-of select="@name"/>
-            </h3>
-
-            <table class="details" border="0" cellpadding="5" cellspacing="2" width="95%">
-                <xsl:call-template name="testcase.test.header"/>
-                <!--
-                test can even not be started at all (failure to load the class)
-                so report the error directly
-                -->
-                <xsl:if test="./error">
-                    <tr class="Error">
-                        <td colspan="4">
-                            <xsl:apply-templates select="./error"/>
-                        </td>
-                    </tr>
-                </xsl:if>
-                <xsl:apply-templates select="./testcase" mode="print.test"/>
-            </table>
-            <table class="details" border="0" cellpadding="5" cellspacing="2"
-                   width="95%">
-                <xsl:call-template name="testcase.test.out"/>
-                <tr>
-                    <td>
-                        <xsl:choose>
-                            <xsl:when test="./system-out = ''">N/A</xsl:when>
-                            <xsl:otherwise>
-                                <xsl:call-template name="br-replace">
-                                    <xsl:with-param name="word" select="./system - out"/>
-                                </xsl:call-template>
-                            </xsl:otherwise>
-                        </xsl:choose>
-                    </td>
-                </tr>
-            </table>
-            <table class="details" border="0" cellpadding="5" cellspacing="2"
-                   width="95%">
-                <xsl:call-template name="testcase.test.err"/>
-                <tr>
-                    <td>
-                        <xsl:choose>
-                            <xsl:when test="./system-err = ''">N/A</xsl:when>
-                            <xsl:otherwise>
-                                <xsl:call-template name="br-replace">
-                                    <xsl:with-param name="word" select="./system-err"/>
-                                </xsl:call-template>
-                            </xsl:otherwise>
-                        </xsl:choose>
-                    </td>
-                </tr>
-            </table>
-            <div class="Properties">
-                <a>
-                    <xsl:attribute name="href">javascript:displayProperties('
-                        <xsl:value-of select="@package"/>
-                        .
-                        <xsl:value-of select="@name"/>
-                        ');
-                    </xsl:attribute>
-                    Properties »
-                </a>
-            </div>
-            <p/>
-
-            <a href="#top">Back to top</a>
-        </xsl:for-each>
-    </xsl:template>
-
-    <xsl:template name="summary">
-        <h2>Summary</h2>
-        <xsl:variable name="testCount" select="sum(testsuite/@tests)"/>
-        <xsl:variable name="errorCount" select="sum(testsuite/@errors)"/>
-        <xsl:variable name="failureCount" select="sum(testsuite/@failures)"/>
-        <xsl:variable name="timeCount" select="sum(testsuite/@time)"/>
-        <xsl:variable name="successRate" select="($testCount - $failureCount - $errorCount) div $testCount"/>
-        <table class="details" border="0" cellpadding="5" cellspacing="2" width="95%">
-            <tr valign="top">
-                <th>Tests</th>
-                <th>Failures</th>
-                <th>Errors</th>
-                <th>Success rate</th>
-                <th>Time</th>
-            </tr>
-            <tr valign="top">
-                <xsl:attribute name="class">
-                    <xsl:choose>
-                        <xsl:when test="$failureCount > 0">Failure</xsl:when>
-                        <xsl:when test="$errorCount > 0">Error</xsl:when>
-                    </xsl:choose>
-                </xsl:attribute>
-                <td>
-                    <xsl:value-of select="$testCount"/>
-                </td>
-                <td>
-                    <xsl:value-of select="$failureCount"/>
-                </td>
-                <td>
-                    <xsl:value-of select="$errorCount"/>
-                </td>
-                <td>
-                    <xsl:call-template name="display-percent">
-                        <xsl:with-param name="value" select="$successRate"/>
-                    </xsl:call-template>
-                </td>
-                <td>
-                    <xsl:call-template name="display-time">
-                        <xsl:with-param name="value" select="$timeCount"/>
-                    </xsl:call-template>
-                </td>
-
-            </tr>
-        </table>
-        <table border="0" width="95%">
-            <tr>
-                <td style="text-align: justify;">
-                    Note:
-                    <i>failures</i>
-                    are anticipated and checked for with assertions while
-                    <i>errors</i>
-                    are unanticipated.
-                </td>
-            </tr>
-        </table>
-    </xsl:template>
-
-    <!--
-    Write properties into a JavaScript data structure.
-    This is based on the original idea by Erik Hatcher (ehatcher at apache.org)
-    -->
-    <xsl:template match="properties">
-        cur = TestCases['
-        <xsl:value-of select="../@package"/>
-        .
-        <xsl:value-of select="../@name"/>
-        '] = new Array();
-        <xsl:for-each select="property">
-            <xsl:sort select="@name"/>
-            cur['
-            <xsl:value-of select="@name"/>
-            '] = '
-            <xsl:call-template name="JS-escape">
-                <xsl:with-param name="string" select="@value"/>
-            </xsl:call-template>
-            ';
-        </xsl:for-each>
-    </xsl:template>
-
-    <!-- Page HEADER -->
-    <xsl:template name="pageHeader">
-        <h1>Unit Test Results</h1>
-        <table width="100%">
-            <tr>
-                <td align="left"></td>
-                <td align="right">Designed for use with
-                    <a href='http://www.junit.org'>JUnit</a>
-                    and
-                    <a href='http://jakarta.apache.org/ant'>Ant</a>
-                    .
-                </td>
-            </tr>
-        </table>
-        <hr size="1"/>
-    </xsl:template>
-
-    <xsl:template match="testsuite" mode="header">
-        <tr valign="top">
-            <th width="80%">Name</th>
-            <th>Tests</th>
-            <th>Errors</th>
-            <th>Failures</th>
-            <th nowrap="nowrap">Time(s)</th>
-        </tr>
-    </xsl:template>
-
-    <!-- class header -->
-    <xsl:template name="testsuite.test.header">
-        <tr valign="top">
-            <th width="80%">Name</th>
-            <th>Tests</th>
-            <th>Errors</th>
-            <th>Failures</th>
-            <th nowrap="nowrap">Time(s)</th>
-        </tr>
-    </xsl:template>
-
-    <!-- method header -->
-    <xsl:template name="testcase.test.header">
-        <tr valign="top">
-            <th>Name</th>
-            <th>Status</th>
-            <th width="80%">Type</th>
-            <th nowrap="nowrap">Time(s)</th>
-        </tr>
-    </xsl:template>
-
-
-    <!-- System.out.println output header -->
-    <xsl:template name="testcase.test.out">
-        <tr valign="top">
-            <th>System.out</th>
-        </tr>
-    </xsl:template>
-
-    <!-- System.err.println output header -->
-    <xsl:template name="testcase.test.err">
-        <tr valign="top">
-            <th>System.err</th>
-        </tr>
-    </xsl:template>
-
-    <!-- class information -->
-    <xsl:template match="testsuite" mode="print.test">
-        <tr valign="top">
-            <!-- set a nice color depending if there is an error/failure -->
-            <xsl:attribute name="class">
-                <xsl:choose>
-                    <xsl:when test="@failures[.> 0]">Failure</xsl:when>
-                    <xsl:when test="@errors[.> 0]">Error</xsl:when>
-                </xsl:choose>
-            </xsl:attribute>
-
-            <!-- print testsuite information -->
-            <td>
-                <a href="#{@name}">
-                    <xsl:value-of select="@name"/>
-                </a>
-            </td>
-            <td>
-                <xsl:value-of select="@tests"/>
-            </td>
-            <td>
-                <xsl:value-of select="@errors"/>
-            </td>
-            <td>
-                <xsl:value-of select="@failures"/>
-            </td>
-            <td>
-                <xsl:call-template name="display-time">
-                    <xsl:with-param name="value" select="@time"/>
-                </xsl:call-template>
-            </td>
-        </tr>
-    </xsl:template>
-
-    <xsl:template match="testcase" mode="print.test">
-        <tr valign="top">
-            <xsl:attribute name="class">
-                <xsl:choose>
-                    <xsl:when test="failure | error">Error</xsl:when>
-                </xsl:choose>
-            </xsl:attribute>
-            <td>
-                <xsl:value-of select="@classname"/>
-                .
-                <xsl:value-of select="@name"/>
-            </td>
-            <xsl:choose>
-                <xsl:when test="failure">
-                    <td>Failure</td>
-                    <td>
-                        <xsl:apply-templates select="failure"/>
-                    </td>
-                </xsl:when>
-                <xsl:when test="error">
-                    <td>Error</td>
-                    <td>
-                        <xsl:apply-templates select="error"/>
-                    </td>
-                </xsl:when>
-                <xsl:otherwise>
-                    <td>Success</td>
-                    <td></td>
-                </xsl:otherwise>
-            </xsl:choose>
-            <td>
-                <xsl:call-template name="display-time">
-                    <xsl:with-param name="value" select="@time"/>
-                </xsl:call-template>
-            </td>
-        </tr>
-    </xsl:template>
-
-
-    <xsl:template match="failure">
-        <xsl:call-template name="display-failures"/>
-    </xsl:template>
-
-    <xsl:template match="error">
-        <xsl:call-template name="display-failures"/>
-    </xsl:template>
-
-    <!-- Style for the error and failure in the tescase template -->
-    <xsl:template name="display-failures">
-        <xsl:choose>
-            <xsl:when test="not(@message)">N/A</xsl:when>
-            <xsl:otherwise>
-                <xsl:value-of select="@message"/>
-            </xsl:otherwise>
-        </xsl:choose>
-        <!-- display the stacktrace -->
-        <code>
-            <br/>
-            <br/>
-            <xsl:call-template name="br-replace">
-                <xsl:with-param name="word" select="."/>
-            </xsl:call-template>
-        </code>
-        <!-- the later is better but might be problematic for non-21" monitors... -->
-        <!--pre><xsl:value-of select="."/></pre-->
-    </xsl:template>
-
-    <xsl:template name="JS-escape">
-        <xsl:param name="string"/>
-        <xsl:param name="tmp1" select="stringutils:replace(string($string),'\','\\')"/>
-        <xsl:param name="tmp2" select="stringutils:replace(string($tmp1),"'","\'")"/>
-        <xsl:value-of select="$tmp2"/>
-    </xsl:template>
-
-
-    <!--
-        template that will convert a carriage return into a br tag
-        @param word the text from which to convert CR to BR tag
-    -->
-    <xsl:template name="br-replace">
-        <xsl:param name="word"/>
-        <xsl:param name="br">
-            <br/>
-            <br/>
-        </xsl:param>
-        <xsl:value-of select='stringutils:replace(string($word),"&#xA;",$br)'/>
-    </xsl:template>
-
-    <xsl:template name="display-time">
-        <xsl:param name="value"/>
-        <xsl:value-of select="format-number($value,'0.000')"/>
-    </xsl:template>
-
-    <xsl:template name="display-percent">
-        <xsl:param name="value"/>
-        <xsl:value-of select="format-number($value,'0.00%')"/>
-    </xsl:template>
-
-</xsl:stylesheet>
-
-
diff --git a/etc/test/testng.css b/etc/test/testng.css
deleted file mode 100644
index 617bde1..0000000
--- a/etc/test/testng.css
+++ /dev/null
@@ -1,26 +0,0 @@
-.invocation-failed,  .test-failed  { background-color: #ffc0cb; }
-.invocation-percent, .test-percent { background-color: #66cdaa; }
-.invocation-passed,  .test-passed  { background-color: #98fb98; }
-.invocation-skipped, .test-skipped { background-color: #f0e68c; }
-
-.main-page {
-  font-size: small;
-  font-family: verdana, 'trebuchet ms', sans-serif;
-}
-
-
-table {
-  font-size: small;
-  font-family: verdana, 'trebuchet ms', sans-serif;
-}
-
-h1 , h2 , h3 , h4 , h5 , h6  {
-	color: #999;
-	font-family : Georgia, "Times New Roman", Times, serif;
-	font-weight : normal;
-	font-variant : small-caps;
-	padding: 0px;
-	margin-bottom:0.2px;
-	margin-top:1px;
-}
-
diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar
new file mode 100644
index 0000000..ca78035
Binary files /dev/null and b/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 0000000..aad2b24
--- /dev/null
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,6 @@
+#Fri May 13 14:00:35 EDT 2016
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-2.13-bin.zip
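+# Illustrative note (not part of the generated file): the "wrapper" task in
+# build.gradle regenerates this file, and distributionUrl follows the
+# gradleVersion configured there (Gradle 2.13 in this commit).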
diff --git a/gradlew b/gradlew
new file mode 100755
index 0000000..27309d9
--- /dev/null
+++ b/gradlew
@@ -0,0 +1,164 @@
+#!/usr/bin/env bash
+
+##############################################################################
+##
+##  Gradle start up script for UN*X
+##
+##############################################################################
+
+# Attempt to set APP_HOME
+# Resolve links: $0 may be a link
+PRG="$0"
+# Need this for relative symlinks.
+while [ -h "$PRG" ] ; do
+    ls=`ls -ld "$PRG"`
+    link=`expr "$ls" : '.*-> \(.*\)$'`
+    if expr "$link" : '/.*' > /dev/null; then
+        PRG="$link"
+    else
+        PRG=`dirname "$PRG"`"/$link"
+    fi
+done
+SAVED="`pwd`"
+cd "`dirname \"$PRG\"`/" >/dev/null
+APP_HOME="`pwd -P`"
+cd "$SAVED" >/dev/null
+
+APP_NAME="Gradle"
+APP_BASE_NAME=`basename "$0"`
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS=""
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD="maximum"
+
+warn ( ) {
+    echo "$*"
+}
+
+die ( ) {
+    echo
+    echo "$*"
+    echo
+    exit 1
+}
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+nonstop=false
+case "`uname`" in
+  CYGWIN* )
+    cygwin=true
+    ;;
+  Darwin* )
+    darwin=true
+    ;;
+  MINGW* )
+    msys=true
+    ;;
+  NONSTOP* )
+    nonstop=true
+    ;;
+esac
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+        # IBM's JDK on AIX uses strange locations for the executables
+        JAVACMD="$JAVA_HOME/jre/sh/java"
+    else
+        JAVACMD="$JAVA_HOME/bin/java"
+    fi
+    if [ ! -x "$JAVACMD" ] ; then
+        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+    fi
+else
+    JAVACMD="java"
+    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+fi
+
+# Increase the maximum file descriptors if we can.
+if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
+    MAX_FD_LIMIT=`ulimit -H -n`
+    if [ $? -eq 0 ] ; then
+        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
+            MAX_FD="$MAX_FD_LIMIT"
+        fi
+        ulimit -n $MAX_FD
+        if [ $? -ne 0 ] ; then
+            warn "Could not set maximum file descriptor limit: $MAX_FD"
+        fi
+    else
+        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
+    fi
+fi
+
+# For Darwin, add options to specify how the application appears in the dock
+if $darwin; then
+    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
+fi
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin ; then
+    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
+    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
+    JAVACMD=`cygpath --unix "$JAVACMD"`
+
+    # We build the pattern for arguments to be converted via cygpath
+    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
+    SEP=""
+    for dir in $ROOTDIRSRAW ; do
+        ROOTDIRS="$ROOTDIRS$SEP$dir"
+        SEP="|"
+    done
+    OURCYGPATTERN="(^($ROOTDIRS))"
+    # Add a user-defined pattern to the cygpath arguments
+    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
+        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
+    fi
+    # Now convert the arguments - kludge to limit ourselves to /bin/sh
+    i=0
+    for arg in "$@" ; do
+        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
+        CHECK2=`echo "$arg"|egrep -c "^-"`                                 ### Determine if an option
+
+        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition
+            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
+        else
+            eval `echo args$i`="\"$arg\""
+        fi
+        i=$((i+1))
+    done
+    case $i in
+        (0) set -- ;;
+        (1) set -- "$args0" ;;
+        (2) set -- "$args0" "$args1" ;;
+        (3) set -- "$args0" "$args1" "$args2" ;;
+        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
+        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
+        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
+        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
+        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
+        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
+    esac
+fi
+
+# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
+function splitJvmOpts() {
+    JVM_OPTS=("$@")
+}
+eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
+JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
+
+exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
diff --git a/htsjdk.iml b/htsjdk.iml
deleted file mode 100644
index 3c722e4..0000000
--- a/htsjdk.iml
+++ /dev/null
@@ -1,66 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<module relativePaths="true" type="JAVA_MODULE" version="4">
-  <component name="FacetManager">
-    <facet type="Python" name="Python">
-      <configuration sdkName="" />
-    </facet>
-  </component>
-  <component name="NewModuleRootManager" inherit-compiler-output="false">
-    <output url="file://$MODULE_DIR$/intellij.classes" />
-    <output-test url="file://$MODULE_DIR$/intellij.testclasses" />
-    <exclude-output />
-    <content url="file://$MODULE_DIR$">
-      <sourceFolder url="file://$MODULE_DIR$/src/java" isTestSource="false" />
-      <sourceFolder url="file://$MODULE_DIR$/src/tests/java" isTestSource="true" />
-      <excludeFolder url="file://$MODULE_DIR$/classes" />
-    </content>
-    <orderEntry type="jdk" jdkName="1.6" jdkType="JavaSDK" />
-    <orderEntry type="sourceFolder" forTests="false" />
-    <orderEntry type="module-library">
-      <library>
-        <CLASSES>
-          <root url="jar://$MODULE_DIR$/lib/testng/testng-5.5-jdk15.jar!/" />
-        </CLASSES>
-        <JAVADOC />
-        <SOURCES />
-      </library>
-    </orderEntry>
-    <orderEntry type="module-library">
-      <library>
-        <CLASSES>
-          <root url="jar://$MODULE_DIR$/lib/snappy-java-1.0.3-rc3.jar!/" />
-        </CLASSES>
-        <JAVADOC />
-        <SOURCES />
-      </library>
-    </orderEntry>
-    <orderEntry type="module-library">
-      <library>
-        <CLASSES>
-          <root url="jar://$MODULE_DIR$/lib/commons-jexl-2.1.1.jar!/" />
-        </CLASSES>
-        <JAVADOC />
-        <SOURCES />
-      </library>
-    </orderEntry>
-    <orderEntry type="module-library">
-      <library>
-        <CLASSES>
-          <root url="jar://$MODULE_DIR$/lib/commons-logging-1.1.1.jar!/" />
-        </CLASSES>
-        <JAVADOC />
-        <SOURCES />
-      </library>
-    </orderEntry>
-    <orderEntry type="module-library">
-          <library>
-            <CLASSES>
-              <root url="jar://$MODULE_DIR$/lib/ngs-java-1.2.2.jar!/" />
-            </CLASSES>
-            <JAVADOC />
-            <SOURCES />
-          </library>
-        </orderEntry>
-  </component>
-</module>
-
diff --git a/htsjdk.ipr b/htsjdk.ipr
deleted file mode 100644
index 9ff5fb2..0000000
--- a/htsjdk.ipr
+++ /dev/null
@@ -1,352 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project version="4">
-  <component name="AntConfiguration">
-    <buildFile url="file://$PROJECT_DIR$/build.xml">
-      <additionalClassPath>
-        <entry dir="file://$PROJECT_DIR$/lib/ant" />
-      </additionalClassPath>
-      <maximumStackSize value="32" />
-      <executeOn event="compositeTask" target="[clean,test]" presentableName="[clean,test]" />
-    </buildFile>
-  </component>
-  <component name="BuildJarProjectSettings">
-    <option name="BUILD_JARS_ON_MAKE" value="false" />
-  </component>
-  <component name="CompilerConfiguration">
-    <option name="DEFAULT_COMPILER" value="Javac" />
-    <resourceExtensions>
-      <entry name=".+\.(properties|xml|html|dtd|tld)" />
-      <entry name=".+\.(gif|png|jpeg|jpg)" />
-    </resourceExtensions>
-    <wildcardResourcePatterns>
-      <entry name="?*.properties" />
-      <entry name="?*.xml" />
-      <entry name="?*.gif" />
-      <entry name="?*.png" />
-      <entry name="?*.jpeg" />
-      <entry name="?*.jpg" />
-      <entry name="?*.html" />
-      <entry name="?*.dtd" />
-      <entry name="?*.tld" />
-      <entry name="?*.ftl" />
-    </wildcardResourcePatterns>
-    <annotationProcessing>
-      <profile default="true" name="Default" enabled="false">
-        <processorPath useClasspath="true" />
-      </profile>
-    </annotationProcessing>
-  </component>
-  <component name="CopyrightManager" default="">
-    <module2copyright />
-  </component>
-  <component name="DependencyValidationManager">
-    <option name="SKIP_IMPORT_STATEMENTS" value="false" />
-  </component>
-  <component name="EclipseCompilerSettings">
-    <option name="GENERATE_NO_WARNINGS" value="true" />
-    <option name="DEPRECATION" value="false" />
-  </component>
-  <component name="Encoding" useUTFGuessing="true" native2AsciiForPropertiesFiles="false" />
-  <component name="EntryPointsManager">
-    <entry_points version="2.0" />
-  </component>
-  <component name="InspectionProjectProfileManager">
-    <profiles>
-      <profile version="1.0" is_locked="false">
-        <option name="myName" value="Project Default" />
-        <option name="myLocal" value="false" />
-        <inspection_tool class="Convert2Diamond" enabled="false" level="WARNING" enabled_by_default="false" />
-        <inspection_tool class="FieldMayBeFinal" enabled="true" level="WARNING" enabled_by_default="true" />
-        <inspection_tool class="JavaDoc" enabled="false" level="WARNING" enabled_by_default="false">
-          <option name="TOP_LEVEL_CLASS_OPTIONS">
-            <value>
-              <option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
-              <option name="REQUIRED_TAGS" value="" />
-            </value>
-          </option>
-          <option name="INNER_CLASS_OPTIONS">
-            <value>
-              <option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
-              <option name="REQUIRED_TAGS" value="" />
-            </value>
-          </option>
-          <option name="METHOD_OPTIONS">
-            <value>
-              <option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
-              <option name="REQUIRED_TAGS" value="@return at param@throws or @exception" />
-            </value>
-          </option>
-          <option name="FIELD_OPTIONS">
-            <value>
-              <option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
-              <option name="REQUIRED_TAGS" value="" />
-            </value>
-          </option>
-          <option name="IGNORE_DEPRECATED" value="false" />
-          <option name="IGNORE_JAVADOC_PERIOD" value="true" />
-          <option name="IGNORE_DUPLICATED_THROWS" value="false" />
-          <option name="IGNORE_POINT_TO_ITSELF" value="false" />
-          <option name="myAdditionalJavadocTags" value="" />
-        </inspection_tool>
-        <inspection_tool class="LocalCanBeFinal" enabled="true" level="WARNING" enabled_by_default="true">
-          <option name="REPORT_VARIABLES" value="true" />
-          <option name="REPORT_PARAMETERS" value="true" />
-          <option name="REPORT_CATCH_PARAMETERS" value="true" />
-          <option name="REPORT_FOREACH_PARAMETERS" value="true" />
-        </inspection_tool>
-        <inspection_tool class="SqlNoDataSourceInspection" enabled="false" level="WARNING" enabled_by_default="false" />
-        <inspection_tool class="UnusedDeclaration" enabled="false" level="WARNING" enabled_by_default="false">
-          <option name="ADD_MAINS_TO_ENTRIES" value="true" />
-          <option name="ADD_APPLET_TO_ENTRIES" value="true" />
-          <option name="ADD_SERVLET_TO_ENTRIES" value="true" />
-          <option name="ADD_NONJAVA_TO_ENTRIES" value="true" />
-        </inspection_tool>
-        <inspection_tool class="groupsTestNG" enabled="true" level="WARNING" enabled_by_default="true">
-          <option name="groups">
-            <value>
-              <list size="1">
-                <item index="0" class="java.lang.String" itemvalue="unix" />
-              </list>
-            </value>
-          </option>
-        </inspection_tool>
-      </profile>
-    </profiles>
-    <option name="PROJECT_PROFILE" value="Project Default" />
-    <option name="USE_PROJECT_PROFILE" value="true" />
-    <version value="1.0" />
-    <list size="6">
-      <item index="0" class="java.lang.String" itemvalue="SERVER PROBLEM" />
-      <item index="1" class="java.lang.String" itemvalue="WEAK WARNING" />
-      <item index="2" class="java.lang.String" itemvalue="INFO" />
-      <item index="3" class="java.lang.String" itemvalue="TYPO" />
-      <item index="4" class="java.lang.String" itemvalue="WARNING" />
-      <item index="5" class="java.lang.String" itemvalue="ERROR" />
-    </list>
-  </component>
-  <component name="JavadocGenerationManager">
-    <option name="OUTPUT_DIRECTORY" />
-    <option name="OPTION_SCOPE" value="protected" />
-    <option name="OPTION_HIERARCHY" value="true" />
-    <option name="OPTION_NAVIGATOR" value="true" />
-    <option name="OPTION_INDEX" value="true" />
-    <option name="OPTION_SEPARATE_INDEX" value="true" />
-    <option name="OPTION_DOCUMENT_TAG_USE" value="false" />
-    <option name="OPTION_DOCUMENT_TAG_AUTHOR" value="false" />
-    <option name="OPTION_DOCUMENT_TAG_VERSION" value="false" />
-    <option name="OPTION_DOCUMENT_TAG_DEPRECATED" value="true" />
-    <option name="OPTION_DEPRECATED_LIST" value="true" />
-    <option name="OTHER_OPTIONS" value="" />
-    <option name="HEAP_SIZE" />
-    <option name="LOCALE" />
-    <option name="OPEN_IN_BROWSER" value="true" />
-  </component>
-  <component name="Palette2">
-    <group name="Swing">
-      <item class="com.intellij.uiDesigner.HSpacer" tooltip-text="Horizontal Spacer" icon="/com/intellij/uiDesigner/icons/hspacer.png" removable="false" auto-create-binding="false" can-attach-label="false">
-        <default-constraints vsize-policy="1" hsize-policy="6" anchor="0" fill="1" />
-      </item>
-      <item class="com.intellij.uiDesigner.VSpacer" tooltip-text="Vertical Spacer" icon="/com/intellij/uiDesigner/icons/vspacer.png" removable="false" auto-create-binding="false" can-attach-label="false">
-        <default-constraints vsize-policy="6" hsize-policy="1" anchor="0" fill="2" />
-      </item>
-      <item class="javax.swing.JPanel" icon="/com/intellij/uiDesigner/icons/panel.png" removable="false" auto-create-binding="false" can-attach-label="false">
-        <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3" />
-      </item>
-      <item class="javax.swing.JScrollPane" icon="/com/intellij/uiDesigner/icons/scrollPane.png" removable="false" auto-create-binding="false" can-attach-label="true">
-        <default-constraints vsize-policy="7" hsize-policy="7" anchor="0" fill="3" />
-      </item>
-      <item class="javax.swing.JButton" icon="/com/intellij/uiDesigner/icons/button.png" removable="false" auto-create-binding="true" can-attach-label="false">
-        <default-constraints vsize-policy="0" hsize-policy="3" anchor="0" fill="1" />
-        <initial-values>
-          <property name="text" value="Button" />
-        </initial-values>
-      </item>
-      <item class="javax.swing.JRadioButton" icon="/com/intellij/uiDesigner/icons/radioButton.png" removable="false" auto-create-binding="true" can-attach-label="false">
-        <default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
-        <initial-values>
-          <property name="text" value="RadioButton" />
-        </initial-values>
-      </item>
-      <item class="javax.swing.JCheckBox" icon="/com/intellij/uiDesigner/icons/checkBox.png" removable="false" auto-create-binding="true" can-attach-label="false">
-        <default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
-        <initial-values>
-          <property name="text" value="CheckBox" />
-        </initial-values>
-      </item>
-      <item class="javax.swing.JLabel" icon="/com/intellij/uiDesigner/icons/label.png" removable="false" auto-create-binding="false" can-attach-label="false">
-        <default-constraints vsize-policy="0" hsize-policy="0" anchor="8" fill="0" />
-        <initial-values>
-          <property name="text" value="Label" />
-        </initial-values>
-      </item>
-      <item class="javax.swing.JTextField" icon="/com/intellij/uiDesigner/icons/textField.png" removable="false" auto-create-binding="true" can-attach-label="true">
-        <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
-          <preferred-size width="150" height="-1" />
-        </default-constraints>
-      </item>
-      <item class="javax.swing.JPasswordField" icon="/com/intellij/uiDesigner/icons/passwordField.png" removable="false" auto-create-binding="true" can-attach-label="true">
-        <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
-          <preferred-size width="150" height="-1" />
-        </default-constraints>
-      </item>
-      <item class="javax.swing.JFormattedTextField" icon="/com/intellij/uiDesigner/icons/formattedTextField.png" removable="false" auto-create-binding="true" can-attach-label="true">
-        <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
-          <preferred-size width="150" height="-1" />
-        </default-constraints>
-      </item>
-      <item class="javax.swing.JTextArea" icon="/com/intellij/uiDesigner/icons/textArea.png" removable="false" auto-create-binding="true" can-attach-label="true">
-        <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
-          <preferred-size width="150" height="50" />
-        </default-constraints>
-      </item>
-      <item class="javax.swing.JTextPane" icon="/com/intellij/uiDesigner/icons/textPane.png" removable="false" auto-create-binding="true" can-attach-label="true">
-        <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
-          <preferred-size width="150" height="50" />
-        </default-constraints>
-      </item>
-      <item class="javax.swing.JEditorPane" icon="/com/intellij/uiDesigner/icons/editorPane.png" removable="false" auto-create-binding="true" can-attach-label="true">
-        <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
-          <preferred-size width="150" height="50" />
-        </default-constraints>
-      </item>
-      <item class="javax.swing.JComboBox" icon="/com/intellij/uiDesigner/icons/comboBox.png" removable="false" auto-create-binding="true" can-attach-label="true">
-        <default-constraints vsize-policy="0" hsize-policy="2" anchor="8" fill="1" />
-      </item>
-      <item class="javax.swing.JTable" icon="/com/intellij/uiDesigner/icons/table.png" removable="false" auto-create-binding="true" can-attach-label="false">
-        <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
-          <preferred-size width="150" height="50" />
-        </default-constraints>
-      </item>
-      <item class="javax.swing.JList" icon="/com/intellij/uiDesigner/icons/list.png" removable="false" auto-create-binding="true" can-attach-label="false">
-        <default-constraints vsize-policy="6" hsize-policy="2" anchor="0" fill="3">
-          <preferred-size width="150" height="50" />
-        </default-constraints>
-      </item>
-      <item class="javax.swing.JTree" icon="/com/intellij/uiDesigner/icons/tree.png" removable="false" auto-create-binding="true" can-attach-label="false">
-        <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
-          <preferred-size width="150" height="50" />
-        </default-constraints>
-      </item>
-      <item class="javax.swing.JTabbedPane" icon="/com/intellij/uiDesigner/icons/tabbedPane.png" removable="false" auto-create-binding="true" can-attach-label="false">
-        <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
-          <preferred-size width="200" height="200" />
-        </default-constraints>
-      </item>
-      <item class="javax.swing.JSplitPane" icon="/com/intellij/uiDesigner/icons/splitPane.png" removable="false" auto-create-binding="false" can-attach-label="false">
-        <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
-          <preferred-size width="200" height="200" />
-        </default-constraints>
-      </item>
-      <item class="javax.swing.JSpinner" icon="/com/intellij/uiDesigner/icons/spinner.png" removable="false" auto-create-binding="true" can-attach-label="true">
-        <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
-      </item>
-      <item class="javax.swing.JSlider" icon="/com/intellij/uiDesigner/icons/slider.png" removable="false" auto-create-binding="true" can-attach-label="false">
-        <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
-      </item>
-      <item class="javax.swing.JSeparator" icon="/com/intellij/uiDesigner/icons/separator.png" removable="false" auto-create-binding="false" can-attach-label="false">
-        <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3" />
-      </item>
-      <item class="javax.swing.JProgressBar" icon="/com/intellij/uiDesigner/icons/progressbar.png" removable="false" auto-create-binding="true" can-attach-label="false">
-        <default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1" />
-      </item>
-      <item class="javax.swing.JToolBar" icon="/com/intellij/uiDesigner/icons/toolbar.png" removable="false" auto-create-binding="false" can-attach-label="false">
-        <default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1">
-          <preferred-size width="-1" height="20" />
-        </default-constraints>
-      </item>
-      <item class="javax.swing.JToolBar$Separator" icon="/com/intellij/uiDesigner/icons/toolbarSeparator.png" removable="false" auto-create-binding="false" can-attach-label="false">
-        <default-constraints vsize-policy="0" hsize-policy="0" anchor="0" fill="1" />
-      </item>
-      <item class="javax.swing.JScrollBar" icon="/com/intellij/uiDesigner/icons/scrollbar.png" removable="false" auto-create-binding="true" can-attach-label="false">
-        <default-constraints vsize-policy="6" hsize-policy="0" anchor="0" fill="2" />
-      </item>
-    </group>
-  </component>
-  <component name="ProjectCodeStyleSettingsManager">
-    <option name="PER_PROJECT_SETTINGS">
-      <value>
-        <option name="GENERATE_FINAL_LOCALS" value="true" />
-        <option name="GENERATE_FINAL_PARAMETERS" value="true" />
-        <option name="USE_FQ_CLASS_NAMES_IN_JAVADOC" value="false" />
-        <option name="CLASS_COUNT_TO_USE_IMPORT_ON_DEMAND" value="999" />
-        <option name="RIGHT_MARGIN" value="140" />
-        <option name="JD_DO_NOT_WRAP_ONE_LINE_COMMENTS" value="true" />
-        <XML>
-          <option name="XML_LEGACY_SETTINGS_IMPORTED" value="true" />
-        </XML>
-        <codeStyleSettings language="JAVA">
-          <option name="KEEP_SIMPLE_METHODS_IN_ONE_LINE" value="true" />
-        </codeStyleSettings>
-      </value>
-    </option>
-    <option name="USE_PER_PROJECT_SETTINGS" value="true" />
-  </component>
-  <component name="ProjectDetails">
-    <option name="projectName" value="Picard-public" />
-  </component>
-  <component name="ProjectDictionaryState">
-    <dictionary name="jrose">
-      <words>
-        <w>ribosomal</w>
-      </words>
-    </dictionary>
-    <dictionary name="mccowan">
-      <words>
-        <w>bgzipped</w>
-        <w>codecs</w>
-        <w>demultiplex</w>
-        <w>demultiplexed</w>
-        <w>eamss</w>
-        <w>endian</w>
-        <w>gzipped</w>
-        <w>illumina's</w>
-        <w>indexable</w>
-        <w>inferer</w>
-        <w>inferrer</w>
-        <w>parsability</w>
-        <w>phread</w>
-        <w>seekable</w>
-        <w>tabix</w>
-        <w>tokenizes</w>
-        <w>tribble</w>
-      </words>
-    </dictionary>
-  </component>
-  <component name="ProjectModuleManager">
-    <modules>
-      <module fileurl="file://$PROJECT_DIR$/htsjdk.iml" filepath="$PROJECT_DIR$/htsjdk.iml" />
-    </modules>
-  </component>
-  <component name="ProjectResources">
-    <default-html-doctype>http://www.w3.org/1999/xhtml</default-html-doctype>
-  </component>
-  <component name="ProjectRootManager" version="2" languageLevel="JDK_1_6" assert-keyword="true" jdk-15="true" project-jdk-name="1.6" project-jdk-type="JavaSDK">
-    <output url="file://$PROJECT_DIR$/out" />
-  </component>
-  <component name="SvnBranchConfigurationManager">
-    <option name="myConfigurationMap">
-      <map>
-        <entry key="$PROJECT_DIR$">
-          <value>
-            <SvnBranchConfiguration>
-              <option name="branchUrls">
-                <list>
-                  <option value="https://picard.svn.sourceforge.net/svnroot/picard/branches" />
-                  <option value="https://picard.svn.sourceforge.net/svnroot/picard/tags" />
-                </list>
-              </option>
-              <option name="trunkUrl" value="https://picard.svn.sourceforge.net/svnroot/picard/trunk" />
-            </SvnBranchConfiguration>
-          </value>
-        </entry>
-      </map>
-    </option>
-    <option name="mySupportsUserInfoFilter" value="true" />
-  </component>
-  <component name="VcsDirectoryMappings">
-    <mapping directory="" vcs="" />
-    <mapping directory="$PROJECT_DIR$" vcs="Git" />
-  </component>
-  <component name="WebServicesPlugin" addRequiredLibraries="true" />
-</project>
-
diff --git a/project/plugins.sbt b/project/plugins.sbt
deleted file mode 100644
index 9c2b114..0000000
--- a/project/plugins.sbt
+++ /dev/null
@@ -1,5 +0,0 @@
-addSbtPlugin("de.johoop" % "sbt-testng-plugin" % "3.0.2")
-
-addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.13.0")
-
-addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "0.7.1")
\ No newline at end of file
diff --git a/src/scripts/explain_sam_flags.py b/scripts/explain_sam_flags.py
similarity index 100%
rename from src/scripts/explain_sam_flags.py
rename to scripts/explain_sam_flags.py
diff --git a/src/scripts/release_picard.sh b/scripts/release_picard.sh
similarity index 100%
rename from src/scripts/release_picard.sh
rename to scripts/release_picard.sh
diff --git a/src/c/inteldeflater/IntelDeflater.c b/src/c/inteldeflater/IntelDeflater.c
deleted file mode 100644
index 65f5d1d..0000000
--- a/src/c/inteldeflater/IntelDeflater.c
+++ /dev/null
@@ -1,392 +0,0 @@
-/*
- * Copyright (c) 1997, 2010, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.  Oracle designates this
- * particular file as subject to the "Classpath" exception as provided
- * by Oracle in the LICENSE file that accompanied this code.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-
-/*
- * Native method support for htsjdk.samtools.util.zip.IntelDeflater.
- * This is copied from OpenJDK native support for java.util.zip.Deflater, and modified to support igzip.
- */
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <immintrin.h> 
-#include <emmintrin.h>
-#include <stdbool.h>
-#include <assert.h>
-#include "jlong.h"
-#include "jni.h"
-//#include "jni_util.h"
-
-#include "zlib.h"
-#include "htsjdk_samtools_util_zip_IntelDeflater.h"
-#include "igzip_lib.h"
-#define DEF_MEM_LEVEL 8
-#define FAST_COMPRESSION 1
-#define IGZIP_TRUE 1
-
-static jfieldID levelID;
-static jfieldID strategyID;
-static jfieldID setParamsID;
-static jfieldID finishID;
-static jfieldID finishedID;
-static jfieldID bufID, offID, lenID;
-
-typedef struct {
-    z_stream zStream;
-    LZ_Stream2 lz2Stream;
-    int useIGZIP;
-} Stream;
-
-
-bool is_cpuid_ecx_bit_set(int eax, int bitidx) 
-{ 
-  int ecx = 0, edx = 0, ebx = 0; 
-  __asm__ ("cpuid" 
-	   :"=b" (ebx), 
-	    "=c" (ecx), 
-	    "=d" (edx) 
-	   :"a" (eax) 
-	   ); 
-  return (((ecx >> bitidx)&1) == 1); 
-}
-
-bool is_sse42_supported() 
-{ 
-#ifdef __INTEL_COMPILER 
-  return  (_may_i_use_cpu_feature(_FEATURE_SSE4_2) > 0); 
-#else 
-  //  return  __builtin_cpu_supports("sse4.2"); 
-  return is_cpuid_ecx_bit_set(1, 20); 
-#endif 
-}
-//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
-/**
- * Throw a Java exception by name. Similar to SignalError.
- */
-JNIEXPORT void JNICALL
-JNU_ThrowByName(JNIEnv *env, const char *name, const char *msg)
-{
-    jclass cls = (*env)->FindClass(env, name);
-
-    if (cls != 0) /* Otherwise an exception has already been thrown */
-        (*env)->ThrowNew(env, cls, msg);
-}
-
-/* JNU_Throw common exceptions */
-
-JNIEXPORT void JNICALL
-JNU_ThrowNullPointerException(JNIEnv *env, const char *msg)
-{
-    JNU_ThrowByName(env, "java/lang/NullPointerException", msg);
-}
-
-
-JNIEXPORT void JNICALL
-JNU_ThrowOutOfMemoryError(JNIEnv *env, const char *msg)
-{
-    JNU_ThrowByName(env, "java/lang/OutOfMemoryError", msg);
-}
-
-JNIEXPORT void JNICALL
-JNU_ThrowIllegalArgumentException(JNIEnv *env, const char *msg)
-{
-    JNU_ThrowByName(env, "java/lang/IllegalArgumentException", msg);
-}
-
-JNIEXPORT void JNICALL
-JNU_ThrowInternalError(JNIEnv *env, const char *msg)
-{
-    JNU_ThrowByName(env, "java/lang/InternalError", msg);
-}
-/////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-JNIEXPORT void JNICALL
-Java_htsjdk_samtools_util_zip_IntelDeflater_initIDs(JNIEnv *env, jclass cls)
-{
-    levelID = (*env)->GetFieldID(env, cls, "level", "I");
-    strategyID = (*env)->GetFieldID(env, cls, "strategy", "I");
-    setParamsID = (*env)->GetFieldID(env, cls, "setParams", "Z");
-    finishID = (*env)->GetFieldID(env, cls, "finish", "Z");
-    finishedID = (*env)->GetFieldID(env, cls, "finished", "Z");
-    bufID = (*env)->GetFieldID(env, cls, "buf", "[B");
-    offID = (*env)->GetFieldID(env, cls, "off", "I");
-    lenID = (*env)->GetFieldID(env, cls, "len", "I");
- 
-}
-
-JNIEXPORT jlong JNICALL
-Java_htsjdk_samtools_util_zip_IntelDeflater_init(JNIEnv *env, jclass cls, jint level,
-                                 jint strategy, jboolean nowrap)
-{
-    Stream *strm = calloc(1, sizeof(Stream));
-    if (level == FAST_COMPRESSION && is_sse42_supported()) { //Use igzip
-	printf("Using igzip\n");
-	if (strm == 0) {
-	    JNU_ThrowOutOfMemoryError(env, 0);
-	    return jlong_zero;
-	} else {
-	    strm->useIGZIP = IGZIP_TRUE; /* set only after the allocation null-check succeeds */
-	    init_stream(&strm->lz2Stream); //CHECK RETURN VALUE
-	    return ptr_to_jlong(strm);
-	}
-      
-    } else {
-
-	if (strm == 0) {
-	    JNU_ThrowOutOfMemoryError(env, 0);
-	    return jlong_zero;
-	} else {
-	    char *msg;
-	    switch (deflateInit2(&strm->zStream, level, Z_DEFLATED,
-				 nowrap ? -MAX_WBITS : MAX_WBITS,
-				 DEF_MEM_LEVEL, strategy)) {
-	    case Z_OK:
-		return ptr_to_jlong(&strm->zStream);
-	    case Z_MEM_ERROR:
-		free(strm);
-		JNU_ThrowOutOfMemoryError(env, 0);
-		return jlong_zero;
-	    case Z_STREAM_ERROR:
-		free(strm);
-		JNU_ThrowIllegalArgumentException(env, 0);
-		return jlong_zero;
-	    default:
-		msg = strm->zStream.msg;
-		free(strm);
-		JNU_ThrowInternalError(env, msg);
-		return jlong_zero;
-	    }
-	}
-    }
-}
-
-JNIEXPORT void JNICALL
-Java_htsjdk_samtools_util_zip_IntelDeflater_setDictionary(JNIEnv *env, jclass cls, jlong addr,
-                                          jarray b, jint off, jint len)
-{
-    Bytef *buf = (*env)->GetPrimitiveArrayCritical(env, b, 0);
-    int res;
-    if (buf == 0) {/* out of memory */
-        return;
-    }
-    res = deflateSetDictionary(&((Stream *)jlong_to_ptr(addr))->zStream, buf + off, len);
-    (*env)->ReleasePrimitiveArrayCritical(env, b, buf, 0);
-    switch (res) {
-    case Z_OK:
-        break;
-    case Z_STREAM_ERROR:
-        JNU_ThrowIllegalArgumentException(env, 0);
-        break;
-    default:
-        JNU_ThrowInternalError(env, ((Stream *)jlong_to_ptr(addr))->zStream.msg);
-        break;
-    }
-}
-
-JNIEXPORT jint JNICALL
-Java_htsjdk_samtools_util_zip_IntelDeflater_deflateBytes(JNIEnv *env, jobject this, jlong addr,
-                                         jarray b, jint off, jint len, jint flush)
-{
-    jarray this_buf = (*env)->GetObjectField(env, this, bufID);
-    jint this_off = (*env)->GetIntField(env, this, offID);
-    jint this_len = (*env)->GetIntField(env, this, lenID);
-    jbyte *in_buf;
-    jbyte *out_buf;
-    Stream *strm = jlong_to_ptr(addr);
-
-    //igzip only supports one compression level so setParamsID should not be set when using igzip 
-    //igzip does not support flush
-    if (((Stream *)jlong_to_ptr(addr))->useIGZIP && (((*env)->GetBooleanField(env, this, setParamsID) && strm->lz2Stream.total_in != 0)  || flush == 1)) {
-	JNU_ThrowInternalError(env, "igzip supports neither changing compression parameters mid-stream nor flushing");
-    } else if (((Stream *)jlong_to_ptr(addr))->useIGZIP) {
-	in_buf = (*env)->GetPrimitiveArrayCritical(env, this_buf, 0);
-	if (in_buf == NULL) {
-	    // Throw OOME only when length is not zero
-	    if (this_len != 0) {
-		JNU_ThrowOutOfMemoryError(env, 0);
-	    }
-	    return 0;
-	}
-	out_buf = (*env)->GetPrimitiveArrayCritical(env, b, 0);
-	if (out_buf == NULL) {
-	    (*env)->ReleasePrimitiveArrayCritical(env, this_buf, in_buf, 0);
-	    if (len != 0) {
-		JNU_ThrowOutOfMemoryError(env, 0);
-	    }
-	    return 0;
-	}
-	strm->lz2Stream.next_in = (Bytef *) (in_buf + this_off);
-	strm->lz2Stream.next_out = (Bytef *) (out_buf + off);
-	strm->lz2Stream.avail_in = this_len;
-	strm->lz2Stream.avail_out = len;
-	assert(strm->lz2Stream.avail_in != 0);
-	assert(strm->lz2Stream.avail_out != 0);
-	jboolean finish = (*env)->GetBooleanField(env, this, finishID);
-	if (finish) {
-	    strm->lz2Stream.end_of_stream = 1;
-	} else {
-	    strm->lz2Stream.end_of_stream = 0;
-	}
-	fast_lz(&strm->lz2Stream);
-
-	(*env)->ReleasePrimitiveArrayCritical(env, b, out_buf, 0);
-	(*env)->ReleasePrimitiveArrayCritical(env, this_buf, in_buf, 0);
-	if (finish) {
-	    (*env)->SetBooleanField(env, this, finishedID, JNI_TRUE);
-	}
-	this_off += this_len - strm->lz2Stream.avail_in;
-	(*env)->SetIntField(env, this, offID, this_off);
-	(*env)->SetIntField(env, this, lenID, strm->lz2Stream.avail_in);
-	return len - strm->lz2Stream.avail_out;
-    } else {
-
-	int res;
-	if ((*env)->GetBooleanField(env, this, setParamsID)) {
-	    int level = (*env)->GetIntField(env, this, levelID);
-	    int strategy = (*env)->GetIntField(env, this, strategyID);
-
-	    in_buf = (*env)->GetPrimitiveArrayCritical(env, this_buf, 0);
-	    if (in_buf == NULL) {
-		// Throw OOME only when length is not zero
-		if (this_len != 0)
-		    JNU_ThrowOutOfMemoryError(env, 0);
-		return 0;
-	    }
-	    out_buf = (*env)->GetPrimitiveArrayCritical(env, b, 0);
-	    if (out_buf == NULL) {
-		(*env)->ReleasePrimitiveArrayCritical(env, this_buf, in_buf, 0);
-		if (len != 0)
-		    JNU_ThrowOutOfMemoryError(env, 0);
-		return 0;
-	    }
-
-	    strm->zStream.next_in = (Bytef *) (in_buf + this_off);
-	    strm->zStream.next_out = (Bytef *) (out_buf + off);
-	    strm->zStream.avail_in = this_len;
-	    strm->zStream.avail_out = len;
-	    res = deflateParams(&strm->zStream, level, strategy);
-	    (*env)->ReleasePrimitiveArrayCritical(env, b, out_buf, 0);
-	    (*env)->ReleasePrimitiveArrayCritical(env, this_buf, in_buf, 0);
-
-	    switch (res) {
-	    case Z_OK:
-		(*env)->SetBooleanField(env, this, setParamsID, JNI_FALSE);
-		this_off += this_len - strm->zStream.avail_in;
-		(*env)->SetIntField(env, this, offID, this_off);
-		(*env)->SetIntField(env, this, lenID, strm->zStream.avail_in);
-		return len - strm->zStream.avail_out;
-	    case Z_BUF_ERROR:
-		(*env)->SetBooleanField(env, this, setParamsID, JNI_FALSE);
-		return 0;
-	    default:
-		JNU_ThrowInternalError(env, strm->zStream.msg);
-		return 0;
-	    }
-	} else {
-	    jboolean finish = (*env)->GetBooleanField(env, this, finishID);
-	    in_buf = (*env)->GetPrimitiveArrayCritical(env, this_buf, 0);
-	    if (in_buf == NULL) {
-		if (this_len != 0)
-		    JNU_ThrowOutOfMemoryError(env, 0);
-		return 0;
-	    }
-	    out_buf = (*env)->GetPrimitiveArrayCritical(env, b, 0);
-	    if (out_buf == NULL) {
-		(*env)->ReleasePrimitiveArrayCritical(env, this_buf, in_buf, 0);
-		if (len != 0)
-		    JNU_ThrowOutOfMemoryError(env, 0);
-
-		return 0;
-	    }
-
-	    strm->zStream.next_in = (Bytef *) (in_buf + this_off);
-	    strm->zStream.next_out = (Bytef *) (out_buf + off);
-	    strm->zStream.avail_in = this_len;
-	    strm->zStream.avail_out = len;
-	    res = deflate(&strm->zStream, finish ? Z_FINISH : flush);
-	    (*env)->ReleasePrimitiveArrayCritical(env, b, out_buf, 0);
-	    (*env)->ReleasePrimitiveArrayCritical(env, this_buf, in_buf, 0);
-
-	    switch (res) {
-	    case Z_STREAM_END:
-		(*env)->SetBooleanField(env, this, finishedID, JNI_TRUE);
-		/* fall through */
-	    case Z_OK:
-		this_off += this_len - strm->zStream.avail_in;
-		(*env)->SetIntField(env, this, offID, this_off);
-		(*env)->SetIntField(env, this, lenID, strm->zStream.avail_in);
-		return len - strm->zStream.avail_out;
-	    case Z_BUF_ERROR:
-		return 0;
-            default:
-		JNU_ThrowInternalError(env, strm->zStream.msg);
-		return 0;
-	    }
-	}
-    }
-}
-
-JNIEXPORT jint JNICALL
-Java_htsjdk_samtools_util_zip_IntelDeflater_getAdler(JNIEnv *env, jclass cls, jlong addr)
-{
-    if (((Stream *)jlong_to_ptr(addr))->useIGZIP) {
-	JNU_ThrowInternalError(env, "igzip doesn't support getAdler function");
-	return 0; /* value unused; the pending exception is raised when control returns to Java */
-    }
-    return ((Stream *)jlong_to_ptr(addr))->zStream.adler;
-}
-
-JNIEXPORT jlong JNICALL
-Java_htsjdk_samtools_util_zip_IntelDeflater_getBytesRead(JNIEnv *env, jclass cls, jlong addr)
-{
-    return ( ((Stream *)jlong_to_ptr(addr))->useIGZIP ? ((Stream *) jlong_to_ptr(addr))->lz2Stream.total_in : ((Stream *)jlong_to_ptr(addr))->zStream.total_in);
-}
-
-JNIEXPORT jlong JNICALL
-Java_htsjdk_samtools_util_zip_IntelDeflater_getBytesWritten(JNIEnv *env, jclass cls, jlong addr)
-{
-    return ( ((Stream *)jlong_to_ptr(addr))->useIGZIP ? ((Stream *) jlong_to_ptr(addr))->lz2Stream.total_out : ((Stream *)jlong_to_ptr(addr))->zStream.total_out);
-}
-
-JNIEXPORT void JNICALL
-Java_htsjdk_samtools_util_zip_IntelDeflater_reset(JNIEnv *env, jclass cls, jlong addr)
-{
-    if (((Stream *)jlong_to_ptr(addr))->useIGZIP)
-	init_stream(&(((Stream *)jlong_to_ptr(addr))->lz2Stream));
-    else {
-	if (deflateReset(&(((Stream *)jlong_to_ptr(addr))->zStream)) != Z_OK) {
-	    JNU_ThrowInternalError(env, 0);
-	}
-    }
-}
-
-JNIEXPORT void JNICALL
-Java_htsjdk_samtools_util_zip_IntelDeflater_end(JNIEnv *env, jclass cls, jlong addr)
-{
-    if (!((Stream *)jlong_to_ptr(addr))->useIGZIP) {
-	if (deflateEnd(&(((Stream *)jlong_to_ptr(addr))->zStream)) == Z_STREAM_ERROR) {
-	    JNU_ThrowInternalError(env, 0);
-	} 
-    }
-    free((Stream *)jlong_to_ptr(addr));
-}
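
For orientation, the native routines removed above implement the same streaming contract as java.util.zip.Deflater (the file header notes it was copied from the OpenJDK native support for that class): input is handed over with setInput(), end-of-stream is signalled with finish(), and deflate() is called until finished() reports true. A minimal sketch of that contract, written against the standard Deflater class rather than the removed IntelDeflater wrapper; the class and method names DeflateSketch/compress are illustrative only, and level 1 is the level at which the native code above switches to the igzip path:

    import java.io.ByteArrayOutputStream;
    import java.util.zip.Deflater;

    public class DeflateSketch {
        /** Compress a buffer using the Deflater streaming contract the JNI above implements. */
        public static byte[] compress(final byte[] input) {
            // BEST_SPEED (level 1) corresponds to FAST_COMPRESSION / the igzip fast path above;
            // true requests raw deflate output with no zlib wrapper.
            final Deflater deflater = new Deflater(Deflater.BEST_SPEED, true);
            deflater.setInput(input);
            deflater.finish();
            final ByteArrayOutputStream out = new ByteArrayOutputStream();
            final byte[] buffer = new byte[8192];
            while (!deflater.finished()) {
                final int n = deflater.deflate(buffer);
                out.write(buffer, 0, n);
            }
            deflater.end();
            return out.toByteArray();
        }
    }
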
diff --git a/src/c/inteldeflater/README b/src/c/inteldeflater/README
deleted file mode 100644
index 31170d4..0000000
--- a/src/c/inteldeflater/README
+++ /dev/null
@@ -1,7 +0,0 @@
-- IntelDeflater.c implements JNI for the IntelDeflater in htsjdk.samtools.util.zip.IntelDeflater
-- IntelDeflater uses Intel(R) Integrated Performance Primitives (Intel(R) IPP) Samples and igzip to accelerate BAM compression.
-- Steps to build Intel Deflater using src/scripts/build_intel_deflater.sh:
-  - $OPENJDK should point to the OpenJDK directory
-  - $IPP8_INSTALL_DIR should point to the composer_xe_YEAR/ipp directory
-  - $IPP8_CODE_SAMPLES_DIR should point to ipp-samples.8.0.0.x directory
-  - $IGZIP_LIB should point to igzip_042/igzip directory, igzip should be built with ONLY_DEFLATE and GENOME_BAM defined. 
diff --git a/src/c/inteldeflater/igzip_lib.h b/src/c/inteldeflater/igzip_lib.h
deleted file mode 100644
index a27b334..0000000
--- a/src/c/inteldeflater/igzip_lib.h
+++ /dev/null
@@ -1,50 +0,0 @@
-/**********************************************************************
-The MIT License
-
-Copyright (c) 2014 Intel Corporation
-
-	Permission is hereby granted, free of charge, to any person
-	obtaining a copy of this software and associated documentation
-	files (the "Software"), to deal in the Software without
-	restriction, including without limitation the rights to use,
-	copy, modify, merge, publish, distribute, sublicense, and/or
-	sell copies of the Software, and to permit persons to whom the
-	Software is furnished to do so, subject to the following
-	conditions:
-
-	The above copyright notice and this permission notice shall be
-	included in all copies or substantial portions of the
-	Software.
-
-	THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-	KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-	WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-	PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-	COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-	LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-	OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-	SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-**********************************************************************/
-#include "internal_state_size.h"
-#include "types.h"
-
-typedef struct {
-    UINT8 opaque[INTERNAL_STATE_SIZE];
-} LZ_State2;
-
-typedef struct {
-    UINT8 *next_in;   // Next input byte
-    UINT32 avail_in;  // number of bytes available at next_in
-    UINT32 total_in;  // total number of bytes read so far
-
-    UINT8 *next_out;  // Next output byte
-    UINT32 avail_out; // number of bytes available at next_out
-    UINT32 total_out; // total number of bytes written so far
-    UINT32 end_of_stream; // non-zero if this is the last input buffer
-
-    LZ_State2 internal_state;
-} LZ_Stream2;
-
-
-void init_stream(LZ_Stream2 *stream);
-void fast_lz(LZ_Stream2 *stream);
diff --git a/src/c/inteldeflater/internal_state_size.h b/src/c/inteldeflater/internal_state_size.h
deleted file mode 100644
index 1823a33..0000000
--- a/src/c/inteldeflater/internal_state_size.h
+++ /dev/null
@@ -1,32 +0,0 @@
-/**********************************************************************
-The MIT License
-
-Copyright (c) 2014 Intel Corporation
-
-	Permission is hereby granted, free of charge, to any person
-	obtaining a copy of this software and associated documentation
-	files (the "Software"), to deal in the Software without
-	restriction, including without limitation the rights to use,
-	copy, modify, merge, publish, distribute, sublicense, and/or
-	sell copies of the Software, and to permit persons to whom the
-	Software is furnished to do so, subject to the following
-	conditions:
-
-	The above copyright notice and this permission notice shall be
-	included in all copies or substantial portions of the
-	Software.
-
-	THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-	KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-	WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-	PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-	COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-	LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-	OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-	SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-**********************************************************************/
-//// for 8K
-#define INTERNAL_STATE_SIZE (82368+16)
-
-// for 32K
-//#define INTERNAL_STATE_SIZE (328128+16)
diff --git a/src/c/inteldeflater/types.h b/src/c/inteldeflater/types.h
deleted file mode 100644
index 83f7cc2..0000000
--- a/src/c/inteldeflater/types.h
+++ /dev/null
@@ -1,46 +0,0 @@
-/**********************************************************************
-The MIT License
-
-Copyright (c) 2014 Intel Corporation
-
-	Permission is hereby granted, free of charge, to any person
-	obtaining a copy of this software and associated documentation
-	files (the "Software"), to deal in the Software without
-	restriction, including without limitation the rights to use,
-	copy, modify, merge, publish, distribute, sublicense, and/or
-	sell copies of the Software, and to permit persons to whom the
-	Software is furnished to do so, subject to the following
-	conditions:
-
-	The above copyright notice and this permission notice shall be
-	included in all copies or substantial portions of the
-	Software.
-
-	THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-	KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-	WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-	PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-	COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-	LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-	OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-	SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-**********************************************************************/
-
-#ifndef __TYPES_H
-#define __TYPES_H
-
-#include <stdint.h> // For standard integer types
-
-typedef struct {
-    uint64_t low;
-    uint64_t high;
-} uint128_t;
-
-typedef int64_t   INT64;
-typedef uint64_t  UINT64;
-typedef uint32_t  UINT32;
-typedef uint16_t  UINT16;
-typedef uint8_t   UINT8;
-typedef uint128_t UINT128;
-
-#endif
diff --git a/src/java/htsjdk/samtools/BAMFileReader.java b/src/java/htsjdk/samtools/BAMFileReader.java
deleted file mode 100644
index 94673bf..0000000
--- a/src/java/htsjdk/samtools/BAMFileReader.java
+++ /dev/null
@@ -1,989 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools;
-
-
-import htsjdk.samtools.seekablestream.SeekableStream;
-import htsjdk.samtools.util.BinaryCodec;
-import htsjdk.samtools.util.BlockCompressedInputStream;
-import htsjdk.samtools.util.CloseableIterator;
-import htsjdk.samtools.util.CoordMath;
-import htsjdk.samtools.util.RuntimeIOException;
-import htsjdk.samtools.util.StringLineReader;
-
-import java.io.DataInputStream;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.NoSuchElementException;
-
-/**
- * Class for reading and querying BAM files.
- */
-class BAMFileReader extends SamReader.ReaderImplementation {
-    // True if reading from a File rather than an InputStream
-    private boolean mIsSeekable = false;
-
-    // For converting bytes into other primitive types
-    private BinaryCodec mStream = null;
-
-    // Underlying compressed data stream.
-    private final BlockCompressedInputStream mCompressedInputStream;
-    private SAMFileHeader mFileHeader = null;
-
-    // One of these is populated if the file is seekable and an index exists
-    private File mIndexFile = null;
-    private SeekableStream mIndexStream = null;
-
-    private BAMIndex mIndex = null;
-    private long mFirstRecordPointer = 0;
-    // If non-null, there is an unclosed iterator extant.
-    private CloseableIterator<SAMRecord> mCurrentIterator = null;
-
-    // If true, all SAMRecords are fully decoded as they are read.
-    private boolean eagerDecode;
-
-    // If true, the BAMFileReader will use asynchronous IO.
-    // Note: this field currently has no effect (is not hooked up anywhere), but will be in the future. See https://github.com/samtools/htsjdk/pull/576
-    private final boolean useAsynchronousIO;
-
-    // For error-checking.
-    private ValidationStringency mValidationStringency;
-
-    // For creating BAMRecords
-    private SAMRecordFactory samRecordFactory;
-
-    /**
-     * Use the caching index reader implementation rather than the disk-hit-per-file model.
-     */
-    private boolean mEnableIndexCaching = false;
-
-    /**
-     * Use the traditional memory-mapped implementation for BAM file indexes rather than regular I/O.
-     */
-    private boolean mEnableIndexMemoryMapping = true;
-
-    /**
-     * Add information about the origin (reader and position) to SAM records.
-     */
-    private SamReader mReader = null;
-
-    /**
-     * Prepare to read BAM from a stream (not seekable)
-     * @param stream source of bytes.
-     * @param eagerDecode if true, decode all BAM fields as reading rather than lazily.
-     * @param validationStringency Controls how to handle invalid reads or header lines.
-     */
-    BAMFileReader(final InputStream stream,
-                  final File indexFile,
-                  final boolean eagerDecode,
-                  final boolean useAsynchronousIO,
-                  final ValidationStringency validationStringency,
-                  final SAMRecordFactory factory)
-        throws IOException {
-        mIndexFile = indexFile;
-        mIsSeekable = false;
-        this.useAsynchronousIO = useAsynchronousIO;
-        mCompressedInputStream = new BlockCompressedInputStream(stream);
-        mStream = new BinaryCodec(new DataInputStream(mCompressedInputStream));
-        this.eagerDecode = eagerDecode;
-        this.mValidationStringency = validationStringency;
-        this.samRecordFactory = factory;
-        this.mFileHeader = readHeader(this.mStream, this.mValidationStringency, null);
-    }
-
-    /**
-     * Prepare to read BAM from a file (seekable)
-     * @param file source of bytes.
-     * @param eagerDecode if true, decode all BAM fields as reading rather than lazily.
-     * @param validationStringency Controls how to handle invalid reads or header lines.
-     */
-    BAMFileReader(final File file,
-                  final File indexFile,
-                  final boolean eagerDecode,
-                  final boolean useAsynchronousIO,
-                  final ValidationStringency validationStringency,
-                  final SAMRecordFactory factory)
-        throws IOException {
-        this(new BlockCompressedInputStream(file), indexFile!=null ? indexFile : SamFiles.findIndex(file), eagerDecode, useAsynchronousIO, file.getAbsolutePath(), validationStringency, factory);
-        if (mIndexFile != null && mIndexFile.lastModified() < file.lastModified()) {
-            System.err.println("WARNING: BAM index file " + mIndexFile.getAbsolutePath() +
-                    " is older than BAM " + file.getAbsolutePath());
-        }
-        // Provide better error message when there is an error reading.
-        mStream.setInputFileName(file.getAbsolutePath());
-    }
-
-    BAMFileReader(final SeekableStream strm,
-                  final File indexFile,
-                  final boolean eagerDecode,
-                  final boolean useAsynchronousIO,
-                  final ValidationStringency validationStringency,
-                  final SAMRecordFactory factory)
-        throws IOException {
-        this(new BlockCompressedInputStream(strm), indexFile, eagerDecode, useAsynchronousIO, strm.getSource(), validationStringency, factory);
-    }
-
-    BAMFileReader(final SeekableStream strm,
-                  final SeekableStream indexStream,
-                  final boolean eagerDecode,
-                  final boolean useAsynchronousIO,
-                  final ValidationStringency validationStringency,
-                  final SAMRecordFactory factory)
-        throws IOException {
-        this(new BlockCompressedInputStream(strm), indexStream, eagerDecode, useAsynchronousIO, strm.getSource(), validationStringency, factory);
-    }
-
-    private BAMFileReader(final BlockCompressedInputStream compressedInputStream,
-                          final File indexFile,
-                          final boolean eagerDecode,
-                          final boolean useAsynchronousIO,
-                          final String source,
-                          final ValidationStringency validationStringency,
-                          final SAMRecordFactory factory)
-        throws IOException {
-        mIndexFile = indexFile;
-        mIsSeekable = true;
-        mCompressedInputStream = compressedInputStream;
-        mStream = new BinaryCodec(new DataInputStream(mCompressedInputStream));
-        this.eagerDecode = eagerDecode;
-        this.useAsynchronousIO = useAsynchronousIO;
-        this.mValidationStringency = validationStringency;
-        this.samRecordFactory = factory;
-        this.mFileHeader = readHeader(this.mStream, this.mValidationStringency, source);
-        mFirstRecordPointer = mCompressedInputStream.getFilePointer();
-    }    
-
-    private BAMFileReader(final BlockCompressedInputStream compressedInputStream,
-                          final SeekableStream indexStream,
-                          final boolean eagerDecode,
-                          final boolean useAsynchronousIO,
-                          final String source,
-                          final ValidationStringency validationStringency,
-                          final SAMRecordFactory factory)
-        throws IOException {
-        mIndexStream = indexStream;
-        mIsSeekable = true;
-        mCompressedInputStream = compressedInputStream;
-        mStream = new BinaryCodec(new DataInputStream(mCompressedInputStream));
-        this.eagerDecode = eagerDecode;
-        this.useAsynchronousIO = useAsynchronousIO;
-        this.mValidationStringency = validationStringency;
-        this.samRecordFactory = factory;
-        this.mFileHeader = readHeader(this.mStream, this.mValidationStringency, source);
-        mFirstRecordPointer = mCompressedInputStream.getFilePointer();
-    }
-
-    /** Reads through the header and sequence records to find the virtual file offset of the first record in the BAM file. */
-    static long findVirtualOffsetOfFirstRecord(final File bam) throws IOException {
-        final BAMFileReader reader = new BAMFileReader(bam, null, false, false, ValidationStringency.SILENT, new DefaultSAMRecordFactory());
-        final long offset = reader.mFirstRecordPointer;
-        reader.close();
-        return offset;
-    }
-
-    /**
-     * If true, writes the source of every read into the source SAMRecords.
-     * @param enabled true to write source information into each SAMRecord.
-     */
-    void enableFileSource(final SamReader reader, final boolean enabled) {
-        this.mReader = enabled ? reader : null;
-    }
-
-    /**
-     * If true, uses the caching version of the index reader.
-     * @param enabled true to use the caching version of the reader.
-     */
-    protected void enableIndexCaching(final boolean enabled) {
-        if(mIndex != null)
-            throw new SAMException("Unable to turn on index caching; index file has already been loaded.");
-        this.mEnableIndexCaching = enabled;
-    }
-
-    /**
-     * If false, disable the use of memory mapping for accessing index files (default behavior is to use memory mapping).
-     * This is slower but more scalable when accessing large numbers of BAM files sequentially.
-     * @param enabled True to use memory mapping, false to use regular I/O.
-     */
-    protected void enableIndexMemoryMapping(final boolean enabled) {
-        if (mIndex != null) {
-            throw new SAMException("Unable to change index memory mapping; index file has already been loaded.");
-        }
-        this.mEnableIndexMemoryMapping = enabled;
-    }
-
-    @Override void enableCrcChecking(final boolean enabled) {
-        this.mCompressedInputStream.setCheckCrcs(enabled);
-    }
-
-    @Override void setSAMRecordFactory(final SAMRecordFactory factory) { this.samRecordFactory = factory; }
-
-    @Override
-    public SamReader.Type type() {
-        return SamReader.Type.BAM_TYPE;
-    }
-
-    /**
-     * @return true if this is a BAM file and has an index
-     */
-    public boolean hasIndex() {
-        return mIsSeekable && ((mIndexFile != null) || (mIndexStream != null));
-    }
-
-    /**
-     * Lazily loads and returns the index for this BAM file, using the caching or disk-based
-     * implementation depending on how this reader was configured.
-     * @return The index for this BAM file.
-     */
-    public BAMIndex getIndex() {
-        if(!hasIndex())
-            throw new SAMException("No index is available for this BAM file.");
-        if(mIndex == null) {
-            if (mIndexFile != null)
-                mIndex = mEnableIndexCaching ? new CachingBAMFileIndex(mIndexFile, getFileHeader().getSequenceDictionary(), mEnableIndexMemoryMapping)
-                                             : new DiskBasedBAMFileIndex(mIndexFile, getFileHeader().getSequenceDictionary(), mEnableIndexMemoryMapping);
-            else
-                mIndex = mEnableIndexCaching ? new CachingBAMFileIndex(mIndexStream, getFileHeader().getSequenceDictionary())
-                                             : new DiskBasedBAMFileIndex(mIndexStream, getFileHeader().getSequenceDictionary());
-        }
-        return mIndex;
-    }
-
-    public void setEagerDecode(final boolean desired) { this.eagerDecode = desired; }
-
-    @Override
-    public void close() {
-        if (mCompressedInputStream != null) {
-            try {
-                mCompressedInputStream.close();
-            } catch (IOException e) {
-                throw new RuntimeIOException("Exception closing compressed input stream.", e);
-            }
-        }
-        if (mStream != null) {
-            mStream.close();
-        }
-        if (mIndex != null) {
-            mIndex.close();
-        }
-        mStream = null;
-        mFileHeader = null;
-        mIndex = null;
-    }
-
-    public SAMFileHeader getFileHeader() {
-        return mFileHeader;
-    }
-
-    /**
-     * Set error-checking level for subsequent SAMRecord reads.
-     */
-    void setValidationStringency(final ValidationStringency validationStringency) {
-        this.mValidationStringency = validationStringency;
-    }
-
-    public ValidationStringency getValidationStringency() {
-        return this.mValidationStringency;
-    }
-
-    /**
-     * Prepare to iterate through the SAMRecords in file order.
-     * Only a single iterator on a BAM file can be extant at a time.  If getIterator() or a query method has been called once,
-     * that iterator must be closed before getIterator() can be called again.
-     * A somewhat peculiar aspect of this method is that if the file is not seekable, a second call to
-     * getIterator() begins its iteration where the last one left off.  That is the best that can be
-     * done in that situation.
-     */
-    public CloseableIterator<SAMRecord> getIterator() {
-        if (mStream == null) {
-            throw new IllegalStateException("File reader is closed");
-        }
-        if (mCurrentIterator != null) {
-            throw new IllegalStateException("Iteration in progress");
-        }
-        if (mIsSeekable) {
-            try {
-                mCompressedInputStream.seek(mFirstRecordPointer);
-            } catch (final IOException exc) {
-                throw new RuntimeIOException(exc.getMessage(), exc);
-            }
-        }
-        mCurrentIterator = new BAMFileIterator();
-        return mCurrentIterator;
-    }
-
-    @Override
-    public CloseableIterator<SAMRecord> getIterator(final SAMFileSpan chunks) {
-        if (mStream == null) {
-            throw new IllegalStateException("File reader is closed");
-        }
-        if (mCurrentIterator != null) {
-            throw new IllegalStateException("Iteration in progress");
-        }
-        if (!(chunks instanceof BAMFileSpan)) {
-            throw new IllegalStateException("BAMFileReader cannot handle this type of file span.");
-        }
-
-        // Create an iterator over the given chunk boundaries.
-        mCurrentIterator = new BAMFileIndexIterator(((BAMFileSpan)chunks).toCoordinateArray());
-        return mCurrentIterator;
-    }
-
-    /**
-     * Gets an unbounded pointer to the first record in the BAM file.  Because the reader doesn't necessarily know
-     * when the file ends, the rightmost bound of the file pointer will not end exactly where the file ends.  However,
-     * the rightmost bound is guaranteed to be after the last read in the file.
-     * @return An unbounded pointer to the first record in the BAM file.
-     */
-    @Override
-    public SAMFileSpan getFilePointerSpanningReads() {
-        return new BAMFileSpan(new Chunk(mFirstRecordPointer,Long.MAX_VALUE));
-    }
-
-    /**
-     * Prepare to iterate through the SAMRecords that match the given interval.
-     * Only a single iterator on a BAMFile can be extant at a time.  The previous one must be closed
-     * before calling any of the methods that return an iterator.
-     *
-     * Note that an unmapped SAMRecord may still have a reference name and an alignment start for sorting
-     * purposes (typically this is the coordinate of its mate), and will be found by this method if the coordinate
-     * matches the specified interval.
-     *
-     * Note that this method is not necessarily efficient in terms of disk I/O.  The index does not have perfect
-     * resolution, so some SAMRecords may be read and then discarded because they do not match the specified interval.
-     *
-     * @param sequence Reference sequence sought.
-     * @param start Desired SAMRecords must overlap or be contained in the interval specified by start and end.
-     * A value of zero implies the start of the reference sequence.
-     * @param end A value of zero implies the end of the reference sequence.
-     * @param contained If true, the alignments for the SAMRecords must be completely contained in the interval
-     * specified by start and end.  If false, the SAMRecords need only overlap the interval.
-     * @return Iterator for the matching SAMRecords
-     */
-    CloseableIterator<SAMRecord> query(final String sequence, final int start, final int end, final boolean contained) {
-        if (mStream == null) {
-            throw new IllegalStateException("File reader is closed");
-        }
-        if (mCurrentIterator != null) {
-            throw new IllegalStateException("Iteration in progress");
-        }
-        if (!mIsSeekable) {
-            throw new UnsupportedOperationException("Cannot query stream-based BAM file");
-        }
-        final int referenceIndex = mFileHeader.getSequenceIndex(sequence);
-        if (referenceIndex == -1) {
-            mCurrentIterator = new EmptyBamIterator();
-        } else {
-            final QueryInterval[] queryIntervals = {new QueryInterval(referenceIndex, start, end)};
-            mCurrentIterator = createIndexIterator(queryIntervals, contained);
-        }
-        return mCurrentIterator;
-    }
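
In practice this package-private query path is reached through the public SamReader API rather than by constructing a BAMFileReader directly. A hedged sketch of that call path; the file name example.bam and the interval chr1:1,000-2,000 are placeholders:

    import htsjdk.samtools.SAMRecord;
    import htsjdk.samtools.SAMRecordIterator;
    import htsjdk.samtools.SamReader;
    import htsjdk.samtools.SamReaderFactory;

    import java.io.File;

    public class QuerySketch {
        public static void main(final String[] args) throws Exception {
            // Open a coordinate-sorted, indexed BAM; for BAM input the factory delegates to BAMFileReader.
            try (final SamReader reader = SamReaderFactory.makeDefault().open(new File("example.bam"))) {
                // Overlap query: contained == false returns every record overlapping chr1:1,000-2,000.
                try (final SAMRecordIterator it = reader.query("chr1", 1000, 2000, false)) {
                    while (it.hasNext()) {
                        final SAMRecord rec = it.next();
                        System.out.println(rec.getReadName() + "\t" + rec.getAlignmentStart());
                    }
                }
                // Only one iterator may be open on the reader at a time, so the iterator above is
                // closed (here by try-with-resources) before any further query or getIterator() call.
            }
        }
    }
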
-
-    /**
-     * Prepare to iterate through the SAMRecords that match any of the given intervals.
-     * Only a single iterator on a BAMFile can be extant at a time.  The previous one must be closed
-     * before calling any of the methods that return an iterator.
-     *
-     * Note that an unmapped SAMRecord may still have a reference name and an alignment start for sorting
-     * purposes (typically this is the coordinate of its mate), and will be found by this method if the coordinate
-     * matches the specified interval.
-     *
-     * Note that this method is not necessarily efficient in terms of disk I/O.  The index does not have perfect
-     * resolution, so some SAMRecords may be read and then discarded because they do not match the specified interval.
-     *
-     * @param intervals list of intervals to be queried.  Must be optimized.
-     * @param contained If true, the alignments for the SAMRecords must be completely contained in the interval
-     * specified by start and end.  If false, the SAMRecords need only overlap the interval.
-     * @return Iterator for the matching SAMRecords
-     * @see QueryInterval#optimizeIntervals(QueryInterval[])
-     */
-    public CloseableIterator<SAMRecord> query(final QueryInterval[] intervals, final boolean contained) {
-        if (mStream == null) {
-            throw new IllegalStateException("File reader is closed");
-        }
-        if (mCurrentIterator != null) {
-            throw new IllegalStateException("Iteration in progress");
-        }
-        if (!mIsSeekable) {
-            throw new UnsupportedOperationException("Cannot query stream-based BAM file");
-        }
-        mCurrentIterator = createIndexIterator(intervals, contained);
-        return mCurrentIterator;
-    }
-
-    /**
-     * Prepare to iterate through the SAMRecords with the given alignment start.
-     * Only a single iterator on a BAMFile can be extant at a time.  The previous one must be closed
-     * before calling any of the methods that return an iterator.
-     *
-     * Note that an unmapped SAMRecord may still have a reference name and an alignment start for sorting
-     * purposes (typically this is the coordinate of its mate), and will be found by this method if the coordinate
-     * matches the specified interval.
-     *
-     * Note that this method is not necessarily efficient in terms of disk I/O.  The index does not have perfect
-     * resolution, so some SAMRecords may be read and then discarded because they do not match the specified interval.
-     *
-     * @param sequence Reference sequence sought.
-     * @param start Alignment start sought.
-     * @return Iterator for the matching SAMRecords.
-     */
-    public CloseableIterator<SAMRecord> queryAlignmentStart(final String sequence, final int start) {
-        if (mStream == null) {
-            throw new IllegalStateException("File reader is closed");
-        }
-        if (mCurrentIterator != null) {
-            throw new IllegalStateException("Iteration in progress");
-        }
-        if (!mIsSeekable) {
-            throw new UnsupportedOperationException("Cannot query stream-based BAM file");
-        }
-        final int referenceIndex = mFileHeader.getSequenceIndex(sequence);
-        if (referenceIndex == -1) {
-            mCurrentIterator = new EmptyBamIterator();
-        } else {
-            mCurrentIterator = createStartingAtIndexIterator(referenceIndex, start);
-        }
-        return mCurrentIterator;
-    }
-
-    /**
-     * Prepare to iterate through the SAMRecords that are unmapped and do not have a reference name or alignment start.
-     * Only a single iterator on a BAMFile can be extant at a time.  The previous one must be closed
-     * before calling any of the methods that return an iterator.
-     *
-     * @return Iterator for the matching SAMRecords.
-     */
-    public CloseableIterator<SAMRecord> queryUnmapped() {
-        if (mStream == null) {
-            throw new IllegalStateException("File reader is closed");
-        }
-        if (mCurrentIterator != null) {
-            throw new IllegalStateException("Iteration in progress");
-        }
-        if (!mIsSeekable) {
-            throw new UnsupportedOperationException("Cannot query stream-based BAM file");
-        }
-        try {
-            final long startOfLastLinearBin = getIndex().getStartOfLastLinearBin();
-            if (startOfLastLinearBin != -1) {
-                mCompressedInputStream.seek(startOfLastLinearBin);
-            } else {
-                // No mapped reads in file, just start at the first read in file.
-                mCompressedInputStream.seek(mFirstRecordPointer);
-            }
-            mCurrentIterator = new BAMFileIndexUnmappedIterator();
-            return mCurrentIterator;
-        } catch (final IOException e) {
-            throw new RuntimeIOException("IOException seeking to unmapped reads", e);
-        }
-    }
-
-    /**
-     * Reads the header of a BAM file from a stream
-     * @param stream A BinaryCodec to read the header from
-     * @param validationStringency Determines how stringent to be when validating the SAM header
-     * @param source Note that this is used only for reporting errors.
-     */
-    protected static SAMFileHeader readHeader(final BinaryCodec stream, final ValidationStringency validationStringency, final String source)
-        throws IOException {
-
-        final byte[] buffer = new byte[4];
-        stream.readBytes(buffer);
-        if (!Arrays.equals(buffer, BAMFileConstants.BAM_MAGIC)) {
-            throw new IOException("Invalid BAM file header");
-        }
-
-        final int headerTextLength = stream.readInt();
-        final String textHeader = stream.readString(headerTextLength);
-        final SAMTextHeaderCodec headerCodec = new SAMTextHeaderCodec();
-        headerCodec.setValidationStringency(validationStringency);
-        final SAMFileHeader samFileHeader = headerCodec.decode(new StringLineReader(textHeader),
-                source);
-
-        final int sequenceCount = stream.readInt();
-        if (!samFileHeader.getSequenceDictionary().isEmpty()) {
-            // It is allowed to have binary sequences but no text sequences, so only validate if both are present
-            if (sequenceCount != samFileHeader.getSequenceDictionary().size()) {
-                throw new SAMFormatException("Number of sequences in text header (" +
-                        samFileHeader.getSequenceDictionary().size() +
-                        ") != number of sequences in binary header (" + sequenceCount + ") for file " + source);
-            }
-            for (int i = 0; i < sequenceCount; i++) {
-                final SAMSequenceRecord binarySequenceRecord = readSequenceRecord(stream, source);
-                final SAMSequenceRecord sequenceRecord = samFileHeader.getSequence(i);
-                if (!sequenceRecord.getSequenceName().equals(binarySequenceRecord.getSequenceName())) {
-                    throw new SAMFormatException("For sequence " + i + ", text and binary have different names in file " +
-                            source);
-                }
-                if (sequenceRecord.getSequenceLength() != binarySequenceRecord.getSequenceLength()) {
-                    throw new SAMFormatException("For sequence " + i + ", text and binary have different lengths in file " +
-                            source);
-                }
-            }
-        } else {
-            // If only binary sequences are present, copy them into samFileHeader
-            final List<SAMSequenceRecord> sequences = new ArrayList<SAMSequenceRecord>(sequenceCount);
-            for (int i = 0; i < sequenceCount; i++) {
-                sequences.add(readSequenceRecord(stream, source));
-            }
-            samFileHeader.setSequenceDictionary(new SAMSequenceDictionary(sequences));
-        }
-
-        return samFileHeader;
-    }
-
-    /**
-     * Reads a single binary sequence record from the file or stream
-     * @param source Note that this is used only for reporting errors.
-     */
-    private static SAMSequenceRecord readSequenceRecord(final BinaryCodec stream, final String source) {
-        final int nameLength = stream.readInt();
-        if (nameLength <= 1) {
-            throw new SAMFormatException("Invalid BAM file header: missing sequence name in file " + source);
-        }
-        final String sequenceName = stream.readString(nameLength - 1);
-        // Skip the null terminator
-        stream.readByte();
-        final int sequenceLength = stream.readInt();
-        return new SAMSequenceRecord(SAMSequenceRecord.truncateSequenceName(sequenceName), sequenceLength);
-    }
-
-    /**
-     * Encapsulates the restriction that only one iterator may be open at a time.
-     */
-    private abstract class AbstractBamIterator implements CloseableIterator<SAMRecord> {
-
-        private boolean isClosed = false;
-
-        public void close() {
-            if (!isClosed) {
-                if (mCurrentIterator != null && this != mCurrentIterator) {
-                    throw new IllegalStateException("Attempt to close non-current iterator");
-                }
-                mCurrentIterator = null;
-                isClosed = true;
-            }
-        }
-
-        protected void assertOpen() {
-            if (isClosed) throw new AssertionError("Iterator has been closed");
-        }
-
-        public void remove() {
-            throw new UnsupportedOperationException("Not supported: remove");
-        }
-
-    }
-
-    private class EmptyBamIterator extends AbstractBamIterator {
-        @Override
-        public boolean hasNext() {
-            return false;
-        }
-
-        @Override
-        public SAMRecord next() {
-            throw new NoSuchElementException("next called on empty iterator");
-        }
-    }
-
-    /**
-     * Iterator for non-indexed sequential iteration through all SAMRecords in file.
-     * Starting point of iteration is wherever current file position is when the iterator is constructed.
-     */
-    private class BAMFileIterator extends AbstractBamIterator {
-        private SAMRecord mNextRecord = null;
-        private final BAMRecordCodec bamRecordCodec;
-        private long samRecordIndex = 0; // Records at what position (counted in records) we are at in the file
-
-        BAMFileIterator() {
-            this(true);
-        }
-
-        /**
-         * @param advance If false, allows a subclass to complete additional setup before the first advance()
-         */
-        BAMFileIterator(final boolean advance) {
-            this.bamRecordCodec = new BAMRecordCodec(getFileHeader(), samRecordFactory);
-            this.bamRecordCodec.setInputStream(BAMFileReader.this.mStream.getInputStream(),
-                    BAMFileReader.this.mStream.getInputFileName());
-
-            if (advance) {
-                advance();
-            }
-        }
-
-        public boolean hasNext() {
-            assertOpen();
-            return (mNextRecord != null);
-        }
-
-        public SAMRecord next() {
-            assertOpen();
-            final SAMRecord result = mNextRecord;
-            advance();
-            return result;
-        }
-
-        void advance() {
-            try {
-                mNextRecord = getNextRecord();
-
-                if (mNextRecord != null) {
-                    ++this.samRecordIndex;
-                    // Because some decoding is done lazily, the record needs to remember the validation stringency.
-                    mNextRecord.setValidationStringency(mValidationStringency);
-
-                    if (mValidationStringency != ValidationStringency.SILENT) {
-                        final List<SAMValidationError> validationErrors = mNextRecord.isValid(mValidationStringency == ValidationStringency.STRICT);
-                        SAMUtils.processValidationErrors(validationErrors,
-                                this.samRecordIndex, BAMFileReader.this.getValidationStringency());
-                    }
-                }
-                if (eagerDecode && mNextRecord != null) {
-                    mNextRecord.eagerDecode();
-                }
-            } catch (final IOException exc) {
-                throw new RuntimeIOException(exc.getMessage(), exc);
-            }
-        }
-
-        /**
-         * Read the next record from the input stream.
-         */
-        SAMRecord getNextRecord() throws IOException {
-            final long startCoordinate = mCompressedInputStream.getFilePointer();
-            final SAMRecord next = bamRecordCodec.decode();
-            final long stopCoordinate = mCompressedInputStream.getFilePointer();
-
-            if(mReader != null && next != null)
-                next.setFileSource(new SAMFileSource(mReader,new BAMFileSpan(new Chunk(startCoordinate,stopCoordinate))));
-
-            return next;
-        }
-
-        /**
-         * @return The record that will be returned by the next call to next()
-         */
-        protected SAMRecord peek() {
-            return mNextRecord;
-        }
-    }
-
-    /**
-     * Prepare to iterate through SAMRecords in the given reference that start exactly at the given start coordinate.
-     * @param referenceIndex Desired reference sequence.
-     * @param start 1-based alignment start.
-     */
-    private CloseableIterator<SAMRecord> createStartingAtIndexIterator(final int referenceIndex,
-                                                                       final int start) {
-
-        // Hit the index to determine the chunk boundaries for the required data.
-        final BAMIndex fileIndex = getIndex();
-        final BAMFileSpan fileSpan = fileIndex.getSpanOverlapping(referenceIndex, start, 0);
-        final long[] filePointers = fileSpan != null ? fileSpan.toCoordinateArray() : null;
-
-        // Create an iterator over the above chunk boundaries.
-        final BAMFileIndexIterator iterator = new BAMFileIndexIterator(filePointers);
-
-        // Add some preprocessing filters for edge-case reads that don't fit into this
-        // query type.
-        return new BAMQueryFilteringIterator(iterator,new BAMStartingAtIteratorFilter(referenceIndex,start));
-    }
-
-    /**
-     * @throws java.lang.IllegalArgumentException if the intervals are not optimized
-     * @see QueryInterval#optimizeIntervals(QueryInterval[])
-     */
-    private void assertIntervalsOptimized(final QueryInterval[] intervals) {
-        if (intervals.length == 0) return;
-        for (int i = 1; i < intervals.length; ++i) {
-            final QueryInterval prev = intervals[i-1];
-            final QueryInterval thisInterval = intervals[i];
-            if (prev.compareTo(thisInterval) >= 0) {
-                throw new IllegalArgumentException(String.format("List of intervals is not sorted: %s >= %s", prev, thisInterval));
-            }
-            if (prev.overlaps(thisInterval)) {
-                throw new IllegalArgumentException(String.format("List of intervals is not optimized: %s intersects %s", prev, thisInterval));
-            }
-            if (prev.abuts(thisInterval)) {
-                throw new IllegalArgumentException(String.format("List of intervals is not optimized: %s abuts %s", prev, thisInterval));
-            }
-        }
-    }
-
-    private CloseableIterator<SAMRecord> createIndexIterator(final QueryInterval[] intervals,
-                                                             final boolean contained) {
-
-        assertIntervalsOptimized(intervals);
-
-        // Hit the index to determine the chunk boundaries for the required data.
-        final BAMFileSpan[] inputSpans = new BAMFileSpan[intervals.length];
-        final BAMIndex fileIndex = getIndex();
-        for (int i = 0; i < intervals.length; ++i) {
-            final QueryInterval interval = intervals[i];
-            final BAMFileSpan span = fileIndex.getSpanOverlapping(interval.referenceIndex, interval.start, interval.end);
-            inputSpans[i] = span;
-        }
-        final long[] filePointers;
-        if (inputSpans.length > 0) {
-            filePointers = BAMFileSpan.merge(inputSpans).toCoordinateArray();
-        } else {
-            filePointers = null;
-        }
-
-        // Create an iterator over the above chunk boundaries.
-        final BAMFileIndexIterator iterator = new BAMFileIndexIterator(filePointers);
-
-        // Add some preprocessing filters for edge-case reads that don't fit into this
-        // query type.
-        return new BAMQueryFilteringIterator(iterator, new BAMQueryMultipleIntervalsIteratorFilter(intervals, contained));
-    }
-
-    /**
-     * Iterate over the SAMRecords defined by the sections of the file described in the ctor argument.
-     */
-    private class BAMFileIndexIterator extends BAMFileIterator {
-
-        private long[] mFilePointers = null;
-        private int mFilePointerIndex = 0;
-        private long mFilePointerLimit = -1;
-
-        /**
-         * Prepare to iterate through SAMRecords stored in the specified compressed blocks at the given offset.
-         * @param filePointers the block / offset combination, stored in chunk format.
-         */
-        BAMFileIndexIterator(final long[] filePointers) {
-            super(false);  // delay advance() until after construction
-            mFilePointers = filePointers;
-            advance();
-        }
-
-        SAMRecord getNextRecord()
-            throws IOException {
-            // Advance to next file block if necessary
-            while (mCompressedInputStream.getFilePointer() >= mFilePointerLimit) {
-                if (mFilePointers == null ||
-                        mFilePointerIndex >= mFilePointers.length) {
-                    return null;
-                }
-                final long startOffset = mFilePointers[mFilePointerIndex++];
-                final long endOffset = mFilePointers[mFilePointerIndex++];
-                mCompressedInputStream.seek(startOffset);
-                mFilePointerLimit = endOffset;
-            }
-            // Pull next record from stream
-            return super.getNextRecord();
-        }
-    }
-
-    /**
-     * Pull SAMRecords from a coordinate-sorted iterator, and filter out any that do not match the filter.
-     */
-    public class BAMQueryFilteringIterator extends AbstractBamIterator {
-        /**
-         * The wrapped iterator.
-         */
-        protected final CloseableIterator<SAMRecord> wrappedIterator;
-        /**
-         * The next record to be returned.  Will be null if no such record exists.
-         */
-        protected SAMRecord mNextRecord;
-        private final BAMIteratorFilter iteratorFilter;
-
-        public BAMQueryFilteringIterator(final CloseableIterator<SAMRecord> iterator,
-                                         final BAMIteratorFilter iteratorFilter) {
-            this.wrappedIterator = iterator;
-            this.iteratorFilter = iteratorFilter;
-            mNextRecord = advance();
-        }
-
-        /**
-         * Returns true if a next element exists; false otherwise.
-         */
-        public boolean hasNext() {
-            assertOpen();
-            return mNextRecord != null;
-        }
-
-        /**
-         * Gets the next record from the given iterator.
-         * @return The next SAM record in the iterator.
-         */
-        public SAMRecord next() {
-            if(!hasNext())
-                throw new NoSuchElementException("BAMQueryFilteringIterator: no next element available");
-            final SAMRecord currentRead = mNextRecord;
-            mNextRecord = advance();
-            return currentRead;
-        }
-
-        SAMRecord advance() {
-            while (true) {
-                // Pull next record from stream
-                if(!wrappedIterator.hasNext())
-                    return null;
-
-                final SAMRecord record = wrappedIterator.next();
-                switch (iteratorFilter.compareToFilter(record)) {
-                    case MATCHES_FILTER: return record;
-                    case STOP_ITERATION: return null;
-                    case CONTINUE_ITERATION: break; // keep looping
-                    default: throw new SAMException("Unexpected return from compareToFilter");
-                }
-            }
-        }
-    }
-
-    interface BAMIteratorFilter {
-        /**
-         * Determine if given record passes the filter, and if it does not, whether iteration should continue
-         * or if this record is beyond the region(s) of interest.
-         */
-        FilteringIteratorState compareToFilter(final SAMRecord record);
-    }
-
-    /**
-     * A decorating iterator that filters out records that do not match the given reference and start position.
-     */
-    private class BAMStartingAtIteratorFilter implements BAMIteratorFilter {
-
-        private final int mReferenceIndex;
-        private final int mRegionStart;
-
-        public BAMStartingAtIteratorFilter(final int referenceIndex, final int start) {
-            mReferenceIndex = referenceIndex;
-            mRegionStart = start;
-        }
-
-        /**
-         *
-         * @return MATCHES_FILTER if this record matches the filter;
-     * CONTINUE_ITERATION if it does not match the filter but iteration should continue;
-     * STOP_ITERATION if it does not match the filter and iteration should end.
-         */
-        @Override
-        public FilteringIteratorState compareToFilter(final SAMRecord record) {
-            // If beyond the end of this reference sequence, end iteration
-            final int referenceIndex = record.getReferenceIndex();
-            if (referenceIndex < 0 || referenceIndex > mReferenceIndex) {
-                return FilteringIteratorState.STOP_ITERATION;
-            } else if (referenceIndex < mReferenceIndex) {
-                // If before this reference sequence, continue
-                return FilteringIteratorState.CONTINUE_ITERATION;
-            }
-            final int alignmentStart = record.getAlignmentStart();
-            if (alignmentStart > mRegionStart) {
-                // If scanned beyond target region, end iteration
-                return FilteringIteratorState.STOP_ITERATION;
-            } else if (alignmentStart == mRegionStart) {
-                return FilteringIteratorState.MATCHES_FILTER;
-            } else {
-                return FilteringIteratorState.CONTINUE_ITERATION;
-            }
-        }
-
-    }
-
-    private class BAMFileIndexUnmappedIterator extends BAMFileIterator  {
-        private BAMFileIndexUnmappedIterator() {
-            while (this.hasNext() && peek().getReferenceIndex() != SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX) {
-                advance();
-            }
-        }
-    }
-
-    /**
-     * Filters out records that do not match any of the given intervals and query type.
-     */
-    private class BAMQueryMultipleIntervalsIteratorFilter implements BAMIteratorFilter {
-        final QueryInterval[] intervals;
-        final boolean contained;
-        int intervalIndex = 0;
-
-
-        public BAMQueryMultipleIntervalsIteratorFilter(final QueryInterval[] intervals,
-                                                       final boolean contained) {
-            this.contained = contained;
-            this.intervals = intervals;
-        }
-
-        @Override
-        public FilteringIteratorState compareToFilter(final SAMRecord record) {
-            while (intervalIndex < intervals.length) {
-                final IntervalComparison comparison = compareIntervalToRecord(intervals[intervalIndex], record);
-                switch (comparison) {
-                    // Interval is before SAMRecord.  Try next interval;
-                    case BEFORE: ++intervalIndex; break;
-                    // Interval is after SAMRecord.  Keep scanning forward in SAMRecords
-                    case AFTER: return FilteringIteratorState.CONTINUE_ITERATION;
-                    // Found a good record
-                    case CONTAINED: return FilteringIteratorState.MATCHES_FILTER;
-                    // Either found a good record, or else keep scanning SAMRecords
-                    case OVERLAPPING: return
-                            (contained ? FilteringIteratorState.CONTINUE_ITERATION : FilteringIteratorState.MATCHES_FILTER);
-                }
-            }
-            // Went past the last interval
-            return FilteringIteratorState.STOP_ITERATION;
-        }
-
-        private IntervalComparison compareIntervalToRecord(final QueryInterval interval, final SAMRecord record) {
-            // interval.end <= 0 implies the end of the reference sequence.
-            final int intervalEnd = (interval.end <= 0? Integer.MAX_VALUE: interval.end);
-            final int alignmentEnd;
-            if (record.getReadUnmappedFlag() && record.getAlignmentStart() != SAMRecord.NO_ALIGNMENT_START) {
-                // Unmapped read with coordinate of mate.
-                alignmentEnd = record.getAlignmentStart();
-            } else {
-                alignmentEnd = record.getAlignmentEnd();
-            }
-
-            if (interval.referenceIndex < record.getReferenceIndex()) return IntervalComparison.BEFORE;
-            else if (interval.referenceIndex > record.getReferenceIndex()) return IntervalComparison.AFTER;
-            else if (intervalEnd < record.getAlignmentStart()) return IntervalComparison.BEFORE;
-            else if (alignmentEnd < interval.start) return IntervalComparison.AFTER;
-            else if (CoordMath.encloses(interval.start, intervalEnd, record.getAlignmentStart(), alignmentEnd)) {
-                return IntervalComparison.CONTAINED;
-            } else return IntervalComparison.OVERLAPPING;
-        }
-    }
-
-    private enum IntervalComparison {
-        BEFORE, AFTER, OVERLAPPING, CONTAINED
-    }
-
-    /**
-     * Type returned by BAMIteratorFilter that tells BAMQueryFilteringIterator how to handle each SAMRecord.
-     */
-    private enum FilteringIteratorState {
-        MATCHES_FILTER, STOP_ITERATION, CONTINUE_ITERATION
-
-    }
-}
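
For orientation, the indexed query paths removed above (queryAlignmentStart, queryUnmapped and the
interval iterators) are normally reached through the public SamReader API rather than by using
BAMFileReader directly. A minimal sketch, assuming an indexed, coordinate-sorted BAM; the file name
"example.bam" and the contig name "chr1" are hypothetical:

    import htsjdk.samtools.SAMRecordIterator;
    import htsjdk.samtools.SamReader;
    import htsjdk.samtools.SamReaderFactory;
    import htsjdk.samtools.ValidationStringency;
    import java.io.File;
    import java.io.IOException;

    public class QueryExample {
        public static void main(final String[] args) throws IOException {
            final SamReader reader = SamReaderFactory.makeDefault()
                    .validationStringency(ValidationStringency.LENIENT)
                    .open(new File("example.bam"));
            // Reads whose alignment start is exactly 100000 on chr1 (1-based coordinate).
            final SAMRecordIterator starts = reader.queryAlignmentStart("chr1", 100000);
            while (starts.hasNext()) {
                System.out.println(starts.next().getReadName());
            }
            // Only one iterator may be open on a reader at a time, so close it before the next query.
            starts.close();
            // Unplaced unmapped reads stored after the last mapped record.
            final SAMRecordIterator unmapped = reader.queryUnmapped();
            while (unmapped.hasNext()) {
                unmapped.next();
            }
            unmapped.close();
            reader.close();
        }
    }
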
diff --git a/src/java/htsjdk/samtools/BAMFileWriter.java b/src/java/htsjdk/samtools/BAMFileWriter.java
deleted file mode 100644
index 52b7bb1..0000000
--- a/src/java/htsjdk/samtools/BAMFileWriter.java
+++ /dev/null
@@ -1,193 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools;
-
-import htsjdk.samtools.util.BinaryCodec;
-import htsjdk.samtools.util.BlockCompressedOutputStream;
-import htsjdk.samtools.util.RuntimeIOException;
-
-import java.io.DataOutputStream;
-import java.io.File;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.StringWriter;
-import java.io.Writer;
-
-/**
- * Concrete implementation of SAMFileWriter for writing gzipped BAM files.
- */
-class BAMFileWriter extends SAMFileWriterImpl {
-
-    private final BinaryCodec outputBinaryCodec;
-    private BAMRecordCodec bamRecordCodec = null;
-    private final BlockCompressedOutputStream blockCompressedOutputStream;
-    private BAMIndexer bamIndexer = null;
-
-    protected BAMFileWriter(final File path) {
-        blockCompressedOutputStream = new BlockCompressedOutputStream(path);
-        outputBinaryCodec = new BinaryCodec(new DataOutputStream(blockCompressedOutputStream));
-        outputBinaryCodec.setOutputFileName(path.getAbsolutePath());
-    }
-
-    protected BAMFileWriter(final File path, final int compressionLevel) {
-        blockCompressedOutputStream = new BlockCompressedOutputStream(path, compressionLevel);
-        outputBinaryCodec = new BinaryCodec(new DataOutputStream(blockCompressedOutputStream));
-        outputBinaryCodec.setOutputFileName(path.getAbsolutePath());
-    }
-
-    protected BAMFileWriter(final OutputStream os, final File file) {
-        blockCompressedOutputStream = new BlockCompressedOutputStream(os, file);
-        outputBinaryCodec = new BinaryCodec(new DataOutputStream(blockCompressedOutputStream));
-        outputBinaryCodec.setOutputFileName(getPathString(file));
-    }
-
-    protected BAMFileWriter(final OutputStream os, final File file, final int compressionLevel) {
-        blockCompressedOutputStream = new BlockCompressedOutputStream(os, file, compressionLevel);
-        outputBinaryCodec = new BinaryCodec(new DataOutputStream(blockCompressedOutputStream));
-        outputBinaryCodec.setOutputFileName(getPathString(file));
-    }
-
-    private void prepareToWriteAlignments() {
-        if (bamRecordCodec == null) {
-            bamRecordCodec = new BAMRecordCodec(getFileHeader());
-            bamRecordCodec.setOutputStream(outputBinaryCodec.getOutputStream(), getFilename());
-        }
-    }
-
-    /** @return absolute path, or null if arg is null.  */
-    private String getPathString(final File path){
-        return (path != null) ? path.getAbsolutePath() : null;
-    }
-
-    // Allow enabling the bam index construction
-    // only enabled by factory method before anything is written
-    void enableBamIndexConstruction() {
-        if (!getSortOrder().equals(SAMFileHeader.SortOrder.coordinate)){
-           throw new SAMException("Not creating BAM index since not sorted by coordinates: " + getSortOrder());
-        }
-        if(getFilename() == null){
-            throw new SAMException("Not creating BAM index since we don't have an output file name");
-        }
-        bamIndexer = createBamIndex(getFilename());
-    }
-
-    private BAMIndexer createBamIndex(final String path) {
-        try {
-            final String indexFileBase = path.endsWith(BamFileIoUtils.BAM_FILE_EXTENSION) ?
-                    path.substring(0, path.lastIndexOf('.')) : path;
-            final File indexFile = new File(indexFileBase + BAMIndex.BAMIndexSuffix);
-            if (indexFile.exists()) {
-                if (!indexFile.canWrite()) {
-                    throw new SAMException("Not creating BAM index since unable to write index file " + indexFile);
-                }
-            }
-            return new BAMIndexer(indexFile, getFileHeader());
-        } catch (Exception e) {
-            throw new SAMException("Not creating BAM index", e);
-        }
-    }
-
-    protected void writeAlignment(final SAMRecord alignment) {
-        prepareToWriteAlignments();
-
-        if (bamIndexer != null) {
-            try {
-                final long startOffset = blockCompressedOutputStream.getFilePointer();
-                bamRecordCodec.encode(alignment);
-                final long stopOffset = blockCompressedOutputStream.getFilePointer();
-                // set the alignment's SourceInfo and then prepare its index information
-                alignment.setFileSource(new SAMFileSource(null, new BAMFileSpan(new Chunk(startOffset, stopOffset))));
-                bamIndexer.processAlignment(alignment);
-            } catch (Exception e) {
-                bamIndexer = null;
-                throw new SAMException("Exception when processing alignment for BAM index " + alignment, e);
-            }
-        } else {
-            bamRecordCodec.encode(alignment);
-        }
-    }
-
-    protected void writeHeader(final String textHeader) {
-        writeHeader(outputBinaryCodec, getFileHeader(), textHeader);
-    }
-
-    protected void finish() {
-        outputBinaryCodec.close();
-        try {
-            if (bamIndexer != null) {
-                bamIndexer.finish();
-            }
-        } catch (Exception e) {
-            throw new SAMException("Exception writing BAM index file", e);
-        }
-    }
-
-    /** @return absolute path, or null if this writer does not correspond to a file.  */
-    protected String getFilename() {
-        return outputBinaryCodec.getOutputFileName();
-    }
-
-    /**
-     * Writes a header to a BAM file. samFileHeader and headerText are redundant: one can be used to regenerate the other,
-     * but in some instances we already have both, so passing both saves some cycles.
-     */
-    protected static void writeHeader(final BinaryCodec outputBinaryCodec, final SAMFileHeader samFileHeader, final String headerText) {
-        outputBinaryCodec.writeBytes(BAMFileConstants.BAM_MAGIC);
-
-        // calculate and write the length of the SAM file header text and the header text
-        outputBinaryCodec.writeString(headerText, true, false);
-
-        // write the sequence dictionary in binary form.  This is redundant with the text header
-        outputBinaryCodec.writeInt(samFileHeader.getSequenceDictionary().size());
-        for (final SAMSequenceRecord sequenceRecord: samFileHeader.getSequenceDictionary().getSequences()) {
-            outputBinaryCodec.writeString(sequenceRecord.getSequenceName(), true, true);
-            outputBinaryCodec.writeInt(sequenceRecord.getSequenceLength());
-        }
-    }
-
-    /**
-     * Writes a header to a BAM file. This may need to regenerate the String version of the header; if you already have
-     * both the samFileHeader and the String, use the overload of this method that takes both.
-     */
-    protected static void writeHeader(final BinaryCodec outputBinaryCodec, final SAMFileHeader samFileHeader) {
-        // Do not use SAMFileHeader.getTextHeader() as it is not updated when changes to the underlying object are made
-        final String headerString;
-        final Writer stringWriter = new StringWriter();
-        new SAMTextHeaderCodec().encode(stringWriter, samFileHeader, true);
-        headerString = stringWriter.toString();
-
-        writeHeader(outputBinaryCodec, samFileHeader, headerString);
-    }
-
-    protected static void writeHeader(final OutputStream outputStream, final SAMFileHeader samFileHeader) {
-        final BlockCompressedOutputStream blockCompressedOutputStream = new BlockCompressedOutputStream(outputStream, null);
-        final BinaryCodec outputBinaryCodec = new BinaryCodec(new DataOutputStream(blockCompressedOutputStream));
-        writeHeader(outputBinaryCodec, samFileHeader);
-        try {
-            blockCompressedOutputStream.flush();
-        } catch (final IOException ioe) {
-            throw new RuntimeIOException(ioe);
-        }
-    }
-}
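
For orientation, the on-the-fly index construction removed above (enableBamIndexConstruction) is
normally requested through SAMFileWriterFactory.setCreateIndex(true); it requires a coordinate-sorted
header and a real output file. A minimal sketch with a hypothetical output name:

    import htsjdk.samtools.SAMFileHeader;
    import htsjdk.samtools.SAMFileWriter;
    import htsjdk.samtools.SAMFileWriterFactory;
    import htsjdk.samtools.SAMSequenceDictionary;
    import htsjdk.samtools.SAMSequenceRecord;
    import java.io.File;
    import java.util.Collections;

    public class WriteBamExample {
        public static void main(final String[] args) {
            final SAMFileHeader header = new SAMFileHeader();
            header.setSequenceDictionary(new SAMSequenceDictionary(
                    Collections.singletonList(new SAMSequenceRecord("chr1", 1000000))));
            header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
            final SAMFileWriter writer = new SAMFileWriterFactory()
                    .setCreateIndex(true)  // routes through enableBamIndexConstruction()
                    .makeBAMWriter(header, true, new File("example.bam"));
            // ... add coordinate-sorted records with writer.addAlignment(record) ...
            writer.close();  // also completes and writes the .bai index
        }
    }
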
diff --git a/src/java/htsjdk/samtools/CRAMFileReader.java b/src/java/htsjdk/samtools/CRAMFileReader.java
deleted file mode 100644
index 71ef0c7..0000000
--- a/src/java/htsjdk/samtools/CRAMFileReader.java
+++ /dev/null
@@ -1,625 +0,0 @@
-/*******************************************************************************
- * Copyright 2013 EMBL-EBI
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- ******************************************************************************/
-package htsjdk.samtools;
-
-import htsjdk.samtools.SAMFileHeader.SortOrder;
-import htsjdk.samtools.SamReader.Type;
-import htsjdk.samtools.cram.CRAIIndex;
-import htsjdk.samtools.cram.ref.CRAMReferenceSource;
-import htsjdk.samtools.cram.ref.ReferenceSource;
-import htsjdk.samtools.cram.structure.Container;
-import htsjdk.samtools.cram.structure.ContainerIO;
-import htsjdk.samtools.seekablestream.SeekableFileStream;
-import htsjdk.samtools.seekablestream.SeekableStream;
-import htsjdk.samtools.util.CloseableIterator;
-import htsjdk.samtools.util.CloserUtil;
-import htsjdk.samtools.util.CoordMath;
-import htsjdk.samtools.util.RuntimeEOFException;
-import htsjdk.samtools.util.RuntimeIOException;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Arrays;
-import java.util.Iterator;
-
-/**
- * {@link htsjdk.samtools.BAMFileReader BAMFileReader} analogue for CRAM files.
- * Supports random access using BAI index files (CRAI indexes are converted to BAI internally).
- *
- * @author vadim
- */
-@SuppressWarnings("UnusedDeclaration")
-public class CRAMFileReader extends SamReader.ReaderImplementation implements SamReader.Indexing {
-    private File cramFile;
-    private final CRAMReferenceSource referenceSource;
-    private InputStream inputStream;
-    private CRAMIterator iterator;
-    private BAMIndex mIndex;
-    private File mIndexFile;
-    private boolean mEnableIndexCaching;
-    private boolean mEnableIndexMemoryMapping;
-
-    private ValidationStringency validationStringency;
-
-    /**
-     * Create a CRAMFileReader from either a file or input stream using the reference source returned by
-     * {@link ReferenceSource#getDefaultCRAMReferenceSource() getDefaultCRAMReferenceSource}.
-     *
-     *
-     * @param cramFile CRAM file to open
-     * @param inputStream CRAM stream to read
-     *
-     * @throws IllegalArgumentException if the {@code cramFile} and the {@code inputStream} are both null
-     * @throws IllegalStateException if a {@link ReferenceSource#getDefaultCRAMReferenceSource() default}
-     * reference source cannot be acquired
-     */
-    public CRAMFileReader(final File cramFile, final InputStream inputStream) {
-        this(cramFile, inputStream, ReferenceSource.getDefaultCRAMReferenceSource());
-    }
-
-    /**
-     * Create a CRAMFileReader from either a file or input stream using the supplied reference source.
-     *
-     * @param cramFile        CRAM file to read
-     * @param inputStream     CRAM stream to read
-     * @param referenceSource a {@link htsjdk.samtools.cram.ref.ReferenceSource source} of
-     *                        reference sequences. May not be null.
-     *
-     * @throws IllegalArgumentException if the {@code cramFile} and the {@code inputStream} are both null
-     * or if the {@code CRAMReferenceSource} is null
-     */
-    public CRAMFileReader(final File cramFile, final InputStream inputStream,
-                          final CRAMReferenceSource referenceSource) {
-        if (cramFile == null && inputStream == null) {
-            throw new IllegalArgumentException("Either file or input stream is required.");
-        }
-        if (referenceSource == null) {
-            throw new IllegalArgumentException("A reference is required for CRAM readers");
-        }
-
-        this.cramFile = cramFile;
-        this.inputStream = inputStream;
-        this.referenceSource = referenceSource;
-        getIterator();
-    }
-
-    /**
-     * Create a CRAMFileReader from a file and optional index file using the supplied reference source. If index file
-     * is supplied then random access will be available.
-     *
-     * @param cramFile        CRAM file to read. May not be null.
-     * @param indexFile       index file to be used for random access. May be null.
-     * @param referenceSource a {@link htsjdk.samtools.cram.ref.CRAMReferenceSource source} of
-     *                        reference sequences. May not be null.
-     * @throws IllegalArgumentException if the {@code cramFile} or the {@code CRAMReferenceSource} is null
-     */
-    public CRAMFileReader(final File cramFile, final File indexFile,
-                          final CRAMReferenceSource referenceSource) {
-        if (cramFile == null)
-            throw new IllegalArgumentException("File is required.");
-        if (referenceSource == null) {
-            throw new IllegalArgumentException("A reference is required for CRAM readers");
-        }
-
-        this.cramFile = cramFile;
-        this.mIndexFile = indexFile;
-        this.referenceSource = referenceSource;
-
-        getIterator();
-    }
-
-    /**
-     * Create a CRAMFileReader from a file using the supplied reference source.
-     *
-     * @param cramFile        CRAM file to read. Can not be null.
-     * @param referenceSource a {@link htsjdk.samtools.cram.ref.CRAMReferenceSource source} of
-     *                        reference sequences. May not be null.
-     * @throws IllegalArgumentException if the {@code cramFile} or the {@code CRAMReferenceSource} is null
-     */
-    public CRAMFileReader(final File cramFile, final CRAMReferenceSource referenceSource) {
-        if (cramFile == null)
-            throw new IllegalArgumentException("CRAM file cannot be null.");
-        if (referenceSource == null) {
-            throw new IllegalArgumentException("A reference is required for CRAM readers");
-        }
-
-        this.cramFile = cramFile;
-        this.referenceSource = referenceSource;
-
-        getIterator();
-    }
-
-    /**
-     * Create a CRAMFileReader from an input stream and optional index stream using the supplied reference
-     * source and validation stringency.
-     *
-     * @param inputStream      CRAM stream to read. May not be null.
-     * @param indexInputStream index stream to be used for random access. May be null.
-     * @param referenceSource a {@link htsjdk.samtools.cram.ref.CRAMReferenceSource source} of
-     *                        reference sequences. May not be null.
-     * @param validationStringency Validation stringency to be used when reading
-     *
-     * @throws IllegalArgumentException if the {@code inputStream} or the {@code CRAMReferenceSource} is null
-     */
-    public CRAMFileReader(final InputStream inputStream, final SeekableStream indexInputStream,
-                          final CRAMReferenceSource referenceSource, final ValidationStringency validationStringency) throws IOException {
-        if (inputStream == null) {
-            throw new IllegalArgumentException("Input stream can not be null for CRAM reader");
-        }
-        if (referenceSource == null) {
-            throw new IllegalArgumentException("A reference is required for CRAM readers");
-        }
-
-        this.inputStream = inputStream;
-        this.referenceSource = referenceSource;
-        this.validationStringency = validationStringency;
-
-        iterator = new CRAMIterator(inputStream, referenceSource, validationStringency);
-        if (indexInputStream != null) {
-            try {
-                mIndex = new CachingBAMFileIndex(indexInputStream, iterator.getSAMFileHeader().getSequenceDictionary());
-            } catch (Exception e) {
-                // try CRAI instead:
-                indexInputStream.seek(0);
-                final SeekableStream baiStream = CRAIIndex.openCraiFileAsBaiStream(indexInputStream, iterator.getSAMFileHeader().getSequenceDictionary());
-                mIndex = new CachingBAMFileIndex(baiStream, iterator.getSAMFileHeader().getSequenceDictionary());
-            }
-        }
-    }
-
-    /**
-     * Create a CRAMFileReader from an input stream and optional index file using the supplied reference
-     * source and validation stringency.
-     *
-     * @param stream            CRAM stream to read. May not be null.
-     * @param indexFile         index file to be used for random access. May be null.
-     * @param referenceSource a {@link htsjdk.samtools.cram.ref.CRAMReferenceSource source} of
-     *                        reference sequences. May not be null.
-     * @param validationStringency Validation stringency to be used when reading
-     *
-     * @throws IllegalArgumentException if the {@code inputStream} or the {@code CRAMReferenceSource} is null
-     */
-    public CRAMFileReader(final InputStream stream,
-                          final File indexFile, final CRAMReferenceSource referenceSource,
-                          final ValidationStringency validationStringency) throws IOException {
-        this(stream, indexFile == null ? null: new SeekableFileStream(indexFile), referenceSource, validationStringency);
-    }
-
-    /**
-     * Create a CRAMFileReader from a CRAM file and optional index file using the supplied reference
-     * source and validation stringency.
-     *
-     * @param cramFile        CRAM file to read. May not be null.
-     * @param indexFile       index file to be used for random access. May be null.
-     * @param referenceSource a {@link htsjdk.samtools.cram.ref.CRAMReferenceSource source} of
-     *                        reference sequences. May not be null.
-     * @param validationStringency Validation stringency to be used when reading
-     *
-     * @throws IllegalArgumentException if the {@code cramFile} or the {@code CRAMReferenceSource} is null
-     */
-    public CRAMFileReader(final File cramFile,
-                          final File indexFile, final CRAMReferenceSource referenceSource,
-                          final ValidationStringency validationStringency) throws IOException {
-        this(new FileInputStream(cramFile), indexFile, referenceSource, validationStringency);
-        this.cramFile = cramFile;
-    }
-
-    @Override
-    void enableIndexCaching(final boolean enabled) {
-        // relevant to BAI only
-        mEnableIndexCaching = enabled;
-    }
-
-    @Override
-    void enableIndexMemoryMapping(final boolean enabled) {
-        // relevant to BAI only
-        mEnableIndexMemoryMapping = enabled;
-    }
-
-    @Override
-    void enableCrcChecking(final boolean enabled) {
-        // inapplicable to CRAM: do nothing
-    }
-
-    @Override
-    void setSAMRecordFactory(final SAMRecordFactory factory) {
-    }
-
-    @Override
-    public boolean hasIndex() {
-        return mIndex != null || mIndexFile != null;
-    }
-
-    @Override
-    public BAMIndex getIndex() {
-        if (!hasIndex())
-            throw new SAMException("No index is available for this BAM file.");
-        if (mIndex == null) {
-            final SAMSequenceDictionary dictionary = getFileHeader()
-                    .getSequenceDictionary();
-            if (mIndexFile.getName().endsWith(BAMIndex.BAMIndexSuffix)) {
-                mIndex = mEnableIndexCaching ? new CachingBAMFileIndex(mIndexFile,
-                        dictionary, mEnableIndexMemoryMapping)
-                        : new DiskBasedBAMFileIndex(mIndexFile, dictionary,
-                        mEnableIndexMemoryMapping);
-                return mIndex;
-            }
-
-            if (!mIndexFile.getName().endsWith(CRAIIndex.CRAI_INDEX_SUFFIX)) return null;
-            // convert CRAI into BAI:
-            final SeekableStream baiStream;
-            try {
-                baiStream = CRAIIndex.openCraiFileAsBaiStream(mIndexFile, iterator.getSAMFileHeader().getSequenceDictionary());
-            } catch (IOException e) {
-                throw new RuntimeException(e);
-            }
-
-            mIndex = mEnableIndexCaching ? new CachingBAMFileIndex(baiStream, getFileHeader().getSequenceDictionary()) :
-                    new DiskBasedBAMFileIndex(baiStream, getFileHeader().getSequenceDictionary());
-        }
-        return mIndex;
-    }
-
-    @Override
-    public boolean hasBrowseableIndex() {
-        return false;
-    }
-
-    @Override
-    public BrowseableBAMIndex getBrowseableIndex() {
-        return null;
-    }
-
-    @Override
-    public SAMRecordIterator iterator(final SAMFileSpan fileSpan) {
-        // get the file coordinates for the span:
-        final long[] coordinateArray = ((BAMFileSpan) fileSpan).toCoordinateArray();
-        if (coordinateArray == null || coordinateArray.length == 0) return emptyIterator;
-        try {
-            // create an input stream that reads the source cram stream only within the coordinate pairs:
-            final SeekableStream seekableStream = getSeekableStreamOrFailWithRTE();
-            return new CRAMIterator(seekableStream, referenceSource, coordinateArray, validationStringency);
-        } catch (final IOException e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    @Override
-    public SAMFileHeader getFileHeader() {
-        return iterator.getSAMFileHeader();
-    }
-
-    @Override
-    public SAMRecordIterator getIterator() {
-        if (iterator != null && cramFile == null)
-            return iterator;
-        try {
-            final CRAMIterator newIterator;
-            if (cramFile != null) {
-                newIterator = new CRAMIterator(new FileInputStream(cramFile),
-                        referenceSource, validationStringency);
-            } else
-                newIterator = new CRAMIterator(inputStream, referenceSource, validationStringency);
-
-            iterator = newIterator;
-            return iterator;
-        } catch (final Exception e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    @Override
-    public CloseableIterator<SAMRecord> getIterator(final SAMFileSpan fileSpan) {
-        return iterator(fileSpan);
-    }
-
-    @Override
-    public SAMFileSpan getFilePointerSpanningReads() {
-        return new BAMFileSpan(new Chunk(iterator.firstContainerOffset << 16, Long.MAX_VALUE));
-    }
-
-    private static final SAMRecordIterator emptyIterator = new SAMRecordIterator() {
-
-        @Override
-        public boolean hasNext() {
-            return false;
-        }
-
-        @Override
-        public SAMRecord next() {
-            throw new RuntimeException("No records.");
-        }
-
-        @Override
-        public void remove() {
-            throw new RuntimeException("Remove not supported.");
-        }
-
-        @Override
-        public void close() {
-        }
-
-        @Override
-        public SAMRecordIterator assertSorted(final SortOrder sortOrder) {
-            return this;
-        }
-    };
-
-    @Override
-    public CloseableIterator<SAMRecord> queryAlignmentStart(final String sequence,
-                                                            final int start) {
-        long[] filePointers = null;
-
-        // Hit the index to determine the chunk boundaries for the required data.
-        final SAMFileHeader fileHeader = getFileHeader();
-        final int referenceIndex = fileHeader.getSequenceIndex(sequence);
-        if (referenceIndex != -1) {
-            final BAMIndex fileIndex = getIndex();
-            final BAMFileSpan fileSpan = fileIndex.getSpanOverlapping(
-                    referenceIndex, start, -1);
-            filePointers = fileSpan != null ? fileSpan.toCoordinateArray()
-                    : null;
-        }
-
-        if (filePointers == null || filePointers.length == 0)
-            return emptyIterator;
-
-        Container container;
-        final SeekableStream seekableStream = getSeekableStreamOrFailWithRTE();
-        for (int i = 0; i < filePointers.length; i += 2) {
-            final long containerOffset = filePointers[i] >>> 16;
-
-            try {
-                seekableStream.seek(containerOffset);
-                iterator.nextContainer();
-
-                if (iterator.jumpWithinContainerToPos(fileHeader.getSequenceIndex(sequence), start)) {
-                    return new IntervalIterator(iterator, new QueryInterval(referenceIndex, start, -1));
-                }
-            } catch (final IOException e) {
-                throw new RuntimeIOException(e);
-            } catch (IllegalAccessException e) {
-                throw new SAMException(e);
-            }
-        }
-        throw new SAMException("Failed to query alignment start: " + sequence + " at " + start);
-    }
-
-    CloseableIterator<SAMRecord> query(final int referenceIndex,
-                                       final int start, final int end, final boolean overlap) throws IOException {
-        long[] filePointers = null;
-
-        // Hit the index to determine the chunk boundaries for the required data.
-        if (referenceIndex != -1) {
-            final BAMIndex fileIndex = getIndex();
-            final BAMFileSpan fileSpan = fileIndex.getSpanOverlapping(
-                    referenceIndex, start, -1);
-            filePointers = fileSpan != null ? fileSpan.toCoordinateArray()
-                    : null;
-        }
-
-        if (filePointers == null || filePointers.length == 0)
-            return emptyIterator;
-
-        final CRAMIterator newIterator = new CRAMIterator(getSeekableStreamOrFailWithRTE(), referenceSource, filePointers, validationStringency);
-        return new IntervalIterator(newIterator, new QueryInterval(referenceIndex, start, end), overlap);
-    }
-
-    @Override
-    public CloseableIterator<SAMRecord> queryUnmapped() {
-        final long startOfLastLinearBin = getIndex().getStartOfLastLinearBin();
-
-        final SeekableStream seekableStream = getSeekableStreamOrFailWithRTE();
-        final CRAMIterator newIterator;
-        try {
-            seekableStream.seek(0);
-            newIterator = new CRAMIterator(seekableStream, referenceSource, validationStringency);
-            seekableStream.seek(startOfLastLinearBin >>> 16);
-            final Container container = ContainerIO.readContainerHeader(newIterator.getCramHeader().getVersion().major, seekableStream);
-            seekableStream.seek(seekableStream.position() + container.containerByteSize);
-            iterator = newIterator;
-            iterator.jumpWithinContainerToPos(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX, SAMRecord.NO_ALIGNMENT_START);
-        } catch (final IOException e) {
-            throw new RuntimeEOFException(e);
-        }
-
-        return iterator;
-    }
-
-    private SeekableStream getSeekableStreamOrFailWithRTE() {
-        SeekableStream seekableStream = null;
-        if (cramFile != null) {
-            try {
-                seekableStream = new SeekableFileStream(cramFile);
-            } catch (final FileNotFoundException e) {
-                throw new RuntimeException(e);
-            }
-        } else if (inputStream instanceof SeekableStream) {
-            seekableStream = (SeekableStream) inputStream;
-        }
-        return seekableStream;
-    }
-
-    @Override
-    public void close() {
-        CloserUtil.close(iterator);
-        CloserUtil.close(inputStream);
-        CloserUtil.close(mIndex);
-    }
-
-    @Override
-    void setValidationStringency(final ValidationStringency validationStringency) {
-        this.validationStringency = validationStringency;
-        if (iterator != null) iterator.setValidationStringency(validationStringency);
-    }
-
-    @Override
-    public ValidationStringency getValidationStringency() {
-        return validationStringency;
-    }
-
-    @Override
-    public CloseableIterator<SAMRecord> query(final QueryInterval[] intervals,
-                                              final boolean contained) {
-        return new MultiIntervalIterator(Arrays.asList(intervals).iterator(), !contained);
-    }
-
-    @Override
-    public Type type() {
-        return Type.CRAM_TYPE;
-    }
-
-    @Override
-    void enableFileSource(final SamReader reader, final boolean enabled) {
-        if (iterator != null)
-            iterator.setFileSource(enabled ? reader : null);
-    }
-
-    private class MultiIntervalIterator implements SAMRecordIterator {
-        private final Iterator<QueryInterval> queries;
-        private CloseableIterator<SAMRecord> iterator;
-        private final boolean overlap;
-
-        public MultiIntervalIterator(final Iterator<QueryInterval> queries, final boolean overlap) {
-            this.queries = queries;
-            this.overlap = overlap;
-        }
-
-        @Override
-        public SAMRecordIterator assertSorted(final SortOrder sortOrder) {
-            return null;
-        }
-
-        @Override
-        public void close() {
-
-        }
-
-        @Override
-        public boolean hasNext() {
-            if (iterator == null || !iterator.hasNext()) {
-                if (!queries.hasNext()) return false;
-                do {
-                    final QueryInterval query = queries.next();
-                    try {
-                        iterator = query(query.referenceIndex, query.start, query.end, overlap);
-                    } catch (final IOException e) {
-                        throw new RuntimeException(e);
-                    }
-                } while (!iterator.hasNext() && queries.hasNext());
-            }
-            return iterator.hasNext();
-        }
-
-        @Override
-        public SAMRecord next() {
-            return iterator.next();
-        }
-
-        @Override
-        public void remove() {
-            iterator.remove();
-        }
-    }
-
-    public static class IntervalIterator implements SAMRecordIterator {
-        private final CloseableIterator<SAMRecord> delegate;
-        private final QueryInterval interval;
-        private SAMRecord next;
-        private boolean noMore = false;
-        private final boolean overlap;
-
-        public IntervalIterator(final CloseableIterator<SAMRecord> delegate, final QueryInterval interval) {
-            this(delegate, interval, true);
-        }
-
-        public IntervalIterator(final CloseableIterator<SAMRecord> delegate, final QueryInterval interval, final boolean overlap) {
-            this.delegate = delegate;
-            this.interval = interval;
-            this.overlap = overlap;
-        }
-
-        @Override
-        public SAMRecordIterator assertSorted(final SortOrder sortOrder) {
-            return null;
-        }
-
-        @Override
-        public void close() {
-            delegate.close();
-        }
-
-        @Override
-        public boolean hasNext() {
-            if (next != null) return true;
-            if (noMore) return false;
-
-            while (delegate.hasNext()) {
-                next = delegate.next();
-
-                if (isWithinTheInterval(next)) break;
-                if (isBeyondTheInterval(next)) {
-                    next = null;
-                    noMore = true;
-                    return false;
-                }
-                next = null;
-            }
-
-            return next != null;
-        }
-
-        boolean isWithinTheInterval(final SAMRecord record) {
-            final boolean refMatch = record.getReferenceIndex() == interval.referenceIndex;
-            if (interval.start == -1) return refMatch;
-            if (!refMatch) return false;
-
-            final int start = record.getAlignmentStart();
-            final int end = record.getAlignmentEnd();
-            if (overlap) {
-                return CoordMath.overlaps(start, end, interval.start, interval.end < 0 ? Integer.MAX_VALUE : interval.end);
-            } else {
-                // contained:
-                return CoordMath.encloses(interval.start, interval.end < 0 ? Integer.MAX_VALUE : interval.end, start, end);
-            }
-
-        }
-
-        boolean isBeyondTheInterval(final SAMRecord record) {
-            if (record.getReadUnmappedFlag()) return false;
-            if (record.getReferenceIndex() > interval.referenceIndex) return true;
-            if (record.getReferenceIndex() != interval.referenceIndex) return false;
-
-            return interval.end != -1 && record.getAlignmentStart() > interval.end;
-
-        }
-
-        @Override
-        public SAMRecord next() {
-            final SAMRecord result = next;
-            next = null;
-            return result;
-        }
-
-        @Override
-        public void remove() {
-            throw new RuntimeException("Not available.");
-        }
-    }
-}
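
For orientation, the constructors removed above are used roughly as follows. A minimal sketch with
hypothetical file names; a reference source is always required, and the index file is only needed
for the random-access query methods:

    import htsjdk.samtools.CRAMFileReader;
    import htsjdk.samtools.SAMRecordIterator;
    import htsjdk.samtools.ValidationStringency;
    import htsjdk.samtools.cram.ref.ReferenceSource;
    import java.io.File;
    import java.io.IOException;

    public class ReadCramExample {
        public static void main(final String[] args) throws IOException {
            final CRAMFileReader reader = new CRAMFileReader(
                    new File("example.cram"),
                    new File("example.cram.crai"),           // CRAI is converted to BAI internally
                    new ReferenceSource(new File("ref.fa")), // reference used to restore read bases
                    ValidationStringency.LENIENT);
            final SAMRecordIterator it = reader.getIterator();
            while (it.hasNext()) {
                it.next();
            }
            it.close();
            reader.close();
        }
    }
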
diff --git a/src/java/htsjdk/samtools/CRAMIterator.java b/src/java/htsjdk/samtools/CRAMIterator.java
deleted file mode 100644
index 3504736..0000000
--- a/src/java/htsjdk/samtools/CRAMIterator.java
+++ /dev/null
@@ -1,306 +0,0 @@
-/*******************************************************************************
- * Copyright 2013 EMBL-EBI
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- ******************************************************************************/
-package htsjdk.samtools;
-
-import htsjdk.samtools.SAMFileHeader.SortOrder;
-import htsjdk.samtools.cram.build.ContainerParser;
-import htsjdk.samtools.cram.build.Cram2SamRecordFactory;
-import htsjdk.samtools.cram.build.CramContainerIterator;
-import htsjdk.samtools.cram.build.CramNormalizer;
-import htsjdk.samtools.cram.build.CramSpanContainerIterator;
-import htsjdk.samtools.cram.io.CountingInputStream;
-import htsjdk.samtools.cram.ref.CRAMReferenceSource;
-import htsjdk.samtools.cram.structure.Container;
-import htsjdk.samtools.cram.structure.ContainerIO;
-import htsjdk.samtools.cram.structure.CramCompressionRecord;
-import htsjdk.samtools.cram.structure.CramHeader;
-import htsjdk.samtools.cram.structure.Slice;
-import htsjdk.samtools.seekablestream.SeekableStream;
-import htsjdk.samtools.util.Log;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.math.BigInteger;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-
-import htsjdk.samtools.cram.CRAMException;
-
-public class CRAMIterator implements SAMRecordIterator {
-    private static final Log log = Log.getInstance(CRAMIterator.class);
-    private final CountingInputStream countingInputStream;
-    private CramHeader cramHeader;
-    private ArrayList<SAMRecord> records;
-    private SAMRecord nextRecord = null;
-    private CramNormalizer normalizer;
-    private byte[] refs;
-    private int prevSeqId = SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX;
-    public Container container;
-    private SamReader mReader;
-    long firstContainerOffset = 0;
-    private Iterator<Container> containerIterator;
-
-    private ContainerParser parser;
-    private final CRAMReferenceSource referenceSource;
-
-    private Iterator<SAMRecord> iterator = Collections.<SAMRecord>emptyList().iterator();
-
-    private ValidationStringency validationStringency = ValidationStringency.DEFAULT_STRINGENCY;
-
-    public ValidationStringency getValidationStringency() {
-        return validationStringency;
-    }
-
-    public void setValidationStringency(
-            final ValidationStringency validationStringency) {
-        this.validationStringency = validationStringency;
-    }
-
-    private long samRecordIndex;
-    private ArrayList<CramCompressionRecord> cramRecords;
-
-    public CRAMIterator(final InputStream inputStream, final CRAMReferenceSource referenceSource, final ValidationStringency validationStringency)
-            throws IOException {
-        if (null == referenceSource) {
-            throw new CRAMException("A reference source is required for CRAM files");
-        }
-        this.countingInputStream = new CountingInputStream(inputStream);
-        this.referenceSource = referenceSource;
-        this.validationStringency = validationStringency;
-        final CramContainerIterator containerIterator = new CramContainerIterator(this.countingInputStream);
-        cramHeader = containerIterator.getCramHeader();
-        this.containerIterator = containerIterator;
-
-        firstContainerOffset = this.countingInputStream.getCount();
-        records = new ArrayList<SAMRecord>(10000);
-        normalizer = new CramNormalizer(cramHeader.getSamFileHeader(),
-                referenceSource);
-        parser = new ContainerParser(cramHeader.getSamFileHeader());
-    }
-
-    public CRAMIterator(final SeekableStream seekableStream, final CRAMReferenceSource referenceSource, final long[] coordinates, final ValidationStringency validationStringency)
-            throws IOException {
-        if (null == referenceSource) {
-            throw new CRAMException("A reference source is required for CRAM files");
-        }
-        this.countingInputStream = new CountingInputStream(seekableStream);
-        this.referenceSource = referenceSource;
-        this.validationStringency = validationStringency;
-        final CramSpanContainerIterator containerIterator = CramSpanContainerIterator.fromFileSpan(seekableStream, coordinates);
-        cramHeader = containerIterator.getCramHeader();
-        this.containerIterator = containerIterator;
-
-        firstContainerOffset = containerIterator.getFirstContainerOffset();
-        records = new ArrayList<SAMRecord>(10000);
-        normalizer = new CramNormalizer(cramHeader.getSamFileHeader(),
-                referenceSource);
-        parser = new ContainerParser(cramHeader.getSamFileHeader());
-    }
-
-    @Deprecated
-    public CRAMIterator(final SeekableStream seekableStream, final CRAMReferenceSource referenceSource, final long[] coordinates)
-            throws IOException {
-        this(seekableStream, referenceSource, coordinates, ValidationStringency.DEFAULT_STRINGENCY);
-    }
-
-    public CramHeader getCramHeader() {
-        return cramHeader;
-    }
-
-    void nextContainer() throws IOException, IllegalArgumentException,
-            IllegalAccessException, CRAMException {
-
-        if (containerIterator != null) {
-            if (!containerIterator.hasNext()) {
-                records.clear();
-                nextRecord = null;
-                return;
-            }
-            container = containerIterator.next();
-            if (container.isEOF()) {
-                records.clear();
-                nextRecord = null;
-                return;
-            }
-        } else {
-            container = ContainerIO.readContainer(cramHeader.getVersion(), countingInputStream);
-            if (container.isEOF()) {
-                records.clear();
-                nextRecord = null;
-                return;
-            }
-        }
-
-        if (records == null)
-            records = new ArrayList<SAMRecord>(container.nofRecords);
-        else
-            records.clear();
-        if (cramRecords == null)
-            cramRecords = new ArrayList<CramCompressionRecord>(container.nofRecords);
-        else
-            cramRecords.clear();
-
-        parser.getRecords(container, cramRecords, validationStringency);
-
-        if (container.sequenceId == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX) {
-            refs = new byte[]{};
-            prevSeqId = SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX;
-        } else if (container.sequenceId == Slice.MULTI_REFERENCE) {
-            refs = null;
-            prevSeqId = Slice.MULTI_REFERENCE;
-        } else if (prevSeqId < 0 || prevSeqId != container.sequenceId) {
-            final SAMSequenceRecord sequence = cramHeader.getSamFileHeader()
-                    .getSequence(container.sequenceId);
-            refs = referenceSource.getReferenceBases(sequence, true);
-            if (refs == null) {
-                throw new CRAMException(String.format("Contig %s not found in the reference file.", sequence.getSequenceName()));
-            }
-            prevSeqId = container.sequenceId;
-        }
-
-        for (int i = 0; i < container.slices.length; i++) {
-            final Slice slice = container.slices[i];
-            if (slice.sequenceId < 0)
-                continue;
-            if (validationStringency != ValidationStringency.SILENT && !slice.validateRefMD5(refs)) {
-                log.error(String
-                        .format("Reference sequence MD5 mismatch for slice: seq id %d, start %d, span %d, expected MD5 %s", slice.sequenceId,
-                                slice.alignmentStart, slice.alignmentSpan, String.format("%032x", new BigInteger(1, slice.refMD5))));
-            }
-        }
-
-        normalizer.normalize(cramRecords, refs, 0,
-                container.header.substitutionMatrix);
-
-        final Cram2SamRecordFactory cramToSamRecordFactory = new Cram2SamRecordFactory(
-                cramHeader.getSamFileHeader());
-
-        for (final CramCompressionRecord cramRecord : cramRecords) {
-            final SAMRecord samRecord = cramToSamRecordFactory.create(cramRecord);
-            if (!cramRecord.isSegmentUnmapped()) {
-                final SAMSequenceRecord sequence = cramHeader.getSamFileHeader()
-                        .getSequence(cramRecord.sequenceId);
-                refs = referenceSource.getReferenceBases(sequence, true);
-            }
-
-            samRecord.setValidationStringency(validationStringency);
-
-            if (validationStringency != ValidationStringency.SILENT) {
-                final List<SAMValidationError> validationErrors = samRecord.isValid();
-                SAMUtils.processValidationErrors(validationErrors,
-                        samRecordIndex, validationStringency);
-            }
-
-            if (mReader != null) {
-                final long chunkStart = (container.offset << 16) | cramRecord.sliceIndex;
-                final long chunkEnd = ((container.offset << 16) | cramRecord.sliceIndex) + 1;
-                samRecord.setFileSource(new SAMFileSource(mReader,
-                        new BAMFileSpan(new Chunk(chunkStart, chunkEnd))));
-            }
-
-            records.add(samRecord);
-            samRecordIndex++;
-        }
-        cramRecords.clear();
-        iterator = records.iterator();
-    }
-
-    /**
-     * Skip cached records until given alignment start position.
-     *
-     * @param refIndex reference sequence index
-     * @param pos      alignment start to skip to
-     * @return true if a cached record at or past the requested position was found, false otherwise
-     */
-    public boolean jumpWithinContainerToPos(final int refIndex, final int pos) {
-        if (!hasNext()) return false;
-        int i = 0;
-        for (final SAMRecord record : records) {
-            if (refIndex != SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX && record.getReferenceIndex() != refIndex) continue;
-
-            if (pos <= 0) {
-                if (record.getAlignmentStart() == SAMRecord.NO_ALIGNMENT_START) {
-                    iterator = records.listIterator(i);
-                    return true;
-                }
-            } else {
-                if (record.getAlignmentStart() >= pos) {
-                    iterator = records.listIterator(i);
-                    return true;
-                }
-            }
-            i++;
-        }
-        iterator = Collections.<SAMRecord>emptyList().iterator();
-        return false;
-    }
-
-    @Override
-    public boolean hasNext() {
-        if (container != null && container.isEOF()) return false;
-        if (!iterator.hasNext()) {
-            try {
-                nextContainer();
-            } catch (IOException e) {
-                throw new SAMException(e);
-            } catch (IllegalAccessException e) {
-                throw new SAMException(e);
-            }
-        }
-
-        return !records.isEmpty();
-    }
-
-    @Override
-    public SAMRecord next() {
-        return iterator.next();
-    }
-
-    @Override
-    public void remove() {
-        throw new RuntimeException("Removal of records not implemented.");
-    }
-
-    @Override
-    public void close() {
-        records.clear();
-        //noinspection EmptyCatchBlock
-        try {
-            if (countingInputStream != null)
-                countingInputStream.close();
-        } catch (final IOException e) {
-        }
-    }
-
-    @Override
-    public SAMRecordIterator assertSorted(final SortOrder sortOrder) {
-        throw new RuntimeException("Not implemented.");
-    }
-
-    public SamReader getFileSource() {
-        return mReader;
-    }
-
-    public void setFileSource(final SamReader mReader) {
-        this.mReader = mReader;
-    }
-
-    public SAMFileHeader getSAMFileHeader() {
-        return cramHeader.getSamFileHeader();
-    }
-
-}
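
For context, a hedged usage sketch of the CRAMIterator removed above (the file paths are placeholders, and production code would normally go through SamReaderFactory rather than this class directly):

    import htsjdk.samtools.CRAMIterator;
    import htsjdk.samtools.SAMRecord;
    import htsjdk.samtools.ValidationStringency;
    import htsjdk.samtools.cram.ref.ReferenceSource;

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    public class CramIteratorDemo {
        public static void main(final String[] args) throws IOException {
            // Placeholder paths; a CRAM cannot be decoded without its reference.
            final ReferenceSource reference = new ReferenceSource(new File("ref.fasta"));
            try (final InputStream in = new FileInputStream("reads.cram")) {
                final CRAMIterator iterator = new CRAMIterator(in, reference, ValidationStringency.LENIENT);
                while (iterator.hasNext()) {
                    final SAMRecord record = iterator.next();
                    // ... process record ...
                }
                iterator.close();
            }
        }
    }
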
diff --git a/src/java/htsjdk/samtools/Defaults.java b/src/java/htsjdk/samtools/Defaults.java
deleted file mode 100644
index e6a2e13..0000000
--- a/src/java/htsjdk/samtools/Defaults.java
+++ /dev/null
@@ -1,186 +0,0 @@
-package htsjdk.samtools;
-
-import htsjdk.samtools.util.Log;
-
-import java.io.File;
-import java.util.Collections;
-import java.util.SortedMap;
-import java.util.TreeMap;
-
-/**
- * Embodies defaults for global values that affect how the SAM JDK operates. Defaults are encoded in the class
- * and are also overridable using system properties.
- *
- * @author Tim Fennell
- */
-public class Defaults {
-    private static Log log = Log.getInstance(Defaults.class);
-    
-    /** Should BAM index files be created when writing out coordinate sorted BAM files?  Default = false. */
-    public static final boolean CREATE_INDEX;
-
-    /** Should MD5 files be created when writing out SAM and BAM files?  Default = false. */
-    public static final boolean CREATE_MD5;
-
-    /** Should asynchronous I/O be used where supported throughout all of htsjdk (one thread per file).
-     *  Note: this option takes precedence over {@link #USE_ASYNC_IO_FOR_SAMTOOLS} and {@link #USE_ASYNC_IO_FOR_TRIBBLE}.
-     *  Default = false.
-     */
-    public static final boolean USE_ASYNC_IO;
-
-    /** Should asynchronous I/O be used where supported by the samtools package (one thread per file).
-     *  Note: The {@link #USE_ASYNC_IO} option takes precedence over this option.
-     *  Default = false.
-     */
-    public static final boolean USE_ASYNC_IO_FOR_SAMTOOLS;
-
-    /** Should asynchronous I/O be used where supported by the tribble package (one thread per file).
-     *  Note: performance may depend on the characteristics of the input file (e.g. the number of samples in the VCF) and should be tested on a case-by-case basis.
-     *  In particular, asynchronous reading of VCF files with few samples is known to perform worse than synchronous reading.
-     *  Note: The {@link #USE_ASYNC_IO} option takes precedence over this option.
-     *  Default = false.
-     */
-    public static final boolean USE_ASYNC_IO_FOR_TRIBBLE;
-
-    /** Compression level to be used for writing BAM and other block-compressed outputs.  Default = 5. */
-    public static final int COMPRESSION_LEVEL;
-
-    /** Buffer size, in bytes, used whenever reading/writing files or streams.  Default = 128k. */
-    public static final int BUFFER_SIZE;
-
-    /**
-     * Even if BUFFER_SIZE is 0, this is guaranteed to be non-zero.  If BUFFER_SIZE is non-zero,
-     * this == BUFFER_SIZE
-     */
-    public static final int NON_ZERO_BUFFER_SIZE;
-
-    /** Should BlockCompressedOutputStream attempt to load libIntelDeflater? */
-    public static final boolean TRY_USE_INTEL_DEFLATER;
-
-    /**
-     * Path to libIntelDeflater.so.  If this is not set, the library is looked for in the directory
-     * where the executable jar lives.
-     */
-    public static final String INTEL_DEFLATER_SHARED_LIBRARY_PATH;
-
-    /**
-     * The reference FASTA file.  If this is not set, the file is null.  This file may be required for reading
-     * or writing SAM files (e.g. CRAM).
-     */
-    public static final File REFERENCE_FASTA;
-
-    /** Custom reader factory able to handle URL-based resources such as ga4gh.
-     *  Expected format: <url prefix>,<fully qualified factory class name>[,<jar file name>]
-     *  E.g. https://www.googleapis.com/genomics/v1beta/reads/,com.google.genomics.ReaderFactory
-     *  OR https://www.googleapis.com/genomics/v1beta/reads/,com.google.genomics.ReaderFactory,/tmp/genomics.jar
-     */
-    public static final String CUSTOM_READER_FACTORY;
-
-    /**
-     * Boolean describing whether downloading a reference file is allowed (for CRAM files)
-     * when the reference file is not specified by the user.
-     * Enabling this is not necessarily a good idea, since the download often fails.
-     */
-    public static final boolean USE_CRAM_REF_DOWNLOAD;
-
-    /**
-     * A mask (pattern) to use when building EBI reference service URL for a
-     * given MD5 checksum. Must contain one and only one string placeholder.
-     */
-    public static final String EBI_REFERENCE_SEVICE_URL_MASK;
-
-
-    static {
-        CREATE_INDEX = getBooleanProperty("create_index", false);
-        CREATE_MD5 = getBooleanProperty("create_md5", false);
-        if (hasProperty("use_async_io")){
-            USE_ASYNC_IO = getBooleanProperty("use_async_io", false);
-            USE_ASYNC_IO_FOR_SAMTOOLS = USE_ASYNC_IO;
-            USE_ASYNC_IO_FOR_TRIBBLE = USE_ASYNC_IO;
-        } else {
-            USE_ASYNC_IO = false;
-            USE_ASYNC_IO_FOR_SAMTOOLS = getBooleanProperty("use_async_io_samtools", false);
-            USE_ASYNC_IO_FOR_TRIBBLE = getBooleanProperty("use_async_io_tribble", false);
-        }
-        COMPRESSION_LEVEL = getIntProperty("compression_level", 5);
-        BUFFER_SIZE = getIntProperty("buffer_size", 1024 * 128);
-        TRY_USE_INTEL_DEFLATER = getBooleanProperty("try_use_intel_deflater", true);
-        INTEL_DEFLATER_SHARED_LIBRARY_PATH = getStringProperty("intel_deflater_so_path", null);
-        if (BUFFER_SIZE == 0) {
-            NON_ZERO_BUFFER_SIZE = 1024 * 128;
-        } else {
-            NON_ZERO_BUFFER_SIZE = BUFFER_SIZE;
-        }
-        REFERENCE_FASTA = getFileProperty("reference_fasta", null);
-        USE_CRAM_REF_DOWNLOAD = getBooleanProperty("use_cram_ref_download", false);
-        EBI_REFERENCE_SEVICE_URL_MASK = "http://www.ebi.ac.uk/ena/cram/md5/%s";
-        CUSTOM_READER_FACTORY = getStringProperty("custom_reader", "");
-    }
-
-    /**
-     * Returns a map of all default values (keys are names), lexicographically sorted by keys.
-     * The returned map is unmodifiable.
-     * This function is useful for example when logging all defaults.
-     */
-    public static SortedMap<String, Object> allDefaults(){
-        final SortedMap<String, Object> result = new TreeMap<>();
-        result.put("CREATE_INDEX", CREATE_INDEX);
-        result.put("CREATE_MD5", CREATE_MD5);
-        result.put("USE_ASYNC_IO", USE_ASYNC_IO);
-        result.put("USE_ASYNC_IO_FOR_SAMTOOLS", USE_ASYNC_IO_FOR_SAMTOOLS);
-        result.put("USE_ASYNC_IO_FOR_TRIBBLE", USE_ASYNC_IO_FOR_TRIBBLE);
-        result.put("COMPRESSION_LEVEL", COMPRESSION_LEVEL);
-        result.put("BUFFER_SIZE", BUFFER_SIZE);
-        result.put("TRY_USE_INTEL_DEFLATER", TRY_USE_INTEL_DEFLATER);
-        result.put("INTEL_DEFLATER_SHARED_LIBRARY_PATH", INTEL_DEFLATER_SHARED_LIBRARY_PATH);
-        result.put("NON_ZERO_BUFFER_SIZE", NON_ZERO_BUFFER_SIZE);
-        result.put("REFERENCE_FASTA", REFERENCE_FASTA);
-        result.put("USE_CRAM_REF_DOWNLOAD", USE_CRAM_REF_DOWNLOAD);
-        result.put("EBI_REFERENCE_SEVICE_URL_MASK", EBI_REFERENCE_SEVICE_URL_MASK);
-        result.put("CUSTOM_READER_FACTORY", CUSTOM_READER_FACTORY);
-        return Collections.unmodifiableSortedMap(result);
-    }
-
-    /** Gets a string system property, prefixed with "samjdk.", using the default
-     * if the property does not exist or if the Java security manager raises an exception
-     * (for applications started with -Djava.security.manager). */
-    private static String getStringProperty(final String name, final String def) {
-        try {
-            return System.getProperty("samjdk." + name, def);
-        } catch (final java.security.AccessControlException error) {
-            log.warn(error,"java Security Manager forbids 'System.getProperty(\"" + name + "\")' , returning default value: " + def );
-            return def;
-        }
-    }
-
-    /** Checks whether a string system property, prefixed with "samjdk.", exists.
-     * If the property does not exist, or if the Java security manager raises an exception
-     * (for applications started with -Djava.security.manager), this method returns false. */
-    private static boolean hasProperty(final String name){
-        try {
-            return null != System.getProperty("samjdk." + name);
-        } catch (final java.security.AccessControlException error) {
-            log.warn(error,"java Security Manager forbids 'System.getProperty(\"" + name + "\")' , returning false");
-            return false;
-        }
-    }
-
-    /** Gets a boolean system property, prefixed with "samjdk." using the default if the property does not exist. */
-    private static boolean getBooleanProperty(final String name, final boolean def) {
-        final String value = getStringProperty(name, Boolean.toString(def));
-        return Boolean.parseBoolean(value);
-    }
-
-    /** Gets an int system property, prefixed with "samjdk." using the default if the property does not exist. */
-    private static int getIntProperty(final String name, final int def) {
-        final String value = getStringProperty(name, Integer.toString(def));
-        return Integer.parseInt(value);
-    }
-
-    /** Gets a File system property, prefixed with "samjdk." using the default if the property does not exist. */
-    private static File getFileProperty(final String name, final String def) {
-        final String value = getStringProperty(name, def);
-        // TODO: assert that it is readable
-        return (null == value) ? null : new File(value);
-    }
-}
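
As the class above documents, every default can be overridden with a system property prefixed "samjdk.". A small sketch of how those defaults surface at runtime (the property values in the comment are examples only, and must be set before the Defaults class is first loaded):

    import htsjdk.samtools.Defaults;

    import java.util.Map;

    public class DefaultsDemo {
        public static void main(final String[] args) {
            // Typically launched as:
            //   java -Dsamjdk.compression_level=1 -Dsamjdk.create_index=true DefaultsDemo
            for (final Map.Entry<String, Object> entry : Defaults.allDefaults().entrySet()) {
                System.out.println(entry.getKey() + " = " + entry.getValue());
            }
        }
    }
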
diff --git a/src/java/htsjdk/samtools/SAMFileReader.java b/src/java/htsjdk/samtools/SAMFileReader.java
deleted file mode 100644
index 6e0e9b8..0000000
--- a/src/java/htsjdk/samtools/SAMFileReader.java
+++ /dev/null
@@ -1,751 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools;
-
-
-import htsjdk.samtools.seekablestream.SeekableBufferedStream;
-import htsjdk.samtools.seekablestream.SeekableHTTPStream;
-import htsjdk.samtools.seekablestream.SeekableStream;
-import htsjdk.samtools.util.*;
-
-import java.io.*;
-import java.net.URL;
-import java.util.NoSuchElementException;
-import java.util.zip.GZIPInputStream;
-
-/**
- * Class for reading and querying SAM/BAM files.  Delegates to appropriate concrete implementation.
- *
- * @see SamReaderFactory
- */
-@Deprecated
-public class SAMFileReader implements SamReader, SamReader.Indexing {
-
-    private static ValidationStringency defaultValidationStringency = ValidationStringency.DEFAULT_STRINGENCY;
-
-    public static ValidationStringency getDefaultValidationStringency() {
-        return defaultValidationStringency;
-    }
-
-    /**
-     * Set validation stringency for all subsequently-created SAMFileReaders.  This is the only way to
-     * change the validation stringency for reading the SAM header.
-     * NOTE: Programs that change this should make sure to have a try/finally clause wrapping the work that
-     * they do, so that the original stringency can be restored after the program's work is done.  This facilitates
-     * calling a program that is usually run stand-alone from another program, without messing up the original
-     * validation stringency.
-     */
-    public static void setDefaultValidationStringency(final ValidationStringency defaultValidationStringency) {
-        SAMFileReader.defaultValidationStringency = defaultValidationStringency;
-    }
-
-    /**
-     * Returns the SAMSequenceDictionary from the provided FASTA.
-     */
-    public static SAMSequenceDictionary getSequenceDictionary(final File dictionaryFile) {
-        final SAMFileReader samFileReader = new SAMFileReader(dictionaryFile);
-        final SAMSequenceDictionary dict = samFileReader.getFileHeader().getSequenceDictionary();
-        CloserUtil.close(samFileReader);
-        return dict;
-    }
-
-    private boolean mIsBinary = false;
-    private BAMIndex mIndex = null;
-    private SAMRecordFactory samRecordFactory = new DefaultSAMRecordFactory();
-    private ReaderImplementation mReader = null;
-    private boolean useAsyncIO = Defaults.USE_ASYNC_IO_FOR_SAMTOOLS;
-
-    private File samFile = null;
-
-    private static class EmptySamIterator implements CloseableIterator<SAMRecord> {
-        @Override
-        public boolean hasNext() {
-            return false;
-        }
-
-        @Override
-        public SAMRecord next() {
-            throw new NoSuchElementException("next called on empty iterator");
-        }
-
-        @Override
-        public void remove() {
-            throw new UnsupportedOperationException("Not supported: remove");
-        }
-
-        @Override
-        public void close() {
-            //no-op
-        }
-    }
-
-
-    /**
-     * Prepare to read a SAM or BAM file.  Indexed lookup not allowed because reading from InputStream.
-     */
-    public SAMFileReader(final InputStream stream) {
-        this(stream, false);
-    }
-
-    /**
-     * Prepare to read a SAM or BAM file.  If the given file is a BAM, and has a companion BAI index file
-     * that is named according to the convention, it will be found and opened, and indexed query will be allowed.
-     */
-    public SAMFileReader(final File file) {
-        this(file, null, false);
-    }
-
-    /**
-     * Prepare to read a SAM or BAM file.  If the given file is a BAM, and an index is present, indexed query
-     * will be allowed.
-     *
-     * @param file      SAM or BAM to read.
-     * @param indexFile Index file that is companion to BAM, or null if no index file, or if index file
-     *                  should be found automatically.
-     */
-    public SAMFileReader(final File file, final File indexFile) {
-        this(file, indexFile, false);
-    }
-
-    /**
-     * Read a SAM or BAM file.  Indexed lookup not allowed because reading from InputStream.
-     *
-     * @param stream      input SAM or BAM.  This is buffered internally so caller need not buffer.
-     * @param eagerDecode if true, decode SAM record entirely when reading it.
-     */
-    public SAMFileReader(final InputStream stream, final boolean eagerDecode) {
-        init(stream, null, null, eagerDecode, defaultValidationStringency);
-    }
-
-    /**
-     * Read a SAM or BAM file, possibly with an index file if present.
-     * If the given file is a BAM, and an index is present, indexed query will be allowed.
-     *
-     * @param file        SAM or BAM.
-     * @param eagerDecode if true, decode SAM record entirely when reading it.
-     */
-    public SAMFileReader(final File file, final boolean eagerDecode) {
-        this(file, null, eagerDecode);
-    }
-
-    /**
-     * Read a SAM or BAM file, possibly with an index file. If the given file is a BAM, and an index is present,
-     * indexed query will be allowed.
-     *
-     * @param file        SAM or BAM.
-     * @param indexFile   Location of index file, or null in order to use the default index file (if present).
-     * @param eagerDecode if true, decode SAM record entirely when reading it.
-     */
-    public SAMFileReader(final File file, final File indexFile, final boolean eagerDecode) {
-        init(null, file, indexFile, eagerDecode, defaultValidationStringency);
-    }
-
-    /**
-     * Read a BAM file over HTTP.  If an index file is provided,
-     * indexed query will be allowed.
-     *
-     * @param url         BAM.
-     * @param indexFile   Location of index file, or null if indexed access not required.
-     * @param eagerDecode if true, decode SAM record entirely when reading it.
-     */
-    public SAMFileReader(final URL url, final File indexFile, final boolean eagerDecode) {
-        init(new SeekableBufferedStream(new SeekableHTTPStream(url)),
-                indexFile, eagerDecode, defaultValidationStringency);
-    }
-
-    /**
-     * Read a BAM file via caller-supplied mechanism.  Indexed query will be allowed, but
-     * index file must be provided in that case.
-     *
-     * @param strm        BAM -- If the stream is not buffered, caller should wrap in SeekableBufferedStream for
-     *                    better performance.
-     * @param indexFile   Location of index file, or null if indexed access is not required.
-     * @param eagerDecode if true, decode SAM record entirely when reading it.
-     */
-    public SAMFileReader(final SeekableStream strm, final File indexFile, final boolean eagerDecode) {
-        init(strm, indexFile, eagerDecode, defaultValidationStringency);
-    }
-
-    /**
-     * @param strm BAM -- If the stream is not buffered, caller should wrap in SeekableBufferedStream for
-     *             better performance.
-     */
-    public SAMFileReader(final SeekableStream strm, final SeekableStream indexStream, final boolean eagerDecode) {
-        init(strm, indexStream, eagerDecode, defaultValidationStringency);
-    }
-
-    public void close() {
-        if (mReader != null) {
-            mReader.close();
-        }
-        mReader = null;
-        mIndex = null;
-    }
-
-    /**
-     * If true, this reader will use asynchronous IO.
-     */
-    public void setUseAsyncIO(final boolean useAsyncIO) {
-        this.useAsyncIO = useAsyncIO;
-    }
-
-    /**
-     * If true, writes the source of every read into the source SAMRecords.
-     *
-     * @param enabled true to write source information into each SAMRecord.
-     */
-    public void enableFileSource(final boolean enabled) {
-        mReader.enableFileSource(this, enabled);
-    }
-
-    /**
-     * If true, uses the caching version of the index reader.
-     *
-     * @param enabled true to use the caching version of the reader.
-     */
-    public void enableIndexCaching(final boolean enabled) {
-        if (mIndex != null)
-            throw new SAMException("Unable to turn on index caching; index file has already been loaded.");
-        mReader.enableIndexCaching(enabled);
-    }
-
-    /**
-     * If false, disable the use of memory mapping for accessing index files (default behavior is to use memory mapping).
-     * This is slower but more scalable when accessing large numbers of BAM files sequentially.
-     *
-     * @param enabled True to use memory mapping, false to use regular I/O.
-     */
-    public void enableIndexMemoryMapping(final boolean enabled) {
-        if (mIndex != null) {
-            throw new SAMException("Unable to change index memory mapping; index file has already been loaded.");
-        }
-        mReader.enableIndexMemoryMapping(enabled);
-    }
-
-    /**
-     * Only meaningful for BAM file readers - enables or disables checking of checksums on uncompressed
-     * data during decompression. Enabling this will increase decompression time by 15-30%.
-     */
-    public void enableCrcChecking(final boolean enabled) {
-        this.mReader.enableCrcChecking(enabled);
-    }
-
-    /**
-     * Override the default SAMRecordFactory class used to instantiate instances of SAMRecord and BAMRecord.
-     */
-    public void setSAMRecordFactory(final SAMRecordFactory factory) {
-        this.samRecordFactory = factory;
-        this.mReader.setSAMRecordFactory(factory);
-    }
-
-    /**
-     * @return True if this is a BAM reader.
-     */
-    public boolean isBinary() {
-        return mIsBinary;
-    }
-
-    /**
-     * @return true if this is a BAM file and has an index
-     */
-    public boolean hasIndex() {
-        return mReader.hasIndex();
-    }
-
-    @Override
-    public Indexing indexing() {
-        return this;
-    }
-
-    /**
-     * Retrieves the index for the given file type.  Ensure that the index is of the specified type.
-     *
-     * @return An index of the given type.
-     */
-    public BAMIndex getIndex() {
-        return mReader.getIndex();
-    }
-
-    /**
-     * Returns true if the supported index is browseable, meaning the bins in it can be traversed
-     * and chunk data inspected and retrieved.
-     *
-     * @return True if the index supports the BrowseableBAMIndex interface.  False otherwise.
-     */
-    public boolean hasBrowseableIndex() {
-        return hasIndex() && getIndex() instanceof BrowseableBAMIndex;
-    }
-
-    /**
-     * Gets an index tagged with the BrowseableBAMIndex interface.  Throws an exception if no such
-     * index is available.
-     *
-     * @return An index with a browseable interface, if possible.
-     * @throws SAMException if no such index is available.
-     */
-    public BrowseableBAMIndex getBrowseableIndex() {
-        final BAMIndex index = getIndex();
-        if (!(index instanceof BrowseableBAMIndex))
-            throw new SAMException("Cannot return index: index created by BAM is not browseable.");
-        return BrowseableBAMIndex.class.cast(index);
-    }
-
-    public SAMFileHeader getFileHeader() {
-        return mReader.getFileHeader();
-    }
-
-    @Override
-    public Type type() {
-        return mReader.type();
-    }
-
-    @Override
-    public String getResourceDescription() {
-        return this.toString();
-    }
-
-    /**
-     * Control validation of SAMRecords as they are read from file.
-     * In order to control validation stringency for SAM Header, call SAMFileReader.setDefaultValidationStringency
-     * before constructing a SAMFileReader.
-     */
-    public void setValidationStringency(final ValidationStringency validationStringency) {
-        mReader.setValidationStringency(validationStringency);
-    }
-
-    /**
-     * Iterate through file in order.  For a SAMFileReader constructed from an InputStream, and for any SAM file,
-     * a 2nd iteration starts where the 1st one left off.  For a BAM constructed from a File, each new iteration
-     * starts at the first record.
-     * <p/>
-     * Only a single open iterator on a SAM or BAM file may be extant at any one time.  If you want to start
-     * a second iteration, the first one must be closed first.
-     */
-    public SAMRecordIterator iterator() {
-        return new AssertingIterator(mReader.getIterator());
-    }
-
-    /**
-     * Iterate through the given chunks in the file.
-     *
-     * @param chunks List of chunks for which to retrieve data.
-     * @return An iterator over the given chunks.
-     */
-    public SAMRecordIterator iterator(final SAMFileSpan chunks) {
-        return new AssertingIterator(mReader.getIterator(chunks));
-    }
-
-    /**
-     * Gets a pointer spanning all reads in the BAM file.
-     *
-     * @return Unbounded pointer to the first record, in chunk format.
-     */
-    public SAMFileSpan getFilePointerSpanningReads() {
-        return mReader.getFilePointerSpanningReads();
-    }
-
-    /**
-     * Iterate over records that match the given interval.  Only valid to call this if hasIndex() == true.
-     * <p/>
-     * Only a single open iterator on a given SAMFileReader may be extant at any one time.  If you want to start
-     * a second iteration, the first one must be closed first.  You can use a second SAMFileReader to iterate
-     * in parallel over the same underlying file.
-     * <p/>
-     * Note that indexed lookup is not perfectly efficient in terms of disk I/O.  I.e. some SAMRecords may be read
-     * and then discarded because they do not match the interval of interest.
-     * <p/>
-     * Note that an unmapped read will be returned by this call if it has a coordinate for the purpose of sorting that
-     * is in the query region.
-     *
-     * @param sequence  Reference sequence of interest.
-     * @param start     1-based, inclusive start of interval of interest. Zero implies start of the reference sequence.
-     * @param end       1-based, inclusive end of interval of interest. Zero implies end of the reference sequence.
-     * @param contained If true, each SAMRecord returned will have its alignment completely contained in the
-     *                  interval of interest.  If false, the alignment of the returned SAMRecords need only overlap the interval of interest.
-     * @return Iterator over the SAMRecords matching the interval.
-     */
-    public SAMRecordIterator query(final String sequence, final int start, final int end, final boolean contained) {
-        final int referenceIndex = getFileHeader().getSequenceIndex(sequence);
-        final CloseableIterator<SAMRecord> currentIterator;
-        if (referenceIndex == -1) {
-            currentIterator = new EmptySamIterator();
-        } else {
-            final QueryInterval[] queryIntervals = {new QueryInterval(referenceIndex, start, end)};
-            currentIterator = mReader.query(queryIntervals, contained);
-        }
-        return new AssertingIterator(currentIterator);
-    }
-
-    /**
-     * Iterate over records that overlap the given interval.  Only valid to call this if hasIndex() == true.
-     * <p/>
-     * Only a single open iterator on a given SAMFileReader may be extant at any one time.  If you want to start
-     * a second iteration, the first one must be closed first.
-     * <p/>
-     * Note that indexed lookup is not perfectly efficient in terms of disk I/O.  I.e. some SAMRecords may be read
-     * and then discarded because they do not match the interval of interest.
-     * <p/>
-     * Note that an unmapped read will be returned by this call if it has a coordinate for the purpose of sorting that
-     * is in the query region.
-     *
-     * @param sequence Reference sequence of interest.
-     * @param start    1-based, inclusive start of interval of interest. Zero implies start of the reference sequence.
-     * @param end      1-based, inclusive end of interval of interest. Zero implies end of the reference sequence.
-     * @return Iterator over the SAMRecords overlapping the interval.
-     */
-    public SAMRecordIterator queryOverlapping(final String sequence, final int start, final int end) {
-        return query(sequence, start, end, false);
-    }
-
-    /**
-     * Iterate over records that are contained in the given interval.  Only valid to call this if hasIndex() == true.
-     * <p/>
-     * Only a single open iterator on a given SAMFileReader may be extant at any one time.  If you want to start
-     * a second iteration, the first one must be closed first.
-     * <p/>
-     * Note that indexed lookup is not perfectly efficient in terms of disk I/O.  I.e. some SAMRecords may be read
-     * and then discarded because they do not match the interval of interest.
-     * <p/>
-     * Note that an unmapped read will be returned by this call if it has a coordinate for the purpose of sorting that
-     * is in the query region.
-     *
-     * @param sequence Reference sequence of interest.
-     * @param start    1-based, inclusive start of interval of interest. Zero implies start of the reference sequence.
-     * @param end      1-based, inclusive end of interval of interest. Zero implies end of the reference sequence.
-     * @return Iterator over the SAMRecords contained in the interval.
-     */
-    public SAMRecordIterator queryContained(final String sequence, final int start, final int end) {
-        return query(sequence, start, end, true);
-    }
-
-    /**
-     * Iterate over records that match one of the given intervals.  This may be more efficient than querying
-     * each interval separately, because reading the same SAMRecords multiple times is avoided.
-     * <p/>
-     * Only valid to call this if hasIndex() == true.
-     * <p/>
-     * Only a single open iterator on a given SAMFileReader may be extant at any one time.  If you want to start
-     * a second iteration, the first one must be closed first.  You can use a second SAMFileReader to iterate
-     * in parallel over the same underlying file.
-     * <p/>
-     * Note that indexed lookup is not perfectly efficient in terms of disk I/O.  I.e. some SAMRecords may be read
-     * and then discarded because they do not match an interval of interest.
-     * <p/>
-     * Note that an unmapped read will be returned by this call if it has a coordinate for the purpose of sorting that
-     * is in the query region.
-     *
-     * @param intervals Intervals to be queried.  The intervals must be optimized, i.e. in order, with overlapping
-     *                  and abutting intervals merged.  This can be done with {@link htsjdk.samtools.QueryInterval#optimizeIntervals}
-     * @param contained If true, each SAMRecord returned will have its alignment completely contained in one of the
-     *                  intervals of interest.  If false, the alignment of the returned SAMRecords need only overlap one of
-     *                  the intervals of interest.
-     * @return Iterator over the SAMRecords matching the interval.
-     */
-    public SAMRecordIterator query(final QueryInterval[] intervals, final boolean contained) {
-        return new AssertingIterator(mReader.query(intervals, contained));
-    }
-
-    /**
-     * Iterate over records that overlap any of the given intervals.  This may be more efficient than querying
-     * each interval separately, because reading the same SAMRecords multiple times is avoided.
-     * <p/>
-     * Only valid to call this if hasIndex() == true.
-     * <p/>
-     * Only a single open iterator on a given SAMFileReader may be extant at any one time.  If you want to start
-     * a second iteration, the first one must be closed first.
-     * <p/>
-     * Note that indexed lookup is not perfectly efficient in terms of disk I/O.  I.e. some SAMRecords may be read
-     * and then discarded because they do not match the interval of interest.
-     * <p/>
-     * Note that an unmapped read will be returned by this call if it has a coordinate for the purpose of sorting that
-     * is in the query region.
-     *
-     * @param intervals Intervals to be queried.  The intervals must be optimized, i.e. in order, with overlapping
-     *                  and abutting intervals merged.  This can be done with {@link htsjdk.samtools.QueryInterval#optimizeIntervals}
-     * @return Iterator over the SAMRecords overlapping any of the intervals.
-     */
-    public SAMRecordIterator queryOverlapping(final QueryInterval[] intervals) {
-        return query(intervals, false);
-    }
-
-    /**
-     * Iterate over records that are contained in any of the given intervals.  This may be more efficient than querying
-     * each interval separately, because reading the same SAMRecords multiple times is avoided.
-     * <p/>
-     * Only valid to call this if hasIndex() == true.
-     * <p/>
-     * Only a single open iterator on a given SAMFileReader may be extant at any one time.  If you want to start
-     * a second iteration, the first one must be closed first.
-     * <p/>
-     * Note that indexed lookup is not perfectly efficient in terms of disk I/O.  I.e. some SAMRecords may be read
-     * and then discarded because they do not match the interval of interest.
-     * <p/>
-     * Note that an unmapped read will be returned by this call if it has a coordinate for the purpose of sorting that
-     * is in the query region.
-     *
-     * @param intervals Intervals to be queried.  The intervals must be optimized, i.e. in order, with overlapping
-     *                  and abutting intervals merged.  This can be done with {@link htsjdk.samtools.QueryInterval#optimizeIntervals}
-     * @return Iterator over the SAMRecords contained in any of the intervals.
-     */
-    public SAMRecordIterator queryContained(final QueryInterval[] intervals) {
-        return query(intervals, true);
-    }
-
-
-    public SAMRecordIterator queryUnmapped() {
-        return new AssertingIterator(mReader.queryUnmapped());
-    }
-
-    /**
-     * Iterate over records that map to the given sequence and start at the given position.  Only valid to call this if hasIndex() == true.
-     * <p/>
-     * Only a single open iterator on a given SAMFileReader may be extant at any one time.  If you want to start
-     * a second iteration, the first one must be closed first.
-     * <p/>
-     * Note that indexed lookup is not perfectly efficient in terms of disk I/O.  I.e. some SAMRecords may be read
-     * and then discarded because they do not match the interval of interest.
-     * <p/>
-     * Note that an unmapped read will be returned by this call if it has a coordinate for the purpose of sorting that
-     * matches the arguments.
-     *
-     * @param sequence Reference sequence of interest.
-     * @param start    Alignment start of interest.
-     * @return Iterator over the SAMRecords with the given alignment start.
-     */
-    public SAMRecordIterator queryAlignmentStart(final String sequence, final int start) {
-        return new AssertingIterator(mReader.queryAlignmentStart(sequence, start));
-    }
-
-    /**
-     * Fetch the mate for the given read.  Only valid to call this if hasIndex() == true.
-     * This will work whether the mate has a coordinate or not, so long as the given read has correct
-     * mate information.  This method iterates over the SAM file, so there may not be an unclosed
-     * iterator on the SAM file when this method is called.
-     * <p/>
-     * Note that it is not possible to call queryMate when iterating over the SAMFileReader, because queryMate
-     * requires its own iteration, and there cannot be two simultaneous iterations on the same SAMFileReader.  The
-     * work-around is to open a second SAMFileReader on the same input file, and call queryMate on the second
-     * reader.
-     *
-     * @param rec Record for which mate is sought.  Must be a paired read.
-     * @return rec's mate, or null if it cannot be found.
-     */
-    public SAMRecord queryMate(final SAMRecord rec) {
-        if (!rec.getReadPairedFlag()) {
-            throw new IllegalArgumentException("queryMate called for unpaired read.");
-        }
-        if (rec.getFirstOfPairFlag() == rec.getSecondOfPairFlag()) {
-            throw new IllegalArgumentException("SAMRecord must be either first and second of pair, but not both.");
-        }
-        final boolean firstOfPair = rec.getFirstOfPairFlag();
-        final CloseableIterator<SAMRecord> it;
-        if (rec.getMateReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX) {
-            it = queryUnmapped();
-        } else {
-            it = queryAlignmentStart(rec.getMateReferenceName(), rec.getMateAlignmentStart());
-        }
-        try {
-            SAMRecord mateRec = null;
-            while (it.hasNext()) {
-                final SAMRecord next = it.next();
-                if (!next.getReadPairedFlag()) {
-                    if (rec.getReadName().equals(next.getReadName())) {
-                        throw new SAMFormatException("Paired and unpaired reads with same name: " + rec.getReadName());
-                    }
-                    continue;
-                }
-                if (firstOfPair) {
-                    if (next.getFirstOfPairFlag()) continue;
-                } else {
-                    if (next.getSecondOfPairFlag()) continue;
-                }
-                if (rec.getReadName().equals(next.getReadName())) {
-                    if (mateRec != null) {
-                        throw new SAMFormatException("Multiple SAMRecord with read name " + rec.getReadName() +
-                                " for " + (firstOfPair ? "second" : "first") + " end.");
-                    }
-                    mateRec = next;
-                }
-            }
-            return mateRec;
-        } finally {
-            it.close();
-        }
-    }
-
-
-    private void init(final SeekableStream strm, final File indexFile, final boolean eagerDecode,
-                      final ValidationStringency validationStringency) {
-
-        try {
-            if (streamLooksLikeBam(strm)) {
-                mIsBinary = true;
-                mReader = new BAMFileReader(strm, indexFile, eagerDecode,  useAsyncIO, validationStringency, this.samRecordFactory);
-            } else {
-                throw new SAMFormatException("Unrecognized file format: " + strm);
-            }
-            setValidationStringency(validationStringency);
-        } catch (final IOException e) {
-            throw new RuntimeIOException(e);
-        }
-    }
-
-    private void init(final SeekableStream strm, final SeekableStream indexStream, final boolean eagerDecode,
-                      final ValidationStringency validationStringency) {
-
-        try {
-            if (streamLooksLikeBam(strm)) {
-                mIsBinary = true;
-                mReader = new BAMFileReader(strm, indexStream, eagerDecode, useAsyncIO, validationStringency, this.samRecordFactory);
-            } else {
-                throw new SAMFormatException("Unrecognized file format: " + strm);
-            }
-            setValidationStringency(validationStringency);
-        } catch (final IOException e) {
-            throw new RuntimeIOException(e);
-        }
-    }
-
-    // It's too expensive to examine the remote file to determine type.
-    // Rely on file extension.
-    private boolean streamLooksLikeBam(final SeekableStream strm) {
-        String source = strm.getSource();
-        if (source == null) return true;
-        source = source.toLowerCase();
-        //Source will typically be a file path or URL.
-        //If it's a URL, we require ".bam" to appear in the path or query string.
-        return source.endsWith(".bam") || source.contains(".bam?") || source.contains(".bam&") || source.contains(".bam%26");
-    }
-
-    private void init(final InputStream stream, File file, final File indexFile, final boolean eagerDecode,
-                      final ValidationStringency validationStringency) {
-        if (stream != null && file != null) throw new IllegalArgumentException("stream and file are mutually exclusive");
-        this.samFile = file;
-
-        try {
-            BufferedInputStream bufferedStream;
-            // Buffering is required because mark() and reset() are called on the input stream.
-            final int bufferSize = Math.max(Defaults.BUFFER_SIZE, BlockCompressedStreamConstants.MAX_COMPRESSED_BLOCK_SIZE);
-            if (file != null) bufferedStream = new BufferedInputStream(new FileInputStream(file), bufferSize);
-            else bufferedStream = IOUtil.toBufferedStream(stream);
-            if (SamStreams.isBAMFile(bufferedStream)) {
-                mIsBinary = true;
-                if (file == null || !file.isFile()) {
-                    // Handle case in which file is a named pipe, e.g. /dev/stdin or created by mkfifo
-                    mReader = new BAMFileReader(bufferedStream, indexFile, eagerDecode, useAsyncIO, validationStringency, this.samRecordFactory);
-                } else {
-                    bufferedStream.close();
-                    mReader = new BAMFileReader(file, indexFile, eagerDecode, useAsyncIO,  validationStringency, this.samRecordFactory);
-                }
-            } else if (BlockCompressedInputStream.isValidFile(bufferedStream)) {
-                mIsBinary = false;
-                mReader = new SAMTextReader(new BlockCompressedInputStream(bufferedStream), validationStringency, this.samRecordFactory);
-            } else if (SamStreams.isGzippedSAMFile(bufferedStream)) {
-                mIsBinary = false;
-                mReader = new SAMTextReader(new GZIPInputStream(bufferedStream), validationStringency, this.samRecordFactory);
-            } else if (SamStreams.isCRAMFile(bufferedStream)) {
-                if (file == null || !file.isFile()) {
-                    file = null;
-                } else {
-                    bufferedStream.close();
-                    bufferedStream = null;
-                }
-                mReader = new CRAMFileReader(file, bufferedStream);
-            } else if (isSAMFile(bufferedStream)) {
-                if (indexFile != null) {
-                    bufferedStream.close();
-                    throw new RuntimeException("Cannot use index file with textual SAM file");
-                }
-                mIsBinary = false;
-                mReader = new SAMTextReader(bufferedStream, file, validationStringency, this.samRecordFactory);
-            } else {
-                bufferedStream.close();
-                throw new SAMFormatException("Unrecognized file format");
-            }
-
-            setValidationStringency(validationStringency);
-            mReader.setSAMRecordFactory(this.samRecordFactory);
-        } catch (final IOException e) {
-            throw new RuntimeIOException(e);
-        }
-    }
-
-    private static int readBytes(final InputStream stream, final byte[] buffer, final int offset, final int length)
-            throws IOException {
-        int bytesRead = 0;
-        while (bytesRead < length) {
-            final int count = stream.read(buffer, offset + bytesRead, length - bytesRead);
-            if (count <= 0) {
-                break;
-            }
-            bytesRead += count;
-        }
-        return bytesRead;
-    }
-
-    private boolean isSAMFile(final InputStream stream) {
-        // For now, assume every non-binary file is a SAM text file.
-        return true;
-    }
-
-    @Override
-    public String toString() {
-        if (this.samFile == null) {
-            return getClass().getSimpleName() + "{initialized with stream}";
-        } else {
-            return getClass().getSimpleName() + "{" + this.samFile.getAbsolutePath() + "}";
-        }
-    }
-
-    /**
-     * Convenience method to create a QueryInterval
-     *
-     * @param sequence sequence of interest, must exist in sequence dictionary
-     * @param start    1-based start position, must be >= 1
-     * @param end      1-based end position.
-     * @throws java.lang.IllegalArgumentException if sequence not found in sequence dictionary, or start position < 1
-     */
-    public QueryInterval makeQueryInterval(final String sequence, int start, int end) {
-        int referenceIndex = getFileHeader().getSequenceIndex(sequence);
-        if (referenceIndex < 0) {
-            throw new IllegalArgumentException(String.format("Sequence '%s' not found in sequence dictionary", sequence));
-        }
-        if (start < 1) {
-            throw new IllegalArgumentException("Start position must be >= 1");
-        }
-        return new QueryInterval(referenceIndex, start, end);
-    }
-
-    /**
-     * Convenience method to create a QueryInterval that goes from start to end of given sequence.
-     *
-     * @param sequence sequence of interest, must exist in sequence dictionary
-     * @param start    1-based start position, must be >= 1
-     * @throws java.lang.IllegalArgumentException if sequence not found in sequence dictionary, or start position < 1
-     */
-    public QueryInterval makeQueryInterval(final String sequence, int start) {
-        return makeQueryInterval(sequence, start, 0);
-    }
-
-}
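
A hedged sketch of the indexed query API documented above (the class is deprecated in favour of SamReaderFactory; the BAM path and interval below are placeholders):

    import htsjdk.samtools.SAMFileReader;
    import htsjdk.samtools.SAMRecord;
    import htsjdk.samtools.SAMRecordIterator;

    import java.io.File;

    public class QueryDemo {
        public static void main(final String[] args) {
            // The companion .bai index is located automatically if it follows the naming convention.
            final SAMFileReader reader = new SAMFileReader(new File("sample.bam"));
            try {
                // Overlap query: every read whose alignment intersects chr1:1,000-2,000.
                final SAMRecordIterator iterator = reader.queryOverlapping("chr1", 1000, 2000);
                while (iterator.hasNext()) {
                    final SAMRecord record = iterator.next();
                    // ... process record ...
                }
                iterator.close();
            } finally {
                reader.close();
            }
        }
    }
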
diff --git a/src/java/htsjdk/samtools/SAMFileWriterFactory.java b/src/java/htsjdk/samtools/SAMFileWriterFactory.java
deleted file mode 100644
index 077b490..0000000
--- a/src/java/htsjdk/samtools/SAMFileWriterFactory.java
+++ /dev/null
@@ -1,465 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools;
-
-import htsjdk.samtools.cram.ref.ReferenceSource;
-import htsjdk.samtools.util.BlockCompressedOutputStream;
-import htsjdk.samtools.util.IOUtil;
-import htsjdk.samtools.util.Log;
-import htsjdk.samtools.util.Md5CalculatingOutputStream;
-import htsjdk.samtools.util.RuntimeIOException;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-
-/**
- * Create a writer for writing SAM, BAM, or CRAM files.
- */
-public class SAMFileWriterFactory implements Cloneable {
-    private final static Log log = Log.getInstance(SAMFileWriterFactory.class);
-    private static boolean defaultCreateIndexWhileWriting = Defaults.CREATE_INDEX;
-    private boolean createIndex = defaultCreateIndexWhileWriting;
-    private static boolean defaultCreateMd5File = Defaults.CREATE_MD5;
-    private boolean createMd5File = defaultCreateMd5File;
-    private boolean useAsyncIo = Defaults.USE_ASYNC_IO_FOR_SAMTOOLS;
-    private int asyncOutputBufferSize = AsyncSAMFileWriter.DEFAULT_QUEUE_SIZE;
-    private int bufferSize = Defaults.BUFFER_SIZE;
-    private File tmpDir;
-    /** Compression level: 0 = minimum, 9 = maximum. */
-    private int compressionLevel = BlockCompressedOutputStream.getDefaultCompressionLevel();
-    private Integer maxRecordsInRam = null;
-
-    /** simple constructor */
-    public SAMFileWriterFactory() {
-    }
-    
-    /** copy constructor */
-    public SAMFileWriterFactory( final SAMFileWriterFactory other) {
-        if( other == null ) throw new IllegalArgumentException("SAMFileWriterFactory(null)");
-        this.createIndex = other.createIndex;
-        this.createMd5File = other.createMd5File;
-        this.useAsyncIo = other.useAsyncIo;
-        this.asyncOutputBufferSize = other.asyncOutputBufferSize;
-        this.bufferSize = other.bufferSize;
-        this.tmpDir = other.tmpDir;
-        this.compressionLevel = other.compressionLevel;
-        this.maxRecordsInRam = other.maxRecordsInRam;
-    }
-    
-    @Override
-    public SAMFileWriterFactory clone() {
-        return new SAMFileWriterFactory(this);
-    }
-    
-    /**
-     * Sets the default for whether to create MD5 files for BAM files created by this factory.
-     */
-    public static void setDefaultCreateMd5File(final boolean createMd5File) {
-        defaultCreateMd5File = createMd5File;
-    }
-
-    /**
-     * Sets whether to create md5Files for BAMs from this factory.
-     */
-    public SAMFileWriterFactory setCreateMd5File(final boolean createMd5File) {
-        this.createMd5File = createMd5File;
-        return this;
-    }
-
-    /** Sets the compression level: 0 = none (fastest), 9 = maximum. */
-    public SAMFileWriterFactory setCompressionLevel(final int compressionLevel) {
-        this.compressionLevel = Math.min(9, Math.max(0, compressionLevel));
-        return this;
-    }
-    
-    public int getCompressionLevel() {
-        return compressionLevel;
-    }
-    
-    /**
-     * Sets the default for subsequent SAMFileWriterFactories
-     * that do not specify whether to create an index.
-     * If a BAM (not SAM) file is created, the setting is true, and the file header specifies coordinate order,
-     * then a BAM index file will be written along with the BAM file.
-     *
-     * @param setting whether to attempt to create a BAM index while creating the BAM file
-     */
-    public static void setDefaultCreateIndexWhileWriting(final boolean setting) {
-        defaultCreateIndexWhileWriting = setting;
-    }
-
-    /**
-     * Convenience method allowing new SAMFileWriterFactory().setCreateIndex(true).
-     * Equivalent to SAMFileWriterFactory.setDefaultCreateIndexWhileWriting(true); new SAMFileWriterFactory();
-     * If a BAM or CRAM (not SAM) file is created, the setting is true, and the file header specifies coordinate order,
-     * then an index file will be written along with the output file.
-     *
-     * @param setting whether to attempt to create a BAM index while creating the BAM file.
-     * @return this factory object
-     */
-    public SAMFileWriterFactory setCreateIndex(final boolean setting) {
-        this.createIndex = setting;
-        return this;
-    }
-
-    /**
-     * Before creating a writer that is not presorted, this method may be called in order to override
-     * the default number of SAMRecords stored in RAM before spilling to disk
-     * (cf. SAMFileWriterImpl.MAX_RECORDS_IN_RAM).  When writing very large sorted SAM files, you may need to
-     * call this method in order to avoid running out of file handles.  The RAM available to the JVM may need
-     * to be increased in order to hold the specified number of records in RAM.  This value affects the number
-     * of records stored in subsequent calls to one of the make...() methods.
-     *
-     * @param maxRecordsInRam Number of records to store in RAM before spilling to temporary file when
-     *                        creating a sorted SAM or BAM file.
-     */
-    public SAMFileWriterFactory setMaxRecordsInRam(final int maxRecordsInRam) {
-        this.maxRecordsInRam = maxRecordsInRam;
-        return this;
-    }
-
-    /**
-     * Turn on or off the use of asynchronous IO for writing output SAM and BAM files.  If true then
-     * each SAMFileWriter creates a dedicated thread which is used for compression and IO activities.
-     */
-    public SAMFileWriterFactory setUseAsyncIo(final boolean useAsyncIo) {
-        this.useAsyncIo = useAsyncIo;
-        return this;
-    }
-
-    /**
-     * If and only if using asynchronous IO then sets the maximum number of records that can be buffered per
-     * SAMFileWriter before producers will block when trying to write another SAMRecord.
-     */
-    public SAMFileWriterFactory setAsyncOutputBufferSize(final int asyncOutputBufferSize) {
-        this.asyncOutputBufferSize = asyncOutputBufferSize;
-        return this;
-    }
-
-    /**
-     * Controls size of write buffer.
-     * Default value: {@link htsjdk.samtools.Defaults#BUFFER_SIZE}
-     */
-    public SAMFileWriterFactory setBufferSize(final int bufferSize) {
-        this.bufferSize = bufferSize;
-        return this;
-    }
-
-    /**
-     * Sets the temporary directory to use when sorting data.
-     *
-     * @param tmpDir Path to the temporary directory
-     */
-    public SAMFileWriterFactory setTempDirectory(final File tmpDir) {
-        this.tmpDir = tmpDir;
-        return this;
-    }
-
-    /**
-     * Create a BAMFileWriter that is ready to receive SAMRecords.  Uses default compression level.
-     *
-     * @param header     entire header. Sort order is determined by the sortOrder property of this arg.
-     * @param presorted  if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
-     * @param outputFile where to write the output.
-     */
-    public SAMFileWriter makeBAMWriter(final SAMFileHeader header, final boolean presorted, final File outputFile) {
-        return makeBAMWriter(header, presorted, outputFile, this.getCompressionLevel());
-    }
-
-    /**
-     * Create a BAMFileWriter that is ready to receive SAMRecords.
-     *
-     * @param header           entire header. Sort order is determined by the sortOrder property of this arg.
-     * @param presorted        if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
-     * @param outputFile       where to write the output.
-     * @param compressionLevel Override default compression level with the given value, between 0 (fastest) and 9 (smallest).
-     */
-    public SAMFileWriter makeBAMWriter(final SAMFileHeader header, final boolean presorted, final File outputFile,
-                                       final int compressionLevel) {
-        try {
-            final boolean createMd5File = this.createMd5File && IOUtil.isRegularPath(outputFile);
-            if (this.createMd5File && !createMd5File) {
-                log.warn("Cannot create MD5 file for BAM because output file is not a regular file: " + outputFile.getAbsolutePath());
-            }
-            OutputStream os = IOUtil.maybeBufferOutputStream(new FileOutputStream(outputFile, false), bufferSize);
-            if (createMd5File) os = new Md5CalculatingOutputStream(os, new File(outputFile.getAbsolutePath() + ".md5"));
-            final BAMFileWriter ret = new BAMFileWriter(os, outputFile, compressionLevel);
-            final boolean createIndex = this.createIndex && IOUtil.isRegularPath(outputFile);
-            if (this.createIndex && !createIndex) {
-                log.warn("Cannot create index for BAM because output file is not a regular file: " + outputFile.getAbsolutePath());
-            }
-            if (this.tmpDir != null) ret.setTempDirectory(this.tmpDir);
-            initializeBAMWriter(ret, header, presorted, createIndex);
-
-            if (this.useAsyncIo) return new AsyncSAMFileWriter(ret, this.asyncOutputBufferSize);
-            else return ret;
-        } catch (final IOException ioe) {
-            throw new RuntimeIOException("Error opening file: " + outputFile.getAbsolutePath());
-        }
-    }
-
-    private void initializeBAMWriter(final BAMFileWriter writer, final SAMFileHeader header, final boolean presorted, final boolean createIndex) {
-        writer.setSortOrder(header.getSortOrder(), presorted);
-        if (maxRecordsInRam != null) {
-            writer.setMaxRecordsInRam(maxRecordsInRam);
-        }
-        writer.setHeader(header);
-        if (createIndex && writer.getSortOrder().equals(SAMFileHeader.SortOrder.coordinate)) {
-            writer.enableBamIndexConstruction();
-        }
-    }
-
-    /**
-     * Create a SAMTextWriter that is ready to receive SAMRecords.
-     *
-     * @param header     entire header. Sort order is determined by the sortOrder property of this arg.
-     * @param presorted  if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
-     * @param outputFile where to write the output.
-     */
-    public SAMFileWriter makeSAMWriter(final SAMFileHeader header, final boolean presorted, final File outputFile) {
-        try {
-            final SAMTextWriter ret = this.createMd5File
-                    ? new SAMTextWriter(new Md5CalculatingOutputStream(new FileOutputStream(outputFile, false),
-                    new File(outputFile.getAbsolutePath() + ".md5")))
-                    : new SAMTextWriter(outputFile);
-            ret.setSortOrder(header.getSortOrder(), presorted);
-            if (maxRecordsInRam != null) {
-                ret.setMaxRecordsInRam(maxRecordsInRam);
-            }
-            ret.setHeader(header);
-
-            if (this.useAsyncIo) return new AsyncSAMFileWriter(ret, this.asyncOutputBufferSize);
-            else return ret;
-        } catch (final IOException ioe) {
-            throw new RuntimeIOException("Error opening file: " + outputFile.getAbsolutePath());
-        }
-    }
-
-    /**
-     * Create a SAMTextWriter for writing to a stream that is ready to receive SAMRecords.
-     * This method does not support the creation of an MD5 file
-     *
-     * @param header    entire header. Sort order is determined by the sortOrder property of this arg.
-     * @param presorted if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
-     * @param stream    the stream to write records to.  Note that this method does not buffer the stream, so the
-     *                  caller must buffer if desired.  Note that PrintStream is buffered.
-     */
-    public SAMFileWriter makeSAMWriter(final SAMFileHeader header, final boolean presorted, final OutputStream stream) {
-        return initWriter(header, presorted, false, new SAMTextWriter(stream));
-    }
-
-    /**
-     * Create a BAMFileWriter for writing to a stream that is ready to receive SAMRecords.
-     * This method does not support the creation of an MD5 file
-     *
-     * @param header    entire header. Sort order is determined by the sortOrder property of this arg.
-     * @param presorted if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
-     * @param stream    the stream to write records to.  Note that this method does not buffer the stream, so the
-     *                  caller must buffer if desired.  Note that PrintStream is buffered.
-     */
-
-    public SAMFileWriter makeBAMWriter(final SAMFileHeader header, final boolean presorted, final OutputStream stream) {
-        return initWriter(header, presorted, true, new BAMFileWriter(stream, null, this.getCompressionLevel()));
-    }
-
-    /**
-     * Initialize SAMTextWriter or a BAMFileWriter and possibly wrap in AsyncSAMFileWriter
-     *
-     * @param header    entire header. Sort order is determined by the sortOrder property of this arg.
-     * @param presorted if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
-     * @param binary    do we want to generate a BAM or a SAM
-     * @param writer    SAM or BAM writer to initialize and maybe wrap.
-     */
-
-    private SAMFileWriter initWriter(final SAMFileHeader header, final boolean presorted, final boolean binary,
-                                     final SAMFileWriterImpl writer) {
-        writer.setSortOrder(header.getSortOrder(), presorted);
-        if (maxRecordsInRam != null) {
-            writer.setMaxRecordsInRam(maxRecordsInRam);
-        }
-        writer.setHeader(header);
-
-        if (this.useAsyncIo) return new AsyncSAMFileWriter(writer, this.asyncOutputBufferSize);
-        else return writer;
-    }
-
-    /**
-     * Create either a SAM or a BAM writer based on examination of the outputFile extension.
-     *
-     * @param header     entire header. Sort order is determined by the sortOrder property of this arg.
-     * @param presorted  if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
-     * @param outputFile where to write the output.  Must end with .sam or .bam.
-     * @return SAM or BAM writer based on file extension of outputFile.
-     */
-    public SAMFileWriter makeSAMOrBAMWriter(final SAMFileHeader header, final boolean presorted, final File outputFile) {
-        final String filename = outputFile.getName();
-        if (filename.endsWith(BamFileIoUtils.BAM_FILE_EXTENSION)) {
-            return makeBAMWriter(header, presorted, outputFile);
-        }
-        if (filename.endsWith(".sam")) {
-            return makeSAMWriter(header, presorted, outputFile);
-        }
-        return makeBAMWriter(header, presorted, outputFile);
-    }
-
-    /**
-     *
-     * Create a SAM, BAM or CRAM writer based on examination of the outputFile extension.
-     *
-     * @param header header. Sort order is determined by the sortOrder property of this arg.
-     * @param presorted if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
-     * @param outputFile where to write the output.  Must end with .sam, .bam or .cram.
-     * @param referenceFasta reference sequence file
-     * @return SAMFileWriter appropriate for the file type specified in outputFile
-     *
-     */
-    public SAMFileWriter makeWriter(final SAMFileHeader header, final boolean presorted, final File outputFile, final File referenceFasta) {
-        if (outputFile.getName().endsWith(SamReader.Type.CRAM_TYPE.fileExtension())) {
-            return makeCRAMWriter(header, presorted, outputFile, referenceFasta);
-        }
-        else {
-            return makeSAMOrBAMWriter(header, presorted, outputFile);
-        }
-    }
-
-    /**
-     * Create a CRAMFileWriter on an output stream. Requires the input to be presorted to match the sort order defined
-     * by the input header.
-     *
-     * Note: does not honor factory settings for CREATE_MD5, CREATE_INDEX, USE_ASYNC_IO.
-     *
-     * @param header entire header. Sort order is determined by the sortOrder property of this arg.
-     * @param stream where to write the output.
-     * @param referenceFasta reference sequence file
-     * @return CRAMFileWriter
-     */
-    public CRAMFileWriter makeCRAMWriter(final SAMFileHeader header, final OutputStream stream, final File referenceFasta) {
-        // create the CRAMFileWriter directly without propagating factory settings
-        final CRAMFileWriter writer = new CRAMFileWriter(stream, new ReferenceSource(referenceFasta), header, null);
-        setCRAMWriterDefaults(writer);
-        return writer;
-    }
-
-    /**
-     * Create a CRAMFileWriter on an output file. Requires input records to be presorted to match the
-     * sort order defined by the input header.
-     *
-     * Note: does not honor factory settings for USE_ASYNC_IO.
-     *
-     * @param header entire header. Sort order is determined by the sortOrder property of this arg.
-     * @param outputFile where to write the output.  Must end with .sam, .bam or .cram.
-     * @param referenceFasta reference sequence file
-     * @return CRAMFileWriter
-     *
-     */
-    public CRAMFileWriter makeCRAMWriter(final SAMFileHeader header, final File outputFile, final File referenceFasta) {
-        return createCRAMWriterWithSettings(header, true, outputFile, referenceFasta);
-    }
-
-    /**
-     * Create a CRAMFileWriter on an output file.
-     *
-     * Note: does not honor factory setting for USE_ASYNC_IO.
-     *
-     * @param header entire header. Sort order is determined by the sortOrder property of this arg.
-     * @param presorted  if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
-     * @param outputFile where to write the output.  Must end with .sam, .bam or .cram.
-     * @param referenceFasta reference sequence file
-     * @return CRAMFileWriter
-     *
-     */
-    public CRAMFileWriter makeCRAMWriter(final SAMFileHeader header, final boolean presorted, final File outputFile, final File referenceFasta) {
-        return createCRAMWriterWithSettings(header, presorted, outputFile, referenceFasta);
-    }
-
-    /**
-     * Create a CRAMFileWriter on an output file based on factory settings.
-     *
-     * Note: does not honor the factory setting for USE_ASYNC_IO.
-     *
-     * @param header entire header. Sort order is determined by the sortOrder property of this arg.
-     * @param presorted  if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
-     * @param outputFile where to write the output.  Must end with .sam, .bam or .cram.
-     * @param referenceFasta reference sequence file
-     * @return CRAMFileWriter
-     */
-    private CRAMFileWriter createCRAMWriterWithSettings(
-            final SAMFileHeader header,
-            final boolean presorted,
-            final File outputFile,
-            final File referenceFasta) {
-        OutputStream cramOS = null;
-        OutputStream indexOS = null ;
-
-        if (createIndex) {
-            if (!IOUtil.isRegularPath(outputFile)) {
-                log.warn("Cannot create index for CRAM because output file is not a regular file: " + outputFile.getAbsolutePath());
-            }
-            else {
-                try {
-                    final File indexFile = new File(outputFile.getAbsolutePath() + BAMIndex.BAMIndexSuffix) ;
-                    indexOS = new FileOutputStream(indexFile) ;
-                }
-                catch (final IOException ioe) {
-                    throw new RuntimeIOException("Error creating index file for: " + outputFile.getAbsolutePath()+ BAMIndex.BAMIndexSuffix);
-                }
-            }
-        }
-
-        try {
-            cramOS = IOUtil.maybeBufferOutputStream(new FileOutputStream(outputFile, false), bufferSize);
-        }
-        catch (final IOException ioe) {
-            throw new RuntimeIOException("Error creating CRAM file: " + outputFile.getAbsolutePath());
-        }
-
-        final CRAMFileWriter writer = new CRAMFileWriter(
-                createMd5File ? new Md5CalculatingOutputStream(cramOS, new File(outputFile.getAbsolutePath() + ".md5")) : cramOS,
-                indexOS,
-                presorted,
-                new ReferenceSource(referenceFasta),
-                header,
-                outputFile.getAbsolutePath());
-        setCRAMWriterDefaults(writer);
-
-        return writer;
-    }
-
-    // Set the default CRAM writer preservation parameters
-    private void setCRAMWriterDefaults(final CRAMFileWriter writer) {
-        writer.setPreserveReadNames(true);
-        writer.setCaptureAllTags(true);
-    }
-
-    @Override
-    public String toString() {
-        return "SAMFileWriterFactory [createIndex=" + createIndex + ", createMd5File=" + createMd5File + ", useAsyncIo="
-                + useAsyncIo + ", asyncOutputBufferSize=" + asyncOutputBufferSize + ", bufferSize=" + bufferSize
-                + ", tmpDir=" + tmpDir + ", compressionLevel=" + compressionLevel + ", maxRecordsInRam="
-                + maxRecordsInRam + "]";
-    }
-
-}
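Typical use of the factory deleted above looks roughly like the sketch below; it assumes an existing SAMFileHeader (header) and a source of SAMRecords (records), and the output path is illustrative only:

    import htsjdk.samtools.SAMFileHeader;
    import htsjdk.samtools.SAMFileWriter;
    import htsjdk.samtools.SAMFileWriterFactory;
    import htsjdk.samtools.SAMRecord;

    import java.io.File;

    // Write coordinate-sorted records to a BAM file; the index is only built when
    // the header sort order is coordinate and the output is a regular file.
    final SAMFileWriter writer = new SAMFileWriterFactory()
            .setCreateIndex(true)
            .setCreateMd5File(true)
            .makeSAMOrBAMWriter(header, /* presorted = */ true, new File("out.bam"));
    try {
        for (final SAMRecord rec : records) {
            writer.addAlignment(rec);
        }
    } finally {
        writer.close();
    }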
diff --git a/src/java/htsjdk/samtools/SAMFlag.java b/src/java/htsjdk/samtools/SAMFlag.java
deleted file mode 100644
index 6752b6b..0000000
--- a/src/java/htsjdk/samtools/SAMFlag.java
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * The MIT License
- *
- * Author: Pierre Lindenbaum PhD @yokofakun
- *  Institut du Thorax - Nantes - France
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools;
-
-import java.util.HashSet;
-import java.util.Set;
-
-/**
- * SAM flags as an enum, to be used in GUIs, menus, etc.
- */
-public enum SAMFlag {
-    READ_PAIRED(                    0x1,    "Template having multiple segments in sequencing"),
-    PROPER_PAIR(                    0x2,    "Each segment properly aligned according to the aligner"),
-    READ_UNMAPPED(                  0x4,    "Segment unmapped"),
-    MATE_UNMAPPED(                  0x8,    "Next segment in the template unmapped"),
-    READ_REVERSE_STRAND(            0x10,   "SEQ being reverse complemented"),
-    MATE_REVERSE_STRAND(            0x20,   "SEQ of the next segment in the template being reverse complemented"),
-    FIRST_OF_PAIR(                  0x40,   "The first segment in the template"),
-    SECOND_OF_PAIR(                 0x80,   "The last segment in the template"),
-    NOT_PRIMARY_ALIGNMENT(          0x100,  "Secondary alignment"),
-    READ_FAILS_VENDOR_QUALITY_CHECK(0x200,  "Not passing quality controls"),
-    DUPLICATE_READ(                 0x400,  "PCR or optical duplicate"), 
-    SUPPLEMENTARY_ALIGNMENT(        0x800,  "Supplementary alignment")
-    ;
-
-    /* visible for the package, to be used by SAMRecord */
-    final int flag;
-    private final String description;
-
-    SAMFlag(int flag,String description) {
-        this.flag = flag;
-        this.description = description;
-    }
-
-    /** @return this flag as an int */
-    public int intValue() {
-        return flag;
-    }
-
-    /** @return a human label for this SAMFlag */
-    public String getLabel() {
-        return name().toLowerCase().replace('_', ' ');
-    }
-
-    /** @return a human description for this SAMFlag */
-    public String getDescription() {
-        return this.description;
-    }
-
-    /** @return the SAMFlag for the value 'flag' or null if it was not found */
-    public static SAMFlag valueOf(int flag) {
-        for (SAMFlag f : values()) {
-            if (flag == f.flag)
-                return f;
-        }
-        return null;
-    }
-
-    /** @return the SAMFlag with the given name, or null if it was not found */
-    public static SAMFlag findByName(String flag) {
-        for (SAMFlag f : values()) {
-            if (f.name().equals(flag))
-                return f;
-        }
-        return null;
-    }
-
-    /** @return true if this flag's bit is set in the given flag value */
-    public boolean isSet(int flag) {
-        return (this.flag & flag) != 0;
-    }
-
-    /** @return true if this flag's bit is not set in the given flag value */
-    public boolean isUnset(int flag) {
-        return !isSet(flag);
-    }
-
-    /** @return the java.util.Set of SAMFlag values that are set in 'flag' */
-    public static Set<SAMFlag> getFlags(int flag) {
-        Set<SAMFlag> set = new HashSet<SAMFlag>();
-        for (SAMFlag f : values()) {
-            if (f.isSet(flag))
-                set.add(f);
-        }
-        return set;
-    }
-}
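As a rough usage sketch, the enum deleted above decodes a raw FLAG value into its individual bits; the value 99 below is only an example (paired, proper pair, mate reverse strand, first of pair):

    import htsjdk.samtools.SAMFlag;

    import java.util.Set;

    final int flag = 99;   // 0x1 | 0x2 | 0x20 | 0x40
    final Set<SAMFlag> bits = SAMFlag.getFlags(flag);
    for (final SAMFlag f : bits) {
        System.out.println(f.getLabel() + ": " + f.getDescription());
    }
    // Individual bits can also be tested directly:
    final boolean paired = SAMFlag.READ_PAIRED.isSet(flag);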
diff --git a/src/java/htsjdk/samtools/SAMLineParser.java b/src/java/htsjdk/samtools/SAMLineParser.java
deleted file mode 100644
index ee84e1d..0000000
--- a/src/java/htsjdk/samtools/SAMLineParser.java
+++ /dev/null
@@ -1,469 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2012 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools;
-
-import htsjdk.samtools.util.StringUtil;
-
-import java.io.File;
-import java.util.List;
-import java.util.Map;
-import java.util.regex.Pattern;
-
-/**
- * This class enables creation of a SAMRecord object from a String in SAM text format.
- */
-public class SAMLineParser {
-
-    // From SAM specification
-    private static final int QNAME_COL = 0;
-    private static final int FLAG_COL = 1;
-    private static final int RNAME_COL = 2;
-    private static final int POS_COL = 3;
-    private static final int MAPQ_COL = 4;
-    private static final int CIGAR_COL = 5;
-    private static final int MRNM_COL = 6;
-    private static final int MPOS_COL = 7;
-    private static final int ISIZE_COL = 8;
-    private static final int SEQ_COL = 9;
-    private static final int QUAL_COL = 10;
-
-    private static final int NUM_REQUIRED_FIELDS = 11;
-
-    // Read string must contain only these characters
-    private static final Pattern VALID_BASES = Pattern
-            .compile("^[acmgrsvtwyhkdbnACMGRSVTWYHKDBN.=]+$");
-
-    /**
-     * Allocate this once rather than for every line as a performance
-     * optimization. The size is arbitrary -- merely large enough to handle the
-     * maximum number of fields we might expect from a reasonable SAM file.
-     */
-    private final String[] mFields = new String[10000];
-
-    /**
-     * Add information about the origin (reader and position) to SAM records.
-     */
-    private final SamReader mParentReader;
-    private final SAMRecordFactory samRecordFactory;
-    private final ValidationStringency validationStringency;
-    private final SAMFileHeader mFileHeader;
-    private final File mFile;
-
-    private final TextTagCodec tagCodec = new TextTagCodec();
-
-    private int currentLineNumber;
-    private String currentLine;
-
-    //
-    // Constructors
-    //
-
-    /**
-     * Public constructor. Use the default SAMRecordFactory and stringency.
-     *
-     * @param samFileHeader SAM file header
-     */
-    public SAMLineParser(final SAMFileHeader samFileHeader) {
-
-        this(new DefaultSAMRecordFactory(),
-                ValidationStringency.DEFAULT_STRINGENCY, samFileHeader,
-                null, null);
-    }
-
-    /**
-     * Public constructor. Use the default SAMRecordFactory and stringency.
-     *
-     * @param samFileHeader SAM file header
-     * @param samFileReader SAM file reader For passing to SAMRecord.setFileSource, may be null.
-     * @param samFile       SAM file being read (for error message only, may be null)
-     */
-    public SAMLineParser(final SAMFileHeader samFileHeader,
-                         final SamReader samFileReader, final File samFile) {
-
-        this(new DefaultSAMRecordFactory(),
-                ValidationStringency.DEFAULT_STRINGENCY, samFileHeader,
-                samFileReader, samFile);
-    }
-
-    /**
-     * Public constructor.
-     *
-     * @param samRecordFactory     SamRecord Factory
-     * @param validationStringency validation stringency
-     * @param samFileHeader        SAM file header
-     * @param samFileReader        SAM file reader For passing to SAMRecord.setFileSource, may be null.
-     * @param samFile              SAM file being read (for error message only, may be null)
-     */
-    public SAMLineParser(final SAMRecordFactory samRecordFactory,
-                         final ValidationStringency validationStringency,
-                         final SAMFileHeader samFileHeader, final SamReader samFileReader,
-                         final File samFile) {
-
-        if (samRecordFactory == null)
-            throw new NullPointerException("The SamRecordFactory must be set");
-
-        if (validationStringency == null)
-            throw new NullPointerException("The validationStringency must be set");
-
-        if (samFileHeader == null)
-            throw new NullPointerException("The mFileHeader must be set");
-
-        this.samRecordFactory = samRecordFactory;
-        this.validationStringency = validationStringency;
-        this.mFileHeader = samFileHeader;
-
-        // Can be null
-        this.mParentReader = samFileReader;
-
-        // Can be null
-        this.mFile = samFile;
-    }
-
-    /**
-     * Get the File header.
-     *
-     * @return the SAM file header
-     */
-    public SAMFileHeader getFileHeader() {
-
-        return this.mFileHeader;
-    }
-
-    /**
-     * Get validation stringency.
-     *
-     * @return validation stringency
-     */
-    public ValidationStringency getValidationStringency() {
-
-        return this.validationStringency;
-    }
-
-    private int parseInt(final String s, final String fieldName) {
-        final int ret;
-        try {
-            ret = Integer.parseInt(s);
-        } catch (NumberFormatException e) {
-            throw reportFatalErrorParsingLine("Non-numeric value in "
-                    + fieldName + " column");
-        }
-        return ret;
-    }
-
-    private void validateReferenceName(final String rname, final String fieldName) {
-        if (rname.equals("=")) {
-            if (fieldName.equals("MRNM")) {
-                return;
-            }
-            reportErrorParsingLine("= is not a valid value for "
-                    + fieldName + " field.");
-        }
-        if (!this.mFileHeader.getSequenceDictionary().isEmpty()) {
-            if (this.mFileHeader.getSequence(rname) == null) {
-                reportErrorParsingLine(fieldName
-                        + " '" + rname + "' not found in any SQ record");
-            }
-        }
-    }
-
-    /**
-     * Parse a SAM line.
-     *
-     * @param line line to parse
-     * @return a new SAMRecord object
-     */
-    public SAMRecord parseLine(final String line) {
-
-        return parseLine(line, -1);
-    }
-
-    /**
-     * Parse a SAM line.
-     *
-     * @param line       line to parse
-     * @param lineNumber line number in the file. If the line number is not known,
-     *                   it can be <= 0.
-     * @return a new SAMRecord object
-     */
-    public SAMRecord parseLine(final String line, final int lineNumber) {
-
-        final String mCurrentLine = line;
-        this.currentLineNumber = lineNumber;
-        this.currentLine = line;
-
-        final int numFields = StringUtil.split(mCurrentLine, mFields, '\t');
-        if (numFields < NUM_REQUIRED_FIELDS) {
-            throw reportFatalErrorParsingLine("Not enough fields");
-        }
-        if (numFields == mFields.length) {
-            reportErrorParsingLine("Too many fields in SAM text record.");
-        }
-        for (int i = 0; i < numFields; ++i) {
-            if (mFields[i].isEmpty()) {
-                reportErrorParsingLine("Empty field at position " + i + " (zero-based)");
-            }
-        }
-        final SAMRecord samRecord =
-                samRecordFactory.createSAMRecord(this.mFileHeader);
-        samRecord.setValidationStringency(this.validationStringency);
-        if (mParentReader != null)
-            samRecord.setFileSource(new SAMFileSource(mParentReader, null));
-        samRecord.setHeader(this.mFileHeader);
-        samRecord.setReadName(mFields[QNAME_COL]);
-
-        final int flags = parseInt(mFields[FLAG_COL], "FLAG");
-        samRecord.setFlags(flags);
-
-        String rname = mFields[RNAME_COL];
-        if (!rname.equals("*")) {
-            rname = SAMSequenceRecord.truncateSequenceName(rname);
-            validateReferenceName(rname, "RNAME");
-            samRecord.setReferenceName(rname);
-        } else if (!samRecord.getReadUnmappedFlag()) {
-            reportErrorParsingLine("RNAME is not specified but flags indicate mapped");
-        }
-
-        final int pos = parseInt(mFields[POS_COL], "POS");
-        final int mapq = parseInt(mFields[MAPQ_COL], "MAPQ");
-        final String cigar = mFields[CIGAR_COL];
-        if (!SAMRecord.NO_ALIGNMENT_REFERENCE_NAME.equals(samRecord
-                .getReferenceName())) {
-            if (pos == 0) {
-                reportErrorParsingLine("POS must be non-zero if RNAME is specified");
-            }
-            if (!samRecord.getReadUnmappedFlag() && cigar.equals("*")) {
-                reportErrorParsingLine("CIGAR must not be '*' if RNAME is specified");
-            }
-        } else {
-            if (pos != 0) {
-                reportErrorParsingLine("POS must be zero if RNAME is not specified");
-            }
-            if (mapq != 0) {
-                reportErrorParsingLine("MAPQ must be zero if RNAME is not specified");
-            }
-            if (!cigar.equals("*")) {
-                reportErrorParsingLine("CIGAR must be '*' if RNAME is not specified");
-            }
-        }
-        samRecord.setAlignmentStart(pos);
-        samRecord.setMappingQuality(mapq);
-        samRecord.setCigarString(cigar);
-
-        String mateRName = mFields[MRNM_COL];
-        if (mateRName.equals("*")) {
-            if (samRecord.getReadPairedFlag() && !samRecord.getMateUnmappedFlag()) {
-                reportErrorParsingLine("MRNM not specified but flags indicate mate mapped");
-            }
-        } else {
-            if (!samRecord.getReadPairedFlag()) {
-                reportErrorParsingLine("MRNM specified but flags indicate unpaired");
-            }
-            if (!"=".equals(mateRName)) {
-                mateRName = SAMSequenceRecord.truncateSequenceName(mateRName);
-            }
-            validateReferenceName(mateRName, "MRNM");
-            if (mateRName.equals("=")) {
-                if (samRecord.getReferenceName() == null) {
-                    reportErrorParsingLine("MRNM is '=', but RNAME is not set");
-                }
-                samRecord.setMateReferenceName(samRecord.getReferenceName());
-            } else {
-                samRecord.setMateReferenceName(mateRName);
-            }
-        }
-
-        final int matePos = parseInt(mFields[MPOS_COL], "MPOS");
-        final int isize = parseInt(mFields[ISIZE_COL], "ISIZE");
-        if (!samRecord.getMateReferenceName().equals(
-                SAMRecord.NO_ALIGNMENT_REFERENCE_NAME)) {
-            if (matePos == 0) {
-                reportErrorParsingLine("MPOS must be non-zero if MRNM is specified");
-            }
-        } else {
-            if (matePos != 0) {
-                reportErrorParsingLine("MPOS must be zero if MRNM is not specified");
-            }
-            if (isize != 0) {
-                reportErrorParsingLine("ISIZE must be zero if MRNM is not specified");
-            }
-        }
-        samRecord.setMateAlignmentStart(matePos);
-        samRecord.setInferredInsertSize(isize);
-        if (!mFields[SEQ_COL].equals("*")) {
-            validateReadBases(mFields[SEQ_COL]);
-            samRecord.setReadString(mFields[SEQ_COL]);
-        } else {
-            samRecord.setReadBases(SAMRecord.NULL_SEQUENCE);
-        }
-        if (!mFields[QUAL_COL].equals("*")) {
-            if (samRecord.getReadBases() == SAMRecord.NULL_SEQUENCE) {
-                reportErrorParsingLine("QUAL should not be specified if SEQ is not specified");
-            }
-            if (samRecord.getReadString().length() != mFields[QUAL_COL].length()) {
-                reportErrorParsingLine("length(QUAL) != length(SEQ)");
-            }
-            samRecord.setBaseQualityString(mFields[QUAL_COL]);
-        } else {
-            samRecord.setBaseQualities(SAMRecord.NULL_QUALS);
-        }
-
-        for (int i = NUM_REQUIRED_FIELDS; i < numFields; ++i) {
-            parseTag(samRecord, mFields[i]);
-        }
-
-        // Only call samRecord.isValid() if errors would be reported since the validation
-        // is quite expensive in and of itself.
-        if (this.validationStringency != ValidationStringency.SILENT) {
-            final List<SAMValidationError> validationErrors = samRecord.isValid();
-            if (validationErrors != null) {
-                for (final SAMValidationError errorMessage : validationErrors) {
-                    reportErrorParsingLine(errorMessage.getMessage());
-                }
-            }
-        }
-
-        return samRecord;
-    }
-
-    private void validateReadBases(final String bases) {
-        /*
-        * Using regex is slow, so check for invalid characters via
-        * isValidReadBase(), which hopefully the JIT will optimize. if
-        * (!VALID_BASES.matcher(bases).matches()) {
-        * reportErrorParsingLine("Invalid character in read bases"); }
-        */
-        for (int i = 0; i < bases.length(); ++i) {
-            if (!isValidReadBase(bases.charAt(i))) {
-                reportErrorParsingLine("Invalid character in read bases");
-                return;
-            }
-        }
-    }
-
-    private boolean isValidReadBase(final char base) {
-        switch (base) {
-            case 'a':
-            case 'c':
-            case 'm':
-            case 'g':
-            case 'r':
-            case 's':
-            case 'v':
-            case 't':
-            case 'w':
-            case 'y':
-            case 'h':
-            case 'k':
-            case 'd':
-            case 'b':
-            case 'n':
-            case 'A':
-            case 'C':
-            case 'M':
-            case 'G':
-            case 'R':
-            case 'S':
-            case 'V':
-            case 'T':
-            case 'W':
-            case 'Y':
-            case 'H':
-            case 'K':
-            case 'D':
-            case 'B':
-            case 'N':
-            case '.':
-            case '=':
-                return true;
-            default:
-                return false;
-        }
-    }
-
-    private void parseTag(final SAMRecord samRecord, final String tag) {
-        Map.Entry<String, Object> entry = null;
-        try {
-            entry = tagCodec.decode(tag);
-        } catch (SAMFormatException e) {
-            reportErrorParsingLine(e);
-        }
-        if (entry != null) {
-            if (entry.getValue() instanceof TagValueAndUnsignedArrayFlag) {
-                final TagValueAndUnsignedArrayFlag valueAndFlag =
-                        (TagValueAndUnsignedArrayFlag) entry.getValue();
-                if (valueAndFlag.isUnsignedArray) {
-                    samRecord.setUnsignedArrayAttribute(entry.getKey(),
-                            valueAndFlag.value);
-                } else {
-                    samRecord.setAttribute(entry.getKey(), valueAndFlag.value);
-                }
-            } else {
-                samRecord.setAttribute(entry.getKey(), entry.getValue());
-            }
-        }
-    }
-
-    //
-    // Error methods
-    //
-
-    private RuntimeException reportFatalErrorParsingLine(final String reason) {
-        return new SAMFormatException(makeErrorString(reason));
-    }
-
-    private void reportErrorParsingLine(final String reason) {
-        final String errorMessage = makeErrorString(reason);
-
-        if (validationStringency == ValidationStringency.STRICT) {
-            throw new SAMFormatException(errorMessage);
-        } else if (validationStringency == ValidationStringency.LENIENT) {
-            System.err
-                    .println("Ignoring SAM validation error due to lenient parsing:");
-            System.err.println(errorMessage);
-        }
-    }
-
-    private void reportErrorParsingLine(final Exception e) {
-        final String errorMessage = makeErrorString(e.getMessage());
-        if (validationStringency == ValidationStringency.STRICT) {
-            throw new SAMFormatException(errorMessage);
-        } else if (validationStringency == ValidationStringency.LENIENT) {
-            System.err
-                    .println("Ignoring SAM validation error due to lenient parsing:");
-            System.err.println(errorMessage);
-        }
-    }
-
-    private String makeErrorString(final String reason) {
-        String fileMessage = "";
-        if (mFile != null) {
-            fileMessage = "File " + mFile + "; ";
-        }
-        return "Error parsing text SAM file. "
-                + reason + "; " + fileMessage + "Line "
-                + (this.currentLineNumber <= 0 ? "unknown" : this.currentLineNumber)
-                + "\nLine: " + this.currentLine;
-    }
-
-}
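A minimal sketch of how the parser deleted above is driven; it assumes a SAMFileHeader (header) whose sequence dictionary contains chr1, and the SAM line itself is illustrative:

    import htsjdk.samtools.SAMFileHeader;
    import htsjdk.samtools.SAMLineParser;
    import htsjdk.samtools.SAMRecord;

    // Parse one tab-separated SAM text line into a SAMRecord; the second argument
    // is the line number used in error messages (<= 0 if unknown).
    final SAMLineParser parser = new SAMLineParser(header);
    final String line = "read1\t99\tchr1\t100\t60\t8M\t=\t200\t108\tACGTACGT\tIIIIIIII";
    final SAMRecord rec = parser.parseLine(line, 1);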
diff --git a/src/java/htsjdk/samtools/SAMRecordSetBuilder.java b/src/java/htsjdk/samtools/SAMRecordSetBuilder.java
deleted file mode 100644
index c33d30f..0000000
--- a/src/java/htsjdk/samtools/SAMRecordSetBuilder.java
+++ /dev/null
@@ -1,577 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools;
-
-import htsjdk.samtools.DuplicateScoringStrategy.ScoringStrategy;
-import htsjdk.samtools.util.CloseableIterator;
-import htsjdk.samtools.util.CoordMath;
-import htsjdk.samtools.util.RuntimeIOException;
-import htsjdk.samtools.util.SequenceUtil;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Random;
-import java.util.TreeSet;
-
-/**
- * Factory class for creating SAMRecords for testing purposes. Various methods can be called
- * to add new SAM records (or pairs of records) to a list which can then be returned at
- * any point. The records must reference human chromosomes (excluding randoms etc.).
- * <p/>
- * Although this is a class for testing, it is in the src tree because it is included in the sam jarfile.
- *
- * @author Tim Fennell
- */
-public class SAMRecordSetBuilder implements Iterable<SAMRecord> {
-    private static final String[] chroms = {
-            "chr1", "chr2", "chr3", "chr4", "chr5", "chr6", "chr7", "chr8", "chr9", "chr10",
-            "chr11", "chr12", "chr13", "chr14", "chr15", "chr16", "chr17", "chr18", "chr19", "chr20",
-            "chr21", "chr22", "chrX", "chrY", "chrM"
-    };
-    private static final byte[] BASES = {'A', 'C', 'G', 'T'};
-    private static final String READ_GROUP_ID = "1";
-    private static final String SAMPLE = "FREE_SAMPLE";
-    private final Random random = new Random();
-
-    private SAMFileHeader header;
-    private final Collection<SAMRecord> records;
-
-    private int readLength = 36;
-
-    private SAMProgramRecord programRecord = null;
-    private SAMReadGroupRecord readGroup = null;
-    private boolean useNmFlag = false;
-
-    private boolean unmappedHasBasesAndQualities = true;
-    
-    public static final int DEFAULT_CHROMOSOME_LENGTH = 200000000;
-
-    public static final ScoringStrategy DEFAULT_DUPLICATE_SCORING_STRATEGY = ScoringStrategy.TOTAL_MAPPED_REFERENCE_LENGTH;
-
-    /**
-     * Constructs a new SAMRecordSetBuilder with all the data needed to keep the records
-     * sorted in coordinate order.
-     */
-    public SAMRecordSetBuilder() {
-        this(true, SAMFileHeader.SortOrder.coordinate);
-    }
-
-    /**
-     * Construct a new SAMRecordSetBuilder.
-     *
-     * @param sortForMe If true, keep the records created in sorted order.
-     * @param sortOrder If sortForMe is true, defines the sort order.
-     */
-    public SAMRecordSetBuilder(final boolean sortForMe, final SAMFileHeader.SortOrder sortOrder) {
-        this(sortForMe, sortOrder, true);
-    }
-
-    public SAMRecordSetBuilder(final boolean sortForMe, final SAMFileHeader.SortOrder sortOrder, final boolean addReadGroup) {
-        this(sortForMe, sortOrder, addReadGroup, DEFAULT_CHROMOSOME_LENGTH);
-    }
-
-    public SAMRecordSetBuilder(final boolean sortForMe, final SAMFileHeader.SortOrder sortOrder, final boolean addReadGroup, final int defaultChromosomeLength) {
-        this(sortForMe, sortOrder, addReadGroup, defaultChromosomeLength, DEFAULT_DUPLICATE_SCORING_STRATEGY);
-    }
-
-    public SAMRecordSetBuilder(final boolean sortForMe, final SAMFileHeader.SortOrder sortOrder, final boolean addReadGroup,
-                               final int defaultChromosomeLength, final ScoringStrategy duplicateScoringStrategy) {
-        final List<SAMSequenceRecord> sequences = new ArrayList<SAMSequenceRecord>();
-        for (final String chrom : chroms) {
-            final SAMSequenceRecord sequenceRecord = new SAMSequenceRecord(chrom, defaultChromosomeLength);
-            sequences.add(sequenceRecord);
-        }
-
-        this.header = new SAMFileHeader();
-        this.header.setSequenceDictionary(new SAMSequenceDictionary(sequences));
-        this.header.setSortOrder(sortOrder);
-        if (sortForMe) {
-            final SAMRecordComparator comparator;
-            if (sortOrder == SAMFileHeader.SortOrder.queryname) {
-                comparator = new SAMRecordQueryNameComparator();
-            } else {
-                comparator = new SAMRecordCoordinateComparator();
-            }
-            this.records = new TreeSet<SAMRecord>(comparator);
-        } else {
-            this.records = new ArrayList<SAMRecord>();
-        }
-
-        if (addReadGroup) {
-            final SAMReadGroupRecord readGroupRecord = new SAMReadGroupRecord(READ_GROUP_ID);
-            readGroupRecord.setSample(SAMPLE);
-            readGroupRecord.setPlatform("ILLUMINA");
-            final List<SAMReadGroupRecord> readGroups = new ArrayList<SAMReadGroupRecord>();
-            readGroups.add(readGroupRecord);
-            this.header.setReadGroups(readGroups);
-        }
-    }
-
-    public void setUnmappedHasBasesAndQualities(final boolean value) {
-        this.unmappedHasBasesAndQualities = value;
-    }
-
-    public int size() {
-        return this.records.size();
-    }
-
-    /**
-     * Sets the seed of the random number generator for cases in which a repeatable result is desired.
-     *
-     * @param seed the seed to use
-     */
-    public void setRandomSeed(final long seed) {
-        random.setSeed(seed);
-    }
-
-    /**
-     * Adds the given program record to the header, and assigns the PG tag to any SAMRecords
-     * created after it has been added. May be called multiple times in order to assign different
-     * PG IDs to different SAMRecords.  programRecord may be null to stop assignment of PG tag.
-     * It is up to the caller to ensure that program record IDs do not collide.
-     */
-    public void setProgramRecord(final SAMProgramRecord programRecord) {
-        this.programRecord = programRecord;
-        if (programRecord != null) {
-            this.header.addProgramRecord(programRecord);
-        }
-    }
-
-    public void setUseNmFlag(final boolean useNmFlag) {
-        this.useNmFlag = useNmFlag;
-    }
-
-    public void setReadGroup(final SAMReadGroupRecord readGroup) {
-        this.readGroup = readGroup;
-        if (readGroup != null) {
-            this.header.addReadGroup(readGroup);
-        }
-    }
-
-    /** Returns the accumulated list of sam records. */
-    public Collection<SAMRecord> getRecords() { return this.records; }
-
-    public void setHeader(final SAMFileHeader header) {
-        this.header = header.clone();
-    }
-
-
-    /** The record should already have the DS and MC tags computed */
-    public void addRecord(final SAMRecord record) {
-        if (record.getReadPairedFlag() && !record.getMateUnmappedFlag() &&
-                null == record.getAttribute(SAMTagUtil.getSingleton().MC)) {
-            throw new SAMException("Mate Cigar tag (MC) not found in: " + record.getReadName());
-        }
-        this.records.add(record);
-    }
-
-    /** Returns a CloseableIterator over the collection of SAMRecords. */
-    public CloseableIterator<SAMRecord> iterator() {
-        return new CloseableIterator<SAMRecord>() {
-            private final Iterator<SAMRecord> iterator = records.iterator();
-
-            public void close() { /* Do nothing. */ }
-
-            public boolean hasNext() { return this.iterator.hasNext(); }
-
-            public SAMRecord next() { return this.iterator.next(); }
-
-            public void remove() { this.iterator.remove(); }
-        };
-    }
-
-    /**
-     * Creates a fragment record (mapped or unmapped) using the provided contig, start, and optionally the strand,
-     * cigar string, quality string or default quality score.  This does not modify the flag field, which should be updated
-     * if desired before adding the returned record to the list of records.
-     */
-    private SAMRecord createReadNoFlag(final String name, final int contig, final int start, final boolean negativeStrand,
-                                       final boolean recordUnmapped, final String cigar, final String qualityString,
-                                       final int defaultQuality) throws SAMException {
-        final SAMRecord rec = new SAMRecord(this.header);
-        rec.setReadName(name);
-        if (chroms.length <= contig) {
-            throw new SAMException("Contig too big [" + chroms.length + " < " + contig);
-        }
-        if (0 <= contig) {
-            rec.setReferenceIndex(contig);
-            rec.setReferenceName(chroms[contig]);
-            rec.setAlignmentStart(start);
-        }
-        if (!recordUnmapped) {
-            rec.setReadNegativeStrandFlag(negativeStrand);
-            if (null != cigar) {
-                rec.setCigarString(cigar);
-            } else if (!rec.getReadUnmappedFlag()) {
-                rec.setCigarString(readLength + "M");
-            }
-            rec.setMappingQuality(255);
-        } else {
-            rec.setReadUnmappedFlag(true);
-        }
-        rec.setAttribute(SAMTag.RG.name(), READ_GROUP_ID);
-
-        if(useNmFlag){
-            rec.setAttribute(SAMTag.NM.name(), SequenceUtil.calculateSamNmTagFromCigar(rec));
-        }
-
-        if (programRecord != null) {
-            rec.setAttribute(SAMTag.PG.name(), programRecord.getProgramGroupId());
-        }
-
-        if (readGroup != null) {
-            rec.setAttribute(SAMTag.RG.name(), readGroup.getReadGroupId());
-        }
-
-        if (!recordUnmapped || this.unmappedHasBasesAndQualities) {
-            fillInBasesAndQualities(rec, qualityString, defaultQuality);
-        }
-
-        return rec;
-    }
-
-    /**
-     * Adds a skeletal fragment (non-PE) record to the set using the provided
-     * contig start and strand information.
-     */
-    public SAMRecord addFrag(final String name, final int contig, final int start, final boolean negativeStrand) {
-        return addFrag(name, contig, start, negativeStrand, false, null, null, -1);
-    }
-
-    /**
-     * Adds a fragment record (mapped or unmapped) to the set using the provided contig start and optionally the strand,
-     * cigar string, quality string or default quality score.
-     */
-    public SAMRecord addFrag(final String name, final int contig, final int start, final boolean negativeStrand,
-                             final boolean recordUnmapped, final String cigar, final String qualityString,
-                             final int defaultQuality) throws SAMException {
-        return addFrag(name, contig, start, negativeStrand, recordUnmapped, cigar, qualityString, defaultQuality, false);
-    }
-
-    /**
-     * Adds a fragment record (mapped or unmapped) to the set using the provided contig start and optionally the strand,
-     * cigar string, quality string or default quality score.
-     */
-    public SAMRecord addFrag(final String name, final int contig, final int start, final boolean negativeStrand,
-                             final boolean recordUnmapped, final String cigar, final String qualityString,
-                             final int defaultQuality, final boolean isSecondary) throws SAMException {
-        final htsjdk.samtools.SAMRecord rec = createReadNoFlag(name, contig, start, negativeStrand, recordUnmapped, cigar, qualityString, defaultQuality);
-        if (isSecondary) rec.setNotPrimaryAlignmentFlag(true);
-        this.records.add(rec);
-        return rec;
-    }
-
-    /**
-     * Adds a fragment record (mapped or unmapped) to the set using the provided contig start and optionally the strand,
-     * cigar string, quality string or default quality score.
-     */
-    public SAMRecord addFrag(final String name, final int contig, final int start, final boolean negativeStrand,
-                             final boolean recordUnmapped, final String cigar, final String qualityString,
-                             final int defaultQuality, final boolean isSecondary, final boolean isSupplementary) throws SAMException {
-        final htsjdk.samtools.SAMRecord rec = createReadNoFlag(name, contig, start, negativeStrand, recordUnmapped, cigar, qualityString, defaultQuality);
-        if (isSecondary) rec.setNotPrimaryAlignmentFlag(true);
-        if (isSupplementary) rec.setSupplementaryAlignmentFlag(true);
-        this.records.add(rec);
-        return rec;
-    }
-
-
-    /**
-     * Fills in the bases and qualities for the given record. Quality data is randomly generated if the defaultQuality
-     * is set to -1. Otherwise all qualities will be set to defaultQuality. If a quality string is provided that string
-     * will be used instead of the defaultQuality.
-     */
-    private void fillInBasesAndQualities(final SAMRecord rec, final String qualityString, final int defaultQuality) {
-
-        if (null == qualityString) {
-            fillInBasesAndQualities(rec, defaultQuality);
-        } else {
-            fillInBases(rec);
-            rec.setBaseQualityString(qualityString);
-        }
-    }
-
-    /**
-     * Randomly fills in the bases for the given record.
-     */
-    private void fillInBases(final SAMRecord rec) {
-        final int length = this.readLength;
-        final byte[] bases = new byte[length];
-
-        for (int i = 0; i < length; ++i) {
-            bases[i] = BASES[this.random.nextInt(BASES.length)];
-        }
-
-        rec.setReadBases(bases);
-    }
-
-    /**
-     * Adds an unmapped fragment read to the builder.
-     */
-    public void addUnmappedFragment(final String name) {
-        addFrag(name, -1, -1, false, true, null, null, -1, false);
-    }
-
-
-    /**
-     * Adds a skeletal pair of records to the set using the provided
-     * contig and start positions. The pair is assumed to be a
-     * well-formed pair sitting on a single contig.
-     */
-    public void addPair(final String name, final int contig, final int start1, final int start2) {
-        final SAMRecord end1 = new SAMRecord(this.header);
-        final SAMRecord end2 = new SAMRecord(this.header);
-        final boolean end1IsFirstOfPair = this.random.nextBoolean();
-
-        end1.setReadName(name);
-        end1.setReferenceIndex(contig);
-        end1.setAlignmentStart(start1);
-        end1.setReadNegativeStrandFlag(false);
-        end1.setCigarString(readLength + "M");
-        if(useNmFlag) end1.setAttribute(ReservedTagConstants.NM, 0);
-        end1.setMappingQuality(255);
-        end1.setReadPairedFlag(true);
-        end1.setProperPairFlag(true);
-        end1.setMateReferenceIndex(contig);
-        end1.setAttribute(SAMTag.MC.name(), readLength + "M");
-        end1.setMateAlignmentStart(start2);
-        end1.setMateNegativeStrandFlag(true);
-        end1.setFirstOfPairFlag(end1IsFirstOfPair);
-        end1.setSecondOfPairFlag(!end1IsFirstOfPair);
-        end1.setInferredInsertSize((int) CoordMath.getLength(start1, CoordMath.getEnd(start2, this.readLength)));
-        end1.setAttribute(SAMTag.RG.name(), READ_GROUP_ID);
-        if (programRecord != null) {
-            end1.setAttribute(SAMTag.PG.name(), programRecord.getProgramGroupId());
-        }
-        if (readGroup != null) {
-            end1.setAttribute(SAMTag.RG.name(), readGroup.getReadGroupId());
-        }
-        fillInBasesAndQualities(end1);
-
-        end2.setReadName(name);
-        end2.setReferenceIndex(contig);
-        end2.setAlignmentStart(start2);
-        end2.setReadNegativeStrandFlag(true);
-        end2.setCigarString(readLength + "M");
-        if(useNmFlag) end2.setAttribute(ReservedTagConstants.NM,0);
-        end2.setMappingQuality(255);
-        end2.setReadPairedFlag(true);
-        end2.setProperPairFlag(true);
-        end2.setMateReferenceIndex(contig);
-        end2.setAttribute(SAMTag.MC.name(), readLength + "M");
-        end2.setMateAlignmentStart(start1);
-        end2.setMateNegativeStrandFlag(false);
-        end2.setFirstOfPairFlag(!end1IsFirstOfPair);
-        end2.setSecondOfPairFlag(end1IsFirstOfPair);
-        end2.setInferredInsertSize(end1.getInferredInsertSize());
-        end2.setAttribute(SAMTag.RG.name(), READ_GROUP_ID);
-        if (programRecord != null) {
-            end2.setAttribute(SAMTag.PG.name(), programRecord.getProgramGroupId());
-        }
-        if (readGroup != null) {
-            end2.setAttribute(SAMTag.RG.name(), readGroup.getReadGroupId());
-        }
-        fillInBasesAndQualities(end2);
-
-        this.records.add(end1);
-        this.records.add(end2);
-    }
-
-    /**
-     * Adds a pair of records (mapped or unmapped) to the set using the provided contig and start positions.
-     * The pair is assumed to be a well-formed pair sitting on a single contig.
-     */
-    public List<SAMRecord> addPair(final String name, final int contig, final int start1, final int start2,
-                                   final boolean record1Unmapped, final boolean record2Unmapped, final String cigar1,
-                                   final String cigar2, final boolean strand1, final boolean strand2, final int defaultQuality) {
-        return this.addPair(name, contig, contig, start1, start2, record1Unmapped, record2Unmapped, cigar1, cigar2, strand1, strand2, false, false, defaultQuality);
-    }
-
-    /**
-     * Adds a pair of records (mapped or unmapped) to the set using the provided contigs and start positions;
-     * the two ends may be placed on different contigs.
-     */
-    public List<SAMRecord> addPair(final String name, final int contig1, final int contig2, final int start1, final int start2,
-                                   final boolean record1Unmapped, final boolean record2Unmapped, final String cigar1,
-                                   final String cigar2, final boolean strand1, final boolean strand2, final boolean record1NonPrimary,
-                                   final boolean record2NonPrimary, final int defaultQuality) {
-        final List<SAMRecord> recordsList = new LinkedList<SAMRecord>();
-
-        final SAMRecord end1 = createReadNoFlag(name, contig1, start1, strand1, record1Unmapped, cigar1, null, defaultQuality);
-        final SAMRecord end2 = createReadNoFlag(name, contig2, start2, strand2, record2Unmapped, cigar2, null, defaultQuality);
-
-        end1.setReadPairedFlag(true);
-        end1.setFirstOfPairFlag(true);
-
-        if (!record1Unmapped && !record2Unmapped) {
-            end1.setProperPairFlag(true);
-            end2.setProperPairFlag(true);
-        }
-        end2.setReadPairedFlag(true);
-        end2.setSecondOfPairFlag(true);
-
-        if (record1NonPrimary) end1.setNotPrimaryAlignmentFlag(true);
-        if (record2NonPrimary) end2.setNotPrimaryAlignmentFlag(true);
-
-        // set mate info
-        SamPairUtil.setMateInfo(end1, end2, true);
-
-        recordsList.add(end1);
-        recordsList.add(end2);
-
-        records.add(end1);
-        records.add(end2);
-
-        return recordsList;
-    }
-
-    /**
-     * Adds a pair of records (mapped or unmapped) to the set using the provided contig and start positions.
-     * The pair is assumed to be a well-formed pair sitting on a single contig.
-     */
-    public List<SAMRecord> addPair(final String name, final int contig, final int start1, final int start2,
-                                   final boolean record1Unmapped, final boolean record2Unmapped, final String cigar1,
-                                   final String cigar2, final boolean strand1, final boolean strand2, final boolean record1NonPrimary,
-                                   final boolean record2NonPrimary, final int defaultQuality) {
-        return addPair(name, contig, contig, start1, start2, record1Unmapped, record2Unmapped, cigar1, cigar2, strand1, strand2,
-                record1NonPrimary, record2NonPrimary, defaultQuality);
-    }
-
-    /**
-     * Adds a pair with both ends unmapped to the builder.
-     */
-    public void addUnmappedPair(final String name) {
-        final SAMRecord end1 = new SAMRecord(this.header);
-        final SAMRecord end2 = new SAMRecord(this.header);
-        final boolean end1IsFirstOfPair = this.random.nextBoolean();
-
-        end1.setReadName(name);
-        end1.setReadPairedFlag(true);
-        end1.setReadUnmappedFlag(true);
-        end1.setAttribute(SAMTag.MC.name(), null);
-        end1.setProperPairFlag(false);
-        end1.setFirstOfPairFlag(end1IsFirstOfPair);
-        end1.setSecondOfPairFlag(!end1IsFirstOfPair);
-        end1.setMateUnmappedFlag(true);
-        end1.setAttribute(SAMTag.RG.name(), READ_GROUP_ID);
-        if (programRecord != null) {
-            end1.setAttribute(SAMTag.PG.name(), programRecord.getProgramGroupId());
-        }
-        if (this.unmappedHasBasesAndQualities) {
-            fillInBasesAndQualities(end1);
-        }
-
-        end2.setReadName(name);
-        end2.setReadPairedFlag(true);
-        end2.setReadUnmappedFlag(true);
-        end2.setAttribute(SAMTag.MC.name(), null);
-        end2.setProperPairFlag(false);
-        end2.setFirstOfPairFlag(!end1IsFirstOfPair);
-        end2.setSecondOfPairFlag(end1IsFirstOfPair);
-        end2.setMateUnmappedFlag(true);
-        end2.setAttribute(SAMTag.RG.name(), READ_GROUP_ID);
-        if (programRecord != null) {
-            end2.setAttribute(SAMTag.PG.name(), programRecord.getProgramGroupId());
-        }
-        if (this.unmappedHasBasesAndQualities) {
-            fillInBasesAndQualities(end2);
-        }
-
-        this.records.add(end1);
-        this.records.add(end2);
-    }
-
-    /**
-     * Fills in bases and qualities with randomly generated data.
-     * Relies on the alignment start and end having been set to get read length.
-     */
-    private void fillInBasesAndQualities(final SAMRecord rec) {
-        fillInBasesAndQualities(rec, -1);
-    }
-
-    /**
-     * Fills in bases and qualities with a set default quality. If the defaultQuality is set to -1 quality scores will
-     * be randomly generated.
-     * Relies on the alignment start and end having been set to get read length.
-     */
-    private void fillInBasesAndQualities(final SAMRecord rec, final int defaultQuality) {
-        final int length = this.readLength;
-        final byte[] quals = new byte[length];
-
-        if (-1 != defaultQuality) {
-            Arrays.fill(quals, (byte) defaultQuality);
-        } else {
-            for (int i = 0; i < length; ++i) {
-                quals[i] = (byte) this.random.nextInt(50);
-            }
-        }
-        rec.setBaseQualities(quals);
-        fillInBases(rec);
-    }
-
-    /**
-     * Creates a SamReader over the records held by this builder, by writing them to a
-     * temporary BAM file and opening a reader on it.
-     *
-     * @return SamReader over the accumulated records
-     */
-    public SamReader getSamReader() {
-
-        final File tempFile;
-
-        try {
-            tempFile = File.createTempFile("temp", ".sam");
-        } catch (final IOException e) {
-            throw new RuntimeIOException("problems creating tempfile", e);
-        }
-
-        this.header.setAttribute("VN", "1.0");
-        final SAMFileWriter w = new SAMFileWriterFactory().makeBAMWriter(this.header, true, tempFile);
-        for (final SAMRecord r : this.getRecords()) {
-            w.addAlignment(r);
-        }
-
-        w.close();
-
-        final SamReader reader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT).open(tempFile);
-        tempFile.deleteOnExit();
-
-        return reader;
-    }
-
-    public SAMFileHeader getHeader() {
-        return header;
-    }
-
-    public void setReadLength(final int readLength) { this.readLength = readLength; }
-
-}
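
For orientation, a minimal sketch of how the SAMRecordSetBuilder test helper deleted above is typically driven. It assumes the builder's no-argument constructor (defined earlier in the file, outside this excerpt) and that the default header it builds contains at least one reference sequence; the read names are placeholders.

    import htsjdk.samtools.SAMRecord;
    import htsjdk.samtools.SAMRecordSetBuilder;
    import htsjdk.samtools.SamReader;

    public class SAMRecordSetBuilderSketch {
        public static void main(final String[] args) throws Exception {
            // Assumed no-arg constructor; the builder supplies its own header and sequence dictionary.
            final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();

            // A well-formed pair on reference index 0, plus an unmapped fragment (methods shown above).
            builder.addPair("pair1", 0, 100, 300);
            builder.addUnmappedFragment("frag1");

            // Records can be inspected directly ...
            for (final SAMRecord rec : builder.getRecords()) {
                System.out.print(rec.getSAMString());
            }

            // ... or round-tripped through the temporary BAM that getSamReader() writes internally.
            try (final SamReader reader = builder.getSamReader()) {
                System.out.println(reader.getFileHeader().getSequenceDictionary().size() + " sequences in header");
            }
        }
    }
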
diff --git a/src/java/htsjdk/samtools/SAMTextWriter.java b/src/java/htsjdk/samtools/SAMTextWriter.java
deleted file mode 100644
index 26d15bc..0000000
--- a/src/java/htsjdk/samtools/SAMTextWriter.java
+++ /dev/null
@@ -1,188 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools;
-
-import htsjdk.samtools.util.AsciiWriter;
-import htsjdk.samtools.util.RuntimeIOException;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.StringWriter;
-import java.io.Writer;
-
-/**
- * Writer for text-format SAM files.
- */
-public class SAMTextWriter extends SAMFileWriterImpl {
-    private static final String FIELD_SEPARATOR = "\t";
-
-    private final Writer out;
-    // For error reporting only.
-    private final File file;
-    private final TextTagCodec tagCodec = new TextTagCodec();
-    private final SAMTagUtil tagUtil = new SAMTagUtil();
-
-    /**
-     * Constructs a SAMTextWriter that outputs to a Writer.
-     * @param out Writer.
-     */
-    public SAMTextWriter(final Writer out) {
-        this.out = out;
-        this.file = null;
-    }
-
-    /**
-     * Constructs a SAMTextWriter that writes to a File.
-     * @param file Where to write the output.
-     */
-    public SAMTextWriter(final File file) {
-        try {
-            this.file = file;
-            this.out = new AsciiWriter(new FileOutputStream(file));
-        } catch (IOException e) {
-            throw new RuntimeIOException(e);
-        }
-    }
-
-    /**
-     * Returns the Writer used by this instance.  Useful for flushing the output.
-     */
-    public Writer getWriter() {
-        return out;
-    }
-
-    /**
-     * Constructs a SAMTextWriter that writes to an OutputStream.  The OutputStream
-     * is wrapped in an AsciiWriter, which can be retrieved with getWriter().
-     * @param stream Need not be buffered because this class provides buffering. 
-     */
-    public SAMTextWriter(final OutputStream stream) {
-        this.file = null;
-        this.out = new AsciiWriter(stream);
-    }
-
-    /**
-     * Write the record.
-     *
-     * @param alignment SAMRecord.
-     */
-    public void writeAlignment(final SAMRecord alignment) {
-        try {
-            out.write(alignment.getReadName());
-            out.write(FIELD_SEPARATOR);
-            out.write(Integer.toString(alignment.getFlags()));
-            out.write(FIELD_SEPARATOR);
-            out.write(alignment.getReferenceName());
-            out.write(FIELD_SEPARATOR);
-            out.write(Integer.toString(alignment.getAlignmentStart()));
-            out.write(FIELD_SEPARATOR);
-            out.write(Integer.toString(alignment.getMappingQuality()));
-            out.write(FIELD_SEPARATOR);
-            out.write(alignment.getCigarString());
-            out.write(FIELD_SEPARATOR);
-
-            //  == is OK here because these strings are interned
-            if (alignment.getReferenceName() == alignment.getMateReferenceName() &&
-                    SAMRecord.NO_ALIGNMENT_REFERENCE_NAME != alignment.getReferenceName()) {
-                out.write("=");
-            } else {
-                out.write(alignment.getMateReferenceName());
-            }
-            out.write(FIELD_SEPARATOR);
-            out.write(Integer.toString(alignment.getMateAlignmentStart()));
-            out.write(FIELD_SEPARATOR);
-            out.write(Integer.toString(alignment.getInferredInsertSize()));
-            out.write(FIELD_SEPARATOR);
-            out.write(alignment.getReadString());
-            out.write(FIELD_SEPARATOR);
-            out.write(alignment.getBaseQualityString());
-            SAMBinaryTagAndValue attribute = alignment.getBinaryAttributes();
-            while (attribute != null) {
-                out.write(FIELD_SEPARATOR);
-                final String encodedTag;
-                if (attribute.isUnsignedArray()) {
-                    encodedTag = tagCodec.encodeUnsignedArray(tagUtil.makeStringTag(attribute.tag), attribute.value);
-                } else {
-                    encodedTag = tagCodec.encode(tagUtil.makeStringTag(attribute.tag), attribute.value);
-                }
-                out.write(encodedTag);
-                attribute = attribute.getNext();
-            }
-            out.write("\n");
-
-        } catch (IOException e) {
-            throw new RuntimeIOException(e);
-        }
-    }
-
-    /* This method is called by SAMRecord.getSAMString(). */
-    private static SAMTextWriter textWriter = null;
-    private static StringWriter stringWriter = null;
-    static synchronized String getSAMString(final SAMRecord alignment) {
-        if (stringWriter == null) stringWriter = new StringWriter();
-        if (textWriter == null) textWriter = new SAMTextWriter(stringWriter);
-        stringWriter.getBuffer().setLength(0);
-        textWriter.writeAlignment(alignment);
-        return stringWriter.toString();
-    }
-
-    /**
-     * Write the header text.  This method can also be used to write
-     * an arbitrary String, not necessarily the header.
-     *
-     * @param textHeader String containing the text to write.
-     */
-    public void writeHeader(final String textHeader) {
-        try {
-            out.write(textHeader);
-        } catch (IOException e) {
-            throw new RuntimeIOException(e);
-        }
-    }
-
-    /**
-     * Do any required flushing here.
-     */
-    public void finish() {
-        try {
-            out.close();
-        } catch (IOException e) {
-            throw new RuntimeIOException(e);
-        }
-    }
-
-    /**
-     * For producing error messages.
-     *
-     * @return Output filename, or null if there isn't one.
-     */
-    public String getFilename() {
-        if (file == null) {
-            return null;
-        }
-        return file.getAbsolutePath();
-    }
-}
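
A short sketch of driving the SAMTextWriter deleted above directly. The @HD header line is hand-written purely for illustration, and the record comes from the SAMRecordSetBuilder helper shown earlier in this commit; everything else uses only the constructors and methods visible above (writeHeader, writeAlignment, finish).

    import htsjdk.samtools.SAMRecord;
    import htsjdk.samtools.SAMRecordSetBuilder;
    import htsjdk.samtools.SAMTextWriter;

    import java.io.StringWriter;

    public class SAMTextWriterSketch {
        public static void main(final String[] args) {
            final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
            builder.addFrag("read1", 0, 100, false);   // one mapped fragment on reference index 0

            final StringWriter buffer = new StringWriter();
            final SAMTextWriter writer = new SAMTextWriter(buffer);

            // writeHeader() takes already-rendered header text, not a SAMFileHeader object.
            writer.writeHeader("@HD\tVN:1.5\tSO:unsorted\n");
            for (final SAMRecord rec : builder.getRecords()) {
                writer.writeAlignment(rec);
            }
            writer.finish();   // flushes and closes the underlying Writer

            System.out.print(buffer);
        }
    }
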
diff --git a/src/java/htsjdk/samtools/SRAFileReader.java b/src/java/htsjdk/samtools/SRAFileReader.java
deleted file mode 100644
index 14d7df8..0000000
--- a/src/java/htsjdk/samtools/SRAFileReader.java
+++ /dev/null
@@ -1,306 +0,0 @@
-/*===========================================================================
-*
-*                            PUBLIC DOMAIN NOTICE
-*               National Center for Biotechnology Information
-*
-*  This software/database is a "United States Government Work" under the
-*  terms of the United States Copyright Act.  It was written as part of
-*  the author's official duties as a United States Government employee and
-*  thus cannot be copyrighted.  This software/database is freely available
-*  to the public for use. The National Library of Medicine and the U.S.
-*  Government have not placed any restriction on its use or reproduction.
-*
-*  Although all reasonable efforts have been taken to ensure the accuracy
-*  and reliability of the software and data, the NLM and the U.S.
-*  Government do not and cannot warrant the performance or results that
-*  may be obtained by using this software or data. The NLM and the U.S.
-*  Government disclaim all warranties, express or implied, including
-*  warranties of performance, merchantability or fitness for any particular
-*  purpose.
-*
-*  Please cite the author in any work or product based on this material.
-*
-* ===========================================================================
-*
-*/
-
-/**
- * Created by andrii.nikitiuk on 8/11/15.
- */
-
-package htsjdk.samtools;
-
-import htsjdk.samtools.sra.ReferenceCache;
-import htsjdk.samtools.sra.SRAAccession;
-import htsjdk.samtools.util.CloseableIterator;
-
-import htsjdk.samtools.SamReader.Type;
-
-import htsjdk.samtools.util.Log;
-import ngs.ErrorMsg;
-import ngs.ReadCollection;
-import ngs.ReadGroupIterator;
-import ngs.ReferenceIterator;
-import ngs.Reference;
-
-import java.util.ArrayList;
-import java.util.List;
-
-
-public class SRAFileReader extends SamReader.ReaderImplementation implements SamReader.Indexing {
-    private static final Log log = Log.getInstance(SRAFileReader.class);
-    private SRAAccession acc;
-    private SAMFileHeader virtualHeader;
-    private ReadCollection run;
-    private ValidationStringency validationStringency;
-    private SRAIterator.RecordRangeInfo recordRangeInfo;
-    private SRAIndex index;
-    private ReferenceCache cachedReferences;
-
-    public SRAFileReader(final SRAAccession acc) {
-        this.acc = acc;
-
-        if (!acc.isValid()) {
-            throw new IllegalArgumentException("Invalid SRA accession was passed to SRA reader: " + acc);
-        }
-
-        try {
-            run = gov.nih.nlm.ncbi.ngs.NGS.openReadCollection(acc.toString());
-            virtualHeader = loadSamHeader();
-        } catch (final Exception e) {
-            throw new RuntimeException(e);
-        }
-
-        cachedReferences = new ReferenceCache(run, virtualHeader);
-        recordRangeInfo = SRAIterator.getRecordsRangeInfo(run);
-        index = new SRAIndex(virtualHeader, recordRangeInfo);
-    }
-
-    @Override
-    public Type type() {
-        return Type.SRA_TYPE;
-    }
-
-    @Override
-    public boolean hasIndex() {
-        return true;
-    }
-
-    @Override
-    public BAMIndex getIndex() {
-        return index;
-    }
-
-    @Override
-    public SAMFileHeader getFileHeader() {
-        return virtualHeader;
-    }
-
-    @Override
-    public CloseableIterator<SAMRecord> getIterator() {
-        return getIterator(getFilePointerSpanningReads());
-    }
-
-    @Override
-    public CloseableIterator<SAMRecord> getIterator(SAMFileSpan chunks) {
-        if (run == null) {
-            throw new RuntimeException("Cannot create iterator - SRA run is uninitialized");
-        }
-
-        if (virtualHeader == null) {
-            throw new RuntimeException("Cannot create iterator - SAM file header is uninitialized");
-        }
-
-        List<Chunk> chunkList = ((BAMFileSpan) chunks).getChunks();
-
-        final SRAIterator newIterator = new SRAIterator(acc, run, virtualHeader, cachedReferences, recordRangeInfo, chunkList);
-        if (validationStringency != null) {
-            newIterator.setValidationStringency(validationStringency);
-        }
-
-        return newIterator;
-    }
-
-    @Override
-    public SAMFileSpan getFilePointerSpanningReads() {
-        if (recordRangeInfo.getTotalRecordRangeLength() <= 0) {
-            throw new RuntimeException("Cannot create file span - SRA file is empty");
-        }
-
-        return new BAMFileSpan(new Chunk(0, recordRangeInfo.getTotalRecordRangeLength()));
-    }
-
-    @Override
-    public CloseableIterator<SAMRecord> query(QueryInterval[] intervals, boolean contained) {
-        BAMFileSpan span = new BAMFileSpan();
-        BrowseableBAMIndex index = getBrowseableIndex();
-
-        for (QueryInterval interval : intervals) {
-            BAMFileSpan intervalSpan;
-            if (!contained) {
-                intervalSpan = index.getSpanOverlapping(interval.referenceIndex, interval.start, interval.end);
-
-            } else {
-                intervalSpan = getSpanContained(interval.referenceIndex, interval.start, interval.end);
-            }
-            span.add(intervalSpan);
-        }
-
-        return getIterator(span);
-    }
-
-    @Override
-    public CloseableIterator<SAMRecord> queryAlignmentStart(String sequence, int start) {
-        int sequenceIndex = virtualHeader.getSequenceIndex(sequence);
-        if (sequenceIndex == -1) {
-            throw new IllegalArgumentException("Unknown sequence '" + sequence + "' was passed to SRAFileReader");
-        }
-
-        return getIterator(getSpanContained(sequenceIndex, start, -1));
-    }
-
-    @Override
-    public CloseableIterator<SAMRecord> queryUnmapped() {
-        if (recordRangeInfo.getTotalRecordRangeLength() <= 0) {
-            throw new RuntimeException("Cannot create file span - SRA file is empty");
-        }
-
-        SAMFileSpan span = new BAMFileSpan(new Chunk(recordRangeInfo.getTotalReferencesLength(), recordRangeInfo.getTotalRecordRangeLength()));
-        return getIterator(span);
-    }
-
-    @Override
-    public void close() { }
-
-    @Override
-    public ValidationStringency getValidationStringency() {
-        return validationStringency;
-    }
-
-
-    /** INDEXING */
-
-
-    /**
-     * Returns true if the supported index is browseable, meaning the bins in it can be traversed
-     * and chunk data inspected and retrieved.
-     *
-     * @return True if the index supports the BrowseableBAMIndex interface.  False otherwise.
-     */
-    @Override
-    public boolean hasBrowseableIndex() {
-        return true;
-    }
-
-    /**
-     * Gets an index tagged with the BrowseableBAMIndex interface.  Throws an exception if no such
-     * index is available.
-     *
-     * @return An index with a browseable interface, if possible.
-     * @throws SAMException if no such index is available.
-     */
-    @Override
-    public BrowseableBAMIndex getBrowseableIndex() {
-        return index;
-    }
-
-    /**
-     * Iterate through the given chunks in the file.
-     *
-     * @param chunks List of chunks for which to retrieve data.
-     * @return An iterator over the given chunks.
-     */
-    @Override
-    public SAMRecordIterator iterator(final SAMFileSpan chunks) {
-        CloseableIterator<SAMRecord> it = getIterator(chunks);
-        if (it == null) {
-            return null;
-        }
-        return (SAMRecordIterator) it;
-    }
-
-    /** ReaderImplementation */
-    @Override
-    void enableFileSource(final SamReader reader, final boolean enabled) {
-        log.info("enableFileSource is not supported");
-    }
-
-    @Override
-    void enableIndexCaching(final boolean enabled) {
-        log.info("enableIndexCaching is not supported");
-    }
-
-    @Override
-    void enableIndexMemoryMapping(final boolean enabled) {
-        log.info("enableIndexMemoryMapping is not supported");
-    }
-
-    @Override
-    void enableCrcChecking(final boolean enabled) {
-        log.info("enableCrcChecking is not supported");
-    }
-
-    @Override
-    void setSAMRecordFactory(final SAMRecordFactory factory) {
-        log.info("setSAMRecordFactory is not supported");
-    }
-
-    @Override
-    void setValidationStringency(final ValidationStringency validationStringency) {
-        this.validationStringency = validationStringency;
-    }
-
-    protected SRAIterator.RecordRangeInfo getRecordsRangeInfo() {
-        return recordRangeInfo;
-    }
-
-    private SAMFileHeader loadSamHeader() throws ErrorMsg {
-        if (run == null) {
-            throw new RuntimeException("Cannot load SAMFileHeader - SRA run is uninitialized");
-        }
-
-        String runName = run.getName();
-
-        SAMFileHeader header = new SAMFileHeader();
-        header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
-
-        ReadGroupIterator itRg = run.getReadGroups();
-        while (itRg.nextReadGroup()) {
-            String rgName = itRg.getName();
-            if (rgName.isEmpty())
-                rgName = runName;
-            SAMReadGroupRecord rg = new SAMReadGroupRecord(rgName);
-            rg.setSample(runName);
-            header.addReadGroup(rg);
-        }
-
-        ReferenceIterator itRef = run.getReferences();
-        while (itRef.nextReference()) {
-            header.addSequence(new SAMSequenceRecord(itRef.getCanonicalName(), (int) itRef.getLength()));
-        }
-
-        return header;
-    }
-
-    private BAMFileSpan getSpanContained(int sequenceIndex, long start, long end) {
-        if (recordRangeInfo.getTotalRecordRangeLength() <= 0) {
-            throw new RuntimeException("Cannot create file span - SRA file is empty");
-        }
-
-        long sequenceOffset = recordRangeInfo.getReferenceOffsets().get(sequenceIndex);
-        long sequenceLength = recordRangeInfo.getReferenceLengthsAligned().get(sequenceIndex);
-        if (end == -1) {
-            end = sequenceLength;
-        }
-
-        if (start > sequenceLength) {
-            throw new IllegalArgumentException("Sequence start position is larger than its length");
-        }
-
-        if (end > sequenceLength) {
-            throw new IllegalArgumentException("Sequence end position is larger than its length");
-        }
-
-        return new BAMFileSpan(new Chunk(sequenceOffset + start, sequenceOffset + end));
-    }
-}
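
A hedged sketch of opening the SRAFileReader deleted above. The SRAAccession(String) constructor and the accession value are assumptions for illustration (that class lives in htsjdk.samtools.sra, outside this hunk), and running it requires the NCBI NGS native libraries to be resolvable at runtime.

    import htsjdk.samtools.SAMRecord;
    import htsjdk.samtools.SRAFileReader;
    import htsjdk.samtools.sra.SRAAccession;
    import htsjdk.samtools.util.CloseableIterator;

    public class SRAFileReaderSketch {
        public static void main(final String[] args) {
            // Placeholder accession; any public SRA run accession would do.
            final SRAAccession acc = new SRAAccession("SRR000001");

            final SRAFileReader reader = new SRAFileReader(acc);
            System.out.println(reader.getFileHeader().getSequenceDictionary().size() + " references");

            // Print the first few records from the full-run iterator shown above.
            try (final CloseableIterator<SAMRecord> it = reader.getIterator()) {
                int printed = 0;
                while (it.hasNext() && printed++ < 5) {
                    System.out.print(it.next().getSAMString());
                }
            }
            reader.close();
        }
    }
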
diff --git a/src/java/htsjdk/samtools/SRAIterator.java b/src/java/htsjdk/samtools/SRAIterator.java
deleted file mode 100644
index 1347e1c..0000000
--- a/src/java/htsjdk/samtools/SRAIterator.java
+++ /dev/null
@@ -1,248 +0,0 @@
-/*===========================================================================
-*
-*                            PUBLIC DOMAIN NOTICE
-*               National Center for Biotechnology Information
-*
-*  This software/database is a "United States Government Work" under the
-*  terms of the United States Copyright Act.  It was written as part of
-*  the author's official duties as a United States Government employee and
-*  thus cannot be copyrighted.  This software/database is freely available
-*  to the public for use. The National Library of Medicine and the U.S.
-*  Government have not placed any restriction on its use or reproduction.
-*
-*  Although all reasonable efforts have been taken to ensure the accuracy
-*  and reliability of the software and data, the NLM and the U.S.
-*  Government do not and cannot warrant the performance or results that
-*  may be obtained by using this software or data. The NLM and the U.S.
-*  Government disclaim all warranties, express or implied, including
-*  warranties of performance, merchantability or fitness for any particular
-*  purpose.
-*
-*  Please cite the author in any work or product based on this material.
-*
-* ===========================================================================
-*
-*/
-
-/**
- * Created by andrii.nikitiuk on 8/11/15.
- */
-
-package htsjdk.samtools;
-
-import htsjdk.samtools.SAMFileHeader.SortOrder;
-
-import htsjdk.samtools.sra.ReferenceCache;
-import htsjdk.samtools.sra.SRAAccession;
-import htsjdk.samtools.sra.SRAAlignmentIterator;
-import htsjdk.samtools.sra.SRAUnalignmentIterator;
-import htsjdk.samtools.sra.SRAUtils;
-import ngs.ErrorMsg;
-import ngs.ReadCollection;
-import ngs.Reference;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.NoSuchElementException;
-
-/**
- * SRA iterator which returns SAMRecords for requested list of chunks
- */
-public class SRAIterator implements SAMRecordIterator {
-    private ValidationStringency validationStringency;
-
-    private SRAAccession accession;
-    private ReadCollection run;
-    private SAMFileHeader header;
-    private ReferenceCache cachedReferences;
-    private RecordRangeInfo recordRangeInfo;
-    private Iterator<Chunk> chunksIterator;
-    private Chunk currentChunk;
-
-    private SRAAlignmentIterator alignmentIterator;
-    private SRAUnalignmentIterator unalignmentIterator;
-
-    /**
-     * Describes record ranges info needed for emulating BAM index
-     */
-    public static class RecordRangeInfo {
-        private List<Long> referenceOffsets;
-        private List<Long> referenceLengthsAligned;
-        private long totalReferencesLength;
-        private long numberOfReads; // is used for unaligned read space
-        private long totalRecordRangeLength;
-
-        /**
-         * @param referenceLengthsAligned a list with lengths of each reference
-         * @param numberOfReads total number of reads within SRA archive
-         */
-        public RecordRangeInfo(List<Long> referenceLengthsAligned, long numberOfReads) {
-            this.numberOfReads = numberOfReads;
-            this.referenceLengthsAligned = referenceLengthsAligned;
-
-            referenceOffsets = new ArrayList<Long>();
-
-            totalReferencesLength = 0;
-            for (Long refLen : referenceLengthsAligned) {
-                referenceOffsets.add(totalReferencesLength);
-                totalReferencesLength += refLen;
-            }
-
-            totalRecordRangeLength = totalReferencesLength + this.numberOfReads;
-        }
-
-        public long getNumberOfReads() {
-            return numberOfReads;
-        }
-
-        public long getTotalReferencesLength() {
-            return totalReferencesLength;
-        }
-
-        public long getTotalRecordRangeLength() {
-            return totalRecordRangeLength;
-        }
-
-        public final List<Long> getReferenceOffsets() {
-            return Collections.unmodifiableList(referenceOffsets);
-        }
-
-        public final List<Long> getReferenceLengthsAligned() {
-            return Collections.unmodifiableList(referenceLengthsAligned);
-        }
-    }
-
-    /**
-     * Loads record ranges needed for emulating BAM index
-     * @param run read collection
-     * @return record ranges
-     */
-    public static RecordRangeInfo getRecordsRangeInfo(ReadCollection run) {
-        try {
-            return new RecordRangeInfo(SRAUtils.getReferencesLengthsAligned(run), SRAUtils.getNumberOfReads(run));
-        } catch (ErrorMsg e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    /**
-     * @param accession SRA accession
-     * @param run opened read collection
-     * @param header sam header
-     * @param cachedReferences list of cached references shared among all iterators from a single SRAFileReader
-     * @param recordRangeInfo info about record ranges within the SRA archive
-     * @param chunks used to determine which records the iterator should return
-     */
-    public SRAIterator(SRAAccession accession, final ReadCollection run, final SAMFileHeader header, ReferenceCache cachedReferences,
-                       final RecordRangeInfo recordRangeInfo, final List<Chunk> chunks) {
-        this.accession = accession;
-        this.run = run;
-        this.header = header;
-        this.cachedReferences = cachedReferences;
-        this.recordRangeInfo = recordRangeInfo;
-        chunksIterator = chunks.iterator();
-        if (chunksIterator.hasNext()) {
-            currentChunk = chunksIterator.next();
-        }
-
-        hasNext();
-    }
-
-    /**
-     * NGS iterators expose a single method, "nextObject", which returns true if the operation was successful or
-     * false when there are no more objects available.
-     * That means there is no way to check "hasNext" without actually moving the iterator forward.
-     * Because of that, all of the logic for moving the iterator forward lives in "hasNext"
-     * (see the stand-alone sketch of this peek-ahead pattern after this method).
-     *
-     * How it works:
-     *  The iterator holds a list of chunks of requested records, and chunksIterator walks through that list.
-     *  If the current chunk can represent aligned fragments, we create an SRAAlignmentIterator, pass the chunk
-     *  into it and ask whether it can find any record. If a record was found, we report that there is a next
-     *  element; otherwise we check whether the chunk can represent unaligned fragments and, if so, create an
-     *  SRAUnalignmentIterator and repeat the same steps as with the alignment iterator.
-     *
-     *  If no record was found by either SRAAlignmentIterator or SRAUnalignmentIterator (it is possible that a
-     *  reference range has no alignments, or that a read range contains only aligned fragments), we move on to
-     *  the next chunk.
-     *
-     *  When there are no more chunks and both iterators have no more records, we return false.
-     *
-     * @return true if there are more records available
-     */
-    @Override
-    public boolean hasNext() {
-        while (currentChunk != null) {
-            if (alignmentIterator == null) {
-                if (currentChunk.getChunkStart() < recordRangeInfo.getTotalReferencesLength()) {
-                    alignmentIterator = new SRAAlignmentIterator(accession, run, header, cachedReferences, recordRangeInfo, currentChunk);
-                    if (validationStringency != null) {
-                        alignmentIterator.setValidationStringency(validationStringency);
-                    }
-                }
-            }
-
-            if (alignmentIterator != null && alignmentIterator.hasNext()) {
-                return true;
-            }
-
-            if (unalignmentIterator == null) {
-                if (currentChunk.getChunkEnd() > recordRangeInfo.getTotalReferencesLength()) {
-                    unalignmentIterator = new SRAUnalignmentIterator(accession, run, header, recordRangeInfo, currentChunk);
-                    if (validationStringency != null) {
-                        unalignmentIterator.setValidationStringency(validationStringency);
-                    }
-                }
-            }
-            if (unalignmentIterator != null && unalignmentIterator.hasNext()) {
-                return true;
-            }
-
-            alignmentIterator = null;
-            unalignmentIterator = null;
-            if (chunksIterator.hasNext()) {
-                currentChunk = chunksIterator.next();
-            } else {
-                currentChunk = null;
-            }
-        }
-        return false;
-    }
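
As flagged in the comment above, this hasNext-driven advancement is a general adapter pattern for cursor-style APIs that only offer a "move and report success" call. A generic, stand-alone sketch of the same idea (plain Java, not htsjdk code):

    import java.util.Iterator;
    import java.util.NoSuchElementException;
    import java.util.function.Supplier;

    /** Adapts a cursor that returns the next item, or null when exhausted, into a java.util.Iterator. */
    final class PeekAheadIterator<T> implements Iterator<T> {
        private final Supplier<T> cursor;  // returns null once no more items are available
        private T next;                    // item fetched ahead of time by hasNext()

        PeekAheadIterator(final Supplier<T> cursor) {
            this.cursor = cursor;
        }

        @Override
        public boolean hasNext() {
            // All forward movement happens here, exactly as in SRAIterator.hasNext() above.
            if (next == null) {
                next = cursor.get();
            }
            return next != null;
        }

        @Override
        public T next() {
            if (!hasNext()) {
                throw new NoSuchElementException();
            }
            final T result = next;
            next = null;
            return result;
        }
    }
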
-
-    /**
-     * Calls hasNext to make sure that one of the inner iterators points to the next record, then retrieves the
-     * record from it.
-     * @return lazy SRA record
-     */
-    @Override
-    public SAMRecord next() {
-        if (!hasNext()) {
-            throw new NoSuchElementException("No more records are available in SRAIterator");
-        }
-
-        if (alignmentIterator != null && alignmentIterator.hasNext()) {
-            return alignmentIterator.next();
-        }
-
-        return unalignmentIterator.next();
-    }
-
-    @Override
-    public void remove() { throw new UnsupportedOperationException("Removal of records not implemented."); }
-
-    @Override
-    public void close() { }
-
-    @Override
-    public SAMRecordIterator assertSorted(final SortOrder sortOrder) { throw new UnsupportedOperationException("assertSorted is not implemented."); }
-
-    public void setValidationStringency(ValidationStringency validationStringency) {
-        this.validationStringency = validationStringency;
-
-        if (alignmentIterator != null) {
-            alignmentIterator.setValidationStringency(validationStringency);
-        }
-        if (unalignmentIterator != null) {
-            unalignmentIterator.setValidationStringency(validationStringency);
-        }
-    }
-}
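
To make the linear record coordinate space concrete, a small sketch using the public RecordRangeInfo constructor above: two references of aligned length 1,000 and 2,000 followed by 500 reads yield reference offsets [0, 1000], a total aligned length of 3,000, and a total record range length of 3,500 (aligned space plus the unaligned-read space).

    import htsjdk.samtools.SRAIterator;

    import java.util.Arrays;

    public class RecordRangeInfoSketch {
        public static void main(final String[] args) {
            final SRAIterator.RecordRangeInfo info =
                    new SRAIterator.RecordRangeInfo(Arrays.asList(1000L, 2000L), 500);

            System.out.println(info.getReferenceOffsets());        // [0, 1000]
            System.out.println(info.getTotalReferencesLength());   // 3000
            System.out.println(info.getTotalRecordRangeLength());  // 3500
        }
    }
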
diff --git a/src/java/htsjdk/samtools/SamFileValidator.java b/src/java/htsjdk/samtools/SamFileValidator.java
deleted file mode 100644
index 8971093..0000000
--- a/src/java/htsjdk/samtools/SamFileValidator.java
+++ /dev/null
@@ -1,856 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-
-package htsjdk.samtools;
-
-import htsjdk.samtools.SAMValidationError.Type;
-import htsjdk.samtools.BamIndexValidator.IndexValidationStringency;
-import htsjdk.samtools.metrics.MetricBase;
-import htsjdk.samtools.metrics.MetricsFile;
-import htsjdk.samtools.reference.ReferenceSequence;
-import htsjdk.samtools.reference.ReferenceSequenceFile;
-import htsjdk.samtools.reference.ReferenceSequenceFileWalker;
-import htsjdk.samtools.util.BlockCompressedInputStream;
-import htsjdk.samtools.util.CloseableIterator;
-import htsjdk.samtools.util.CloserUtil;
-import htsjdk.samtools.util.FastqQualityFormat;
-import htsjdk.samtools.util.Histogram;
-import htsjdk.samtools.util.IOUtil;
-import htsjdk.samtools.util.Log;
-import htsjdk.samtools.util.ProgressLogger;
-import htsjdk.samtools.util.QualityEncodingDetector;
-import htsjdk.samtools.util.SequenceUtil;
-import htsjdk.samtools.util.StringUtil;
-
-import java.io.BufferedInputStream;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.PrintWriter;
-import java.util.AbstractMap;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-/**
- * Validates SAM files as follows:
- * <ul>
- * <li>checks sam file header for sequence dictionary</li>
- * <li>checks sam file header for read groups</li>
- * <li>for each sam record
- * <ul>
- * <li>reports error detected by SAMRecord.isValid()</li>
- * <li>validates NM (nucleotide differences) exists and matches reality</li>
- * <li>validates mate fields agree with data in the mate record</li>
- * </ul>
- * </li>
- * </ul>
- *
- * @author Doug Voet
- * @see SAMRecord#isValid()
- */
-public class SamFileValidator {
-    private Histogram<Type> errorsByType = new Histogram<Type>();
-    private final PrintWriter out;
-    private PairEndInfoMap pairEndInfoByName;
-    private ReferenceSequenceFileWalker refFileWalker = null;
-    private boolean verbose = false;
-    private int maxVerboseOutput = 100;
-    private SAMSortOrderChecker orderChecker;
-    private Set<Type> errorsToIgnore = EnumSet.noneOf(Type.class);
-    private boolean ignoreWarnings = false;
-    private boolean bisulfiteSequenced = false;
-    private IndexValidationStringency indexValidationStringency = IndexValidationStringency.NONE;
-    private boolean sequenceDictionaryEmptyAndNoWarningEmitted = false;
-    private final int maxTempFiles;
-
-    private final static Log log = Log.getInstance(SamFileValidator.class);
-
-    public SamFileValidator(final PrintWriter out, final int maxTempFiles) {
-        this.out = out;
-        this.maxTempFiles = maxTempFiles;
-    }
-
-    /**
-     * Sets one or more error types that should not be reported on.
-     */
-    public void setErrorsToIgnore(final Collection<Type> types) {
-        if (!types.isEmpty()) {
-            this.errorsToIgnore = EnumSet.copyOf(types);
-        }
-    }
-
-    public void setIgnoreWarnings(final boolean ignoreWarnings) {
-        this.ignoreWarnings = ignoreWarnings;
-    }
-
-    /**
-     * Outputs validation summary report to out.
-     *
-     * @param samReader records to validate
-     * @param reference if null, NM tag validation is skipped
-     * @return boolean  true if there are no validation errors, otherwise false
-     */
-    public boolean validateSamFileSummary(final SamReader samReader, final ReferenceSequenceFile reference) {
-        init(reference, samReader.getFileHeader());
-
-        validateSamFile(samReader, out);
-
-        boolean result = errorsByType.isEmpty();
-
-        if (errorsByType.getCount() > 0) {
-            // Convert to a histogram with String IDs so that WARNING: or ERROR: can be prepended to the error type.
-            final Histogram<String> errorsAndWarningsByType = new Histogram<String>("Error Type", "Count");
-            for (final Histogram<SAMValidationError.Type>.Bin bin : errorsByType.values()) {
-                errorsAndWarningsByType.increment(bin.getId().getHistogramString(), bin.getValue());
-            }
-            final MetricsFile<ValidationMetrics, String> metricsFile = new MetricsFile<ValidationMetrics, String>();
-            errorsByType.setBinLabel("Error Type");
-            errorsByType.setValueLabel("Count");
-            metricsFile.setHistogram(errorsAndWarningsByType);
-            metricsFile.write(out);
-        }
-        cleanup();
-        return result;
-    }
-
-    /**
-     * Outputs validation error details to out.
-     *
-     * @param samReader records to validate
-     * @param reference if null, NM tag validation is skipped. Verbose processing will stop once the
-     *                  maximum verbose output threshold (maxVerboseOutput) has been reached.
-     * @return boolean  true if there are no validation errors, otherwise false
-     */
-    public boolean validateSamFileVerbose(final SamReader samReader, final ReferenceSequenceFile reference) {
-        init(reference, samReader.getFileHeader());
-
-        try {
-            validateSamFile(samReader, out);
-        } catch (MaxOutputExceededException e) {
-            out.println("Maximum output of [" + maxVerboseOutput + "] errors reached.");
-        }
-        boolean result = errorsByType.isEmpty();
-        cleanup();
-        return result;
-    }
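
A brief sketch of invoking the validator deleted here, using only the constructor and validateSamFileVerbose shown above; the input path is a placeholder, and passing a null reference skips NM-tag checking, as the javadoc notes.

    import htsjdk.samtools.SamFileValidator;
    import htsjdk.samtools.SamReader;
    import htsjdk.samtools.SamReaderFactory;
    import htsjdk.samtools.ValidationStringency;

    import java.io.File;
    import java.io.IOException;
    import java.io.PrintWriter;

    public class SamFileValidatorSketch {
        public static void main(final String[] args) throws IOException {
            final File input = new File("example.bam");   // placeholder path
            final PrintWriter out = new PrintWriter(System.out, true);

            try (final SamReader reader = SamReaderFactory.makeDefault()
                    .validationStringency(ValidationStringency.SILENT)
                    .open(input)) {
                // 100 is the maxTempFiles argument from the constructor shown above.
                final SamFileValidator validator = new SamFileValidator(out, 100);
                final boolean clean = validator.validateSamFileVerbose(reader, null);
                out.println(clean ? "No validation errors." : "Validation errors were reported above.");
            }
        }
    }
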
-
-    public void validateBamFileTermination(final File inputFile) {
-        BufferedInputStream inputStream = null;
-        try {
-            inputStream = IOUtil.toBufferedStream(new FileInputStream(inputFile));
-            if (!BlockCompressedInputStream.isValidFile(inputStream)) {
-                return;
-            }
-            final BlockCompressedInputStream.FileTermination terminationState =
-                    BlockCompressedInputStream.checkTermination(inputFile);
-            if (terminationState.equals(BlockCompressedInputStream.FileTermination.DEFECTIVE)) {
-                addError(new SAMValidationError(Type.TRUNCATED_FILE, "BAM file has defective last gzip block",
-                        inputFile.getPath()));
-            } else if (terminationState.equals(BlockCompressedInputStream.FileTermination.HAS_HEALTHY_LAST_BLOCK)) {
-                addError(new SAMValidationError(Type.BAM_FILE_MISSING_TERMINATOR_BLOCK,
-                        "Older BAM file -- does not have terminator block",
-                        inputFile.getPath()));
-
-            }
-        } catch (IOException e) {
-            throw new SAMException("IOException", e);
-        } finally {
-            if (inputStream != null) {
-                CloserUtil.close(inputStream);
-            }
-        }
-    }
-
-    private void validateSamFile(final SamReader samReader, final PrintWriter out) {
-        try {
-            validateHeader(samReader.getFileHeader());
-            orderChecker = new SAMSortOrderChecker(samReader.getFileHeader().getSortOrder());
-            validateSamRecordsAndQualityFormat(samReader, samReader.getFileHeader());
-            validateUnmatchedPairs();
-            if (indexValidationStringency != IndexValidationStringency.NONE) {
-                try {
-                    if (indexValidationStringency == IndexValidationStringency.LESS_EXHAUSTIVE) {
-                        BamIndexValidator.lessExhaustivelyTestIndex(samReader);
-                    }
-                    else {
-                        BamIndexValidator.exhaustivelyTestIndex(samReader);
-                    }
-                } catch (Exception e) {
-                    addError(new SAMValidationError(Type.INVALID_INDEX_FILE_POINTER, e.getMessage(), null));
-                }
-            }
-
-            if (errorsByType.isEmpty()) {
-                out.println("No errors found");
-            }
-        } finally {
-            out.flush();
-        }
-    }
-
-
-    /**
-     * Report on reads marked as paired, for which the mate was not found.
-     */
-    private void validateUnmatchedPairs() {
-        final InMemoryPairEndInfoMap inMemoryPairMap;
-        if (pairEndInfoByName instanceof CoordinateSortedPairEndInfoMap) {
-            // For the coordinate-sorted map, need to detect mate pairs in which the mateReferenceIndex on one end
-            // does not match the readReferenceIndex on the other end, so the pairs weren't united and validated.
-            inMemoryPairMap = new InMemoryPairEndInfoMap();
-            CloseableIterator<Map.Entry<String, PairEndInfo>> it = ((CoordinateSortedPairEndInfoMap) pairEndInfoByName).iterator();
-            while (it.hasNext()) {
-                Map.Entry<String, PairEndInfo> entry = it.next();
-                PairEndInfo pei = inMemoryPairMap.remove(entry.getValue().readReferenceIndex, entry.getKey());
-                if (pei != null) {
-                    // Found a mismatch btw read.mateReferenceIndex and mate.readReferenceIndex
-                    List<SAMValidationError> errors = pei.validateMates(entry.getValue(), entry.getKey());
-                    for (final SAMValidationError error : errors) {
-                        addError(error);
-                    }
-                } else {
-                    // Mate not found.
-                    inMemoryPairMap.put(entry.getValue().mateReferenceIndex, entry.getKey(), entry.getValue());
-                }
-            }
-            it.close();
-        } else {
-            inMemoryPairMap = (InMemoryPairEndInfoMap) pairEndInfoByName;
-        }
-        // At this point, everything in InMemoryMap is a read marked as a pair, for which a mate was not found.
-        for (final Map.Entry<String, PairEndInfo> entry : inMemoryPairMap) {
-            addError(new SAMValidationError(Type.MATE_NOT_FOUND, "Mate not found for paired read", entry.getKey()));
-        }
-    }
-
-    /**
-     * SAM record and quality format validations are combined into a single method because validation must be completed
-     * in only a single pass of the SamRecords (because a SamReader's iterator() method may not return the same
-     * records on a subsequent call).
-     */
-    private void validateSamRecordsAndQualityFormat(final Iterable<SAMRecord> samRecords, final SAMFileHeader header) {
-        final SAMRecordIterator iter = (SAMRecordIterator) samRecords.iterator();
-        final ProgressLogger progress = new ProgressLogger(log, 10000000, "Validated Read");
-        final QualityEncodingDetector qualityDetector = new QualityEncodingDetector();
-        try {
-            while (iter.hasNext()) {
-                final SAMRecord record = iter.next();
-
-                qualityDetector.add(record);
-
-                final long recordNumber = progress.getCount() + 1;
-                final Collection<SAMValidationError> errors = record.isValid();
-                if (errors != null) {
-                    for (final SAMValidationError error : errors) {
-                        error.setRecordNumber(recordNumber);
-                        addError(error);
-                    }
-                }
-
-                validateMateFields(record, recordNumber);
-                final boolean hasValidSortOrder = validateSortOrder(record, recordNumber);
-                validateReadGroup(record, header);
-                final boolean cigarIsValid = validateCigar(record, recordNumber);
-                if (cigarIsValid) {
-                    try {
-                        validateNmTag(record, recordNumber);
-                    }
-                    catch (SAMException e) {
-                        if (hasValidSortOrder) {
-                            // If a CRAM file has an invalid sort order, the ReferenceFileWalker will throw a
-                            // SAMException due to an out of order request when retrieving reference bases during NM
-                            // tag validation; rethrow the exception only if the sort order is valid, otherwise
-                            // swallow the exception and carry on validating
-                            throw e;
-                        }
-                    }
-                }
-                validateSecondaryBaseCalls(record, recordNumber);
-                validateTags(record, recordNumber);
-                if (sequenceDictionaryEmptyAndNoWarningEmitted && !record.getReadUnmappedFlag()) {
-                    addError(new SAMValidationError(Type.MISSING_SEQUENCE_DICTIONARY, "Sequence dictionary is empty", null));
-                    sequenceDictionaryEmptyAndNoWarningEmitted = false;
-
-                }
-                progress.record(record);
-            }
-
-            try {
-                if (progress.getCount() > 0) { // Avoid exception being thrown as a result of no qualities being read
-                    final FastqQualityFormat format = qualityDetector.generateBestGuess(QualityEncodingDetector.FileContext.SAM, FastqQualityFormat.Standard);
-                    if (format != FastqQualityFormat.Standard) {
-                        addError(new SAMValidationError(Type.INVALID_QUALITY_FORMAT, String.format("Detected %s quality score encoding, but expected %s.", format, FastqQualityFormat.Standard), null));
-                    }
-                }
-            } catch (SAMException e) {
-                addError(new SAMValidationError(Type.INVALID_QUALITY_FORMAT, e.getMessage(), null));
-            }
-        } catch (SAMFormatException e) {
-            // increment record number because the iterator behind the SAMFileReader
-            // reads one record ahead so we will get this failure one record ahead
-            final String msg = "SAMFormatException on record " + (progress.getCount() + 1);
-            out.println(msg);
-            throw new SAMException(msg, e);
-        } catch (FileTruncatedException e) {
-            addError(new SAMValidationError(Type.TRUNCATED_FILE, "File is truncated", null));
-        } finally {
-            iter.close();
-        }
-    }
-
-    private void validateReadGroup(final SAMRecord record, final SAMFileHeader header) {
-        final SAMReadGroupRecord rg = record.getReadGroup();
-        if (rg == null) {
-            addError(new SAMValidationError(Type.RECORD_MISSING_READ_GROUP,
-                    "A record is missing a read group", record.getReadName()));
-        } else if (header.getReadGroup(rg.getId()) == null) {
-            addError(new SAMValidationError(Type.READ_GROUP_NOT_FOUND,
-                    "A record has a read group not found in the header: ",
-                    record.getReadName() + ", " + rg.getReadGroupId()));
-        }
-    }
-
-    /**
-     * Report error if a tag value is a Long.
-     */
-    private void validateTags(final SAMRecord record, final long recordNumber) {
-        for (final SAMRecord.SAMTagAndValue tagAndValue : record.getAttributes()) {
-            if (tagAndValue.value instanceof Long) {
-                addError(new SAMValidationError(Type.TAG_VALUE_TOO_LARGE,
-                        "Numeric value too large for tag " + tagAndValue.tag,
-                        record.getReadName(), recordNumber));
-            }
-        }
-    }
-
-    private void validateSecondaryBaseCalls(final SAMRecord record, final long recordNumber) {
-        final String e2 = (String) record.getAttribute(SAMTag.E2.name());
-        if (e2 != null) {
-            if (e2.length() != record.getReadLength()) {
-                addError(new SAMValidationError(Type.MISMATCH_READ_LENGTH_AND_E2_LENGTH,
-                        String.format("E2 tag length (%d) != read length (%d)", e2.length(), record.getReadLength()),
-                        record.getReadName(), recordNumber));
-            }
-            final byte[] bases = record.getReadBases();
-            final byte[] secondaryBases = StringUtil.stringToBytes(e2);
-            for (int i = 0; i < Math.min(bases.length, secondaryBases.length); ++i) {
-                if (SequenceUtil.isNoCall(bases[i]) || SequenceUtil.isNoCall(secondaryBases[i])) {
-                    continue;
-                }
-                if (SequenceUtil.basesEqual(bases[i], secondaryBases[i])) {
-                    addError(new SAMValidationError(Type.E2_BASE_EQUALS_PRIMARY_BASE,
-                            String.format("Secondary base call (%c) == primary base call (%c)",
-                                    (char) secondaryBases[i], (char) bases[i]),
-                            record.getReadName(), recordNumber));
-                    break;
-                }
-            }
-        }
-        final String u2 = (String) record.getAttribute(SAMTag.U2.name());
-        if (u2 != null && u2.length() != record.getReadLength()) {
-            addError(new SAMValidationError(Type.MISMATCH_READ_LENGTH_AND_U2_LENGTH,
-                    String.format("U2 tag length (%d) != read length (%d)", u2.length(), record.getReadLength()),
-                    record.getReadName(), recordNumber));
-        }
-    }
-
-    private boolean validateCigar(final SAMRecord record, final long recordNumber) {
-        if (record.getReadUnmappedFlag()) {
-            return true;
-        }
-        return validateCigar(record, recordNumber, true);
-    }
-
-    private boolean validateMateCigar(final SAMRecord record, final long recordNumber) {
-        return validateCigar(record, recordNumber, false);
-    }
-
-    private boolean validateCigar(final SAMRecord record, final long recordNumber, final boolean isReadCigar) {
-        final ValidationStringency savedStringency = record.getValidationStringency();
-        record.setValidationStringency(ValidationStringency.LENIENT);
-        final List<SAMValidationError> errors = isReadCigar ? record.validateCigar(recordNumber) : SAMUtils.validateMateCigar(record, recordNumber);
-        record.setValidationStringency(savedStringency);
-        if (errors == null) {
-            return true;
-        }
-        boolean valid = true;
-        for (final SAMValidationError error : errors) {
-            addError(error);
-            valid = false;
-        }
-        return valid;
-    }
-
-
-    private boolean validateSortOrder(final SAMRecord record, final long recordNumber) {
-        final SAMRecord prev = orderChecker.getPreviousRecord();
-        boolean isValidSortOrder = orderChecker.isSorted(record);
-        if (!isValidSortOrder) {
-            addError(new SAMValidationError(
-                    Type.RECORD_OUT_OF_ORDER,
-                    String.format(
-                            "The record is out of [%s] order, prior read name [%s], prior coodinates [%d:%d]",
-                            record.getHeader().getSortOrder().name(),
-                            prev.getReadName(),
-                            prev.getReferenceIndex(),
-                            prev.getAlignmentStart()),
-                    record.getReadName(),
-                    recordNumber));
-        }
-        return isValidSortOrder;
-    }
-
-    private void init(final ReferenceSequenceFile reference, final SAMFileHeader header) {
-        if (header.getSortOrder() == SAMFileHeader.SortOrder.coordinate) {
-            this.pairEndInfoByName = new CoordinateSortedPairEndInfoMap();
-        } else {
-            this.pairEndInfoByName = new InMemoryPairEndInfoMap();
-        }
-        if (reference != null) {
-            this.refFileWalker = new ReferenceSequenceFileWalker(reference);
-        }
-    }
-
-    private void cleanup() {
-        this.errorsByType = null;
-        this.pairEndInfoByName = null;
-        this.refFileWalker = null;
-    }
-
-    private void validateNmTag(final SAMRecord record, final long recordNumber) {
-        if (!record.getReadUnmappedFlag()) {
-            final Integer tagNucleotideDiffs = record.getIntegerAttribute(ReservedTagConstants.NM);
-            if (tagNucleotideDiffs == null) {
-                addError(new SAMValidationError(
-                        Type.MISSING_TAG_NM,
-                        "NM tag (nucleotide differences) is missing",
-                        record.getReadName(),
-                        recordNumber));
-            } else if (refFileWalker != null) {
-                final ReferenceSequence refSequence = refFileWalker.get(record.getReferenceIndex());
-                final int actualNucleotideDiffs = SequenceUtil.calculateSamNmTag(record, refSequence.getBases(),
-                        0, isBisulfiteSequenced());
-
-                if (!tagNucleotideDiffs.equals(actualNucleotideDiffs)) {
-                    addError(new SAMValidationError(
-                            Type.INVALID_TAG_NM,
-                            "NM tag (nucleotide differences) in file [" + tagNucleotideDiffs +
-                                    "] does not match reality [" + actualNucleotideDiffs + "]",
-                            record.getReadName(),
-                            recordNumber));
-                }
-            }
-        }
-    }
-
-    private void validateMateFields(final SAMRecord record, final long recordNumber) {
-        if (!record.getReadPairedFlag() || record.isSecondaryOrSupplementary()) {
-            return;
-        }
-        validateMateCigar(record, recordNumber);
-
-        final PairEndInfo pairEndInfo = pairEndInfoByName.remove(record.getReferenceIndex(), record.getReadName());
-        if (pairEndInfo == null) {
-            pairEndInfoByName.put(record.getMateReferenceIndex(), record.getReadName(), new PairEndInfo(record, recordNumber));
-        } else {
-            final List<SAMValidationError> errors =
-                    pairEndInfo.validateMates(new PairEndInfo(record, recordNumber), record.getReadName());
-            for (final SAMValidationError error : errors) {
-                addError(error);
-            }
-        }
-    }
-
-    private void validateHeader(final SAMFileHeader fileHeader) {
-        for (final SAMValidationError error : fileHeader.getValidationErrors()) {
-            addError(error);
-        }
-        if (fileHeader.getVersion() == null) {
-            addError(new SAMValidationError(Type.MISSING_VERSION_NUMBER, "Header has no version number", null));
-        } else if (!SAMFileHeader.ACCEPTABLE_VERSIONS.contains(fileHeader.getVersion())) {
-            addError(new SAMValidationError(Type.INVALID_VERSION_NUMBER, "Header version: " +
-                    fileHeader.getVersion() + " does not match any of the acceptable versions: " +
-                    StringUtil.join(", ", SAMFileHeader.ACCEPTABLE_VERSIONS.toArray(new String[0])),
-                    null));
-        }
-        if (fileHeader.getSequenceDictionary().isEmpty()) {
-            sequenceDictionaryEmptyAndNoWarningEmitted = true;
-        }
-        if (fileHeader.getReadGroups().isEmpty()) {
-            addError(new SAMValidationError(Type.MISSING_READ_GROUP, "Read group list is empty", null));
-        }
-        final List<SAMProgramRecord> pgs = fileHeader.getProgramRecords();
-        for (int i = 0; i < pgs.size() - 1; i++) {
-            for (int j = i + 1; j < pgs.size(); j++) {
-                if (pgs.get(i).getProgramGroupId().equals(pgs.get(j).getProgramGroupId())) {
-                    addError(new SAMValidationError(Type.DUPLICATE_PROGRAM_GROUP_ID, "Duplicate " +
-                            "program group id: " + pgs.get(i).getProgramGroupId(), null));
-                }
-            }
-        }
-
-        final List<SAMReadGroupRecord> rgs = fileHeader.getReadGroups();
-        final Set<String> readGroupIDs = new HashSet<String>();
-
-        for (final SAMReadGroupRecord record : rgs) {
-            final String readGroupID = record.getReadGroupId();
-            if (readGroupIDs.contains(readGroupID)) {
-                addError(new SAMValidationError(Type.DUPLICATE_READ_GROUP_ID, "Duplicate " +
-                        "read group id: " + readGroupID, null));
-            } else {
-                readGroupIDs.add(readGroupID);
-            }
-
-            final String platformValue = record.getPlatform();
-            if (platformValue == null || "".equals(platformValue)) {
-                addError(new SAMValidationError(Type.MISSING_PLATFORM_VALUE,
-                        "A platform (PL) attribute was not found for read group ",
-                        readGroupID));
-            }
-            else { 
-                // NB: cannot be null, so not catching a NPE
-                try {
-                    SAMReadGroupRecord.PlatformValue.valueOf(platformValue.toUpperCase());
-                } catch (IllegalArgumentException e) {
-                    addError(new SAMValidationError(Type.INVALID_PLATFORM_VALUE, 
-                            "The platform (PL) attribute (" + platformValue + ") + was not one of the valid values for read group ",
-                            readGroupID));
-                }
-            }
-        }
-    }
-
-    private void addError(final SAMValidationError error) {
-        // Just ignore an error if it's of a type we're not interested in
-        if (this.errorsToIgnore.contains(error.getType())) return;
-
-        if (this.ignoreWarnings && error.getType().severity == SAMValidationError.Severity.WARNING) return;
-
-        this.errorsByType.increment(error.getType());
-        if (verbose) {
-            out.println(error);
-            out.flush();
-            if (this.errorsByType.getCount() >= maxVerboseOutput) {
-                throw new MaxOutputExceededException();
-            }
-        }
-    }
-
-    /**
-     * Control verbosity
-     *
-     * @param verbose          True in order to emit a message per error or warning.
-     * @param maxVerboseOutput If verbose, emit no more than this many messages.  Ignored if !verbose.
-     */
-    public void setVerbose(final boolean verbose, final int maxVerboseOutput) {
-        this.verbose = verbose;
-        this.maxVerboseOutput = maxVerboseOutput;
-    }
-
-    public boolean isBisulfiteSequenced() {
-        return bisulfiteSequenced;
-    }
-
-    public void setBisulfiteSequenced(boolean bisulfiteSequenced) {
-        this.bisulfiteSequenced = bisulfiteSequenced;
-    }
-
-    /**
-     * @deprecated use setIndexValidationStringency instead
-     */
-    public SamFileValidator setValidateIndex(final boolean validateIndex) {
-        // The SAMFileReader must also have IndexCaching enabled to have the index validated.
-        return this.setIndexValidationStringency(validateIndex ? IndexValidationStringency.EXHAUSTIVE : IndexValidationStringency.NONE);
-    }
-
-    public SamFileValidator setIndexValidationStringency(final IndexValidationStringency stringency) {
-        this.indexValidationStringency = stringency;
-        return this;
-    }
-
-    public static class ValidationMetrics extends MetricBase {
-    }
-
-    /**
-     * Holds just the fields needed for mate validation, plus the record number, so the entire
-     * SAMRecord need not be kept in memory while waiting for its mate.
-     */
-    private static class PairEndInfo {
-        private final int readAlignmentStart;
-        private final int readReferenceIndex;
-        private final boolean readNegStrandFlag;
-        private final boolean readUnmappedFlag;
-        private final String readCigarString;
-
-        private final int mateAlignmentStart;
-        private final int mateReferenceIndex;
-        private final boolean mateNegStrandFlag;
-        private final boolean mateUnmappedFlag;
-        private final String mateCigarString;
-
-        private final boolean firstOfPairFlag;
-
-        private final long recordNumber;
-
-        public PairEndInfo(final SAMRecord record, final long recordNumber) {
-            this.recordNumber = recordNumber;
-
-            this.readAlignmentStart = record.getAlignmentStart();
-            this.readNegStrandFlag = record.getReadNegativeStrandFlag();
-            this.readReferenceIndex = record.getReferenceIndex();
-            this.readUnmappedFlag = record.getReadUnmappedFlag();
-            this.readCigarString = record.getCigarString();
-
-            this.mateAlignmentStart = record.getMateAlignmentStart();
-            this.mateNegStrandFlag = record.getMateNegativeStrandFlag();
-            this.mateReferenceIndex = record.getMateReferenceIndex();
-            this.mateUnmappedFlag = record.getMateUnmappedFlag();
-            final Object mcs = record.getAttribute(SAMTag.MC.name());
-            this.mateCigarString = (mcs != null) ? (String) mcs : null;
-
-            this.firstOfPairFlag = record.getFirstOfPairFlag();
-        }
-
-        private PairEndInfo(int readAlignmentStart, int readReferenceIndex, boolean readNegStrandFlag, boolean readUnmappedFlag,
-                            String readCigarString,
-                            int mateAlignmentStart, int mateReferenceIndex, boolean mateNegStrandFlag, boolean mateUnmappedFlag,
-                            String mateCigarString,
-                            boolean firstOfPairFlag, long recordNumber) {
-            this.readAlignmentStart = readAlignmentStart;
-            this.readReferenceIndex = readReferenceIndex;
-            this.readNegStrandFlag = readNegStrandFlag;
-            this.readUnmappedFlag = readUnmappedFlag;
-            this.readCigarString = readCigarString;
-            this.mateAlignmentStart = mateAlignmentStart;
-            this.mateReferenceIndex = mateReferenceIndex;
-            this.mateNegStrandFlag = mateNegStrandFlag;
-            this.mateUnmappedFlag = mateUnmappedFlag;
-            this.mateCigarString = mateCigarString;
-            this.firstOfPairFlag = firstOfPairFlag;
-            this.recordNumber = recordNumber;
-        }
-
-        public List<SAMValidationError> validateMates(final PairEndInfo mate, final String readName) {
-            final List<SAMValidationError> errors = new ArrayList<SAMValidationError>();
-            validateMateFields(this, mate, readName, errors);
-            validateMateFields(mate, this, readName, errors);
-            // Validations that should not be repeated on both ends
-            if (this.firstOfPairFlag == mate.firstOfPairFlag) {
-                final String whichEnd = this.firstOfPairFlag ? "first" : "second";
-                errors.add(new SAMValidationError(
-                        Type.MATES_ARE_SAME_END,
-                        "Both mates are marked as " + whichEnd + " of pair",
-                        readName,
-                        this.recordNumber
-                ));
-            }
-            return errors;
-        }
-
-        private void validateMateFields(final PairEndInfo end1, final PairEndInfo end2, final String readName, final List<SAMValidationError> errors) {
-            if (end1.mateAlignmentStart != end2.readAlignmentStart) {
-                errors.add(new SAMValidationError(
-                        Type.MISMATCH_MATE_ALIGNMENT_START,
-                        "Mate alignment does not match alignment start of mate",
-                        readName,
-                        end1.recordNumber));
-            }
-            if (end1.mateNegStrandFlag != end2.readNegStrandFlag) {
-                errors.add(new SAMValidationError(
-                        Type.MISMATCH_FLAG_MATE_NEG_STRAND,
-                        "Mate negative strand flag does not match read negative strand flag of mate",
-                        readName,
-                        end1.recordNumber));
-            }
-            if (end1.mateReferenceIndex != end2.readReferenceIndex) {
-                errors.add(new SAMValidationError(
-                        Type.MISMATCH_MATE_REF_INDEX,
-                        "Mate reference index (MRNM) does not match reference index of mate",
-                        readName,
-                        end1.recordNumber));
-            }
-            if (end1.mateUnmappedFlag != end2.readUnmappedFlag) {
-                errors.add(new SAMValidationError(
-                        Type.MISMATCH_FLAG_MATE_UNMAPPED,
-                        "Mate unmapped flag does not match read unmapped flag of mate",
-                        readName,
-                        end1.recordNumber));
-            }
-            if ((end1.mateCigarString != null) && (!end1.mateCigarString.equals(end2.readCigarString))) {
-                errors.add(new SAMValidationError(
-                        Type.MISMATCH_MATE_CIGAR_STRING,
-                        "Mate CIGAR string does not match CIGAR string of mate",
-                        readName,
-                        end1.recordNumber));
-            }
-            // Note - don't need to validate that the mateCigarString is a valid cigar string, since this
-            // will be validated by validateCigar on the mate's record itself.
-        }
-    }
-
-    /**
-     * Thrown in addError indicating that maxVerboseOutput has been exceeded and processing should stop
-     */
-    private static class MaxOutputExceededException extends SAMException {
-        MaxOutputExceededException() {
-            super("maxVerboseOutput exceeded.");
-        }
-    }
-
-    interface PairEndInfoMap extends Iterable<Map.Entry<String, PairEndInfo>> {
-        void put(int mateReferenceIndex, String key, PairEndInfo value);
-
-        PairEndInfo remove(int mateReferenceIndex, String key);
-
-        CloseableIterator<Map.Entry<String, PairEndInfo>> iterator();
-    }
-
-    private class CoordinateSortedPairEndInfoMap implements PairEndInfoMap {
-        private final CoordinateSortedPairInfoMap<String, PairEndInfo> onDiskMap =
-                new CoordinateSortedPairInfoMap<String, PairEndInfo>(maxTempFiles, new Codec());
-
-        public void put(int mateReferenceIndex, String key, PairEndInfo value) {
-            onDiskMap.put(mateReferenceIndex, key, value);
-        }
-
-        public PairEndInfo remove(int mateReferenceIndex, String key) {
-            return onDiskMap.remove(mateReferenceIndex, key);
-        }
-
-        public CloseableIterator<Map.Entry<String, PairEndInfo>> iterator() {
-            return onDiskMap.iterator();
-        }
-
-        private class Codec implements CoordinateSortedPairInfoMap.Codec<String, PairEndInfo> {
-            private DataInputStream in;
-            private DataOutputStream out;
-
-            public void setOutputStream(final OutputStream os) {
-                this.out = new DataOutputStream(os);
-            }
-
-            public void setInputStream(final InputStream is) {
-                this.in = new DataInputStream(is);
-            }
-
-            public void encode(final String key, final PairEndInfo record) {
-                try {
-                    out.writeUTF(key);
-                    out.writeInt(record.readAlignmentStart);
-                    out.writeInt(record.readReferenceIndex);
-                    out.writeBoolean(record.readNegStrandFlag);
-                    out.writeBoolean(record.readUnmappedFlag);
-                    out.writeUTF(record.readCigarString);
-                    out.writeInt(record.mateAlignmentStart);
-                    out.writeInt(record.mateReferenceIndex);
-                    out.writeBoolean(record.mateNegStrandFlag);
-                    out.writeBoolean(record.mateUnmappedFlag);
-                    // writeUTF can't take null, so store a null mateCigarString as an empty string
-                    out.writeUTF(record.mateCigarString != null ? record.mateCigarString : "");
-                    out.writeBoolean(record.firstOfPairFlag);
-                    out.writeLong(record.recordNumber);
-                } catch (IOException e) {
-                    throw new SAMException("Error spilling PairInfo to disk", e);
-                }
-            }
-
-            public Map.Entry<String, PairEndInfo> decode() {
-                try {
-                    final String key = in.readUTF();
-                    final int readAlignmentStart = in.readInt();
-                    final int readReferenceIndex = in.readInt();
-                    final boolean readNegStrandFlag = in.readBoolean();
-                    final boolean readUnmappedFlag = in.readBoolean();
-                    final String readCigarString = in.readUTF();
-
-                    final int mateAlignmentStart = in.readInt();
-                    final int mateReferenceIndex = in.readInt();
-                    final boolean mateNegStrandFlag = in.readBoolean();
-                    final boolean mateUnmappedFlag = in.readBoolean();
-
-                    // read mateCigarString - note that null value is stored as an empty string
-                    final String mcs = in.readUTF();
-                    final String mateCigarString = !mcs.isEmpty() ? mcs : null;
-
-                    final boolean firstOfPairFlag = in.readBoolean();
-
-                    final long recordNumber = in.readLong();
-                    final PairEndInfo rec = new PairEndInfo(readAlignmentStart, readReferenceIndex, readNegStrandFlag,
-                            readUnmappedFlag, readCigarString, mateAlignmentStart, mateReferenceIndex, mateNegStrandFlag,
-                            mateUnmappedFlag, mateCigarString,
-                            firstOfPairFlag, recordNumber);
-                    return new AbstractMap.SimpleEntry<String, PairEndInfo>(key, rec);
-                } catch (IOException e) {
-                    throw new SAMException("Error reading PairInfo from disk", e);
-                }
-            }
-        }
-    }
-
-    private static class InMemoryPairEndInfoMap implements PairEndInfoMap {
-        private final Map<String, PairEndInfo> map = new HashMap<String, PairEndInfo>();
-
-        public void put(int mateReferenceIndex, String key, PairEndInfo value) {
-            if (mateReferenceIndex != value.mateReferenceIndex)
-                throw new IllegalArgumentException("mateReferenceIndex does not agree with PairEndInfo");
-            map.put(key, value);
-        }
-
-        public PairEndInfo remove(int mateReferenceIndex, String key) {
-            return map.remove(key);
-        }
-
-        public CloseableIterator<Map.Entry<String, PairEndInfo>> iterator() {
-            final Iterator<Map.Entry<String, PairEndInfo>> it = map.entrySet().iterator();
-            return new CloseableIterator<Map.Entry<String, PairEndInfo>>() {
-                public void close() {
-                    // do nothing
-                }
-
-                public boolean hasNext() {
-                    return it.hasNext();
-                }
-
-                public Map.Entry<String, PairEndInfo> next() {
-                    return it.next();
-                }
-
-                public void remove() {
-                    it.remove();
-                }
-            };
-        }
-    }
-}
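For context, a minimal driver for the validator removed above -- a sketch only, assuming its public entry points (the SamFileValidator(PrintWriter, int) constructor and validateSamFileVerbose(SamReader, ReferenceSequenceFile)) declared earlier in the class and not shown in this hunk; the file path is a placeholder:

    import htsjdk.samtools.SamFileValidator;
    import htsjdk.samtools.SamReader;
    import htsjdk.samtools.SamReaderFactory;
    import htsjdk.samtools.ValidationStringency;

    import java.io.File;
    import java.io.PrintWriter;

    public class ValidateBamSketch {
        public static void main(final String[] args) {
            // Keep the reader silent so problems are reported by the validator, not by the reader.
            final SamReader reader = SamReaderFactory.makeDefault()
                    .validationStringency(ValidationStringency.SILENT)
                    .open(new File("sample.bam"));                          // placeholder path
            final SamFileValidator validator = new SamFileValidator(new PrintWriter(System.out), 100);
            validator.setVerbose(true, 1000);                               // emit at most 1000 messages (see setVerbose above)
            validator.validateSamFileVerbose(reader, null);                 // null reference: only the NM-vs-reference check is skipped
        }
    }

Passing a null reference skips only the NM-vs-reference comparison in validateNmTag above; all other per-record checks still run.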
diff --git a/src/java/htsjdk/samtools/SamFiles.java b/src/java/htsjdk/samtools/SamFiles.java
deleted file mode 100644
index 0a703a7..0000000
--- a/src/java/htsjdk/samtools/SamFiles.java
+++ /dev/null
@@ -1,81 +0,0 @@
-package htsjdk.samtools;
-
-import htsjdk.samtools.cram.CRAIIndex;
-import htsjdk.samtools.cram.build.CramIO;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * @author mccowan
- */
-public class SamFiles {
-
-    /**
-     * Finds the index file associated with the provided SAM file.  The index file must exist and be reachable to be found.
-     *
-     * If the file is a symlink and the index cannot be found, resolve the symlink and look for the index next to the actual file.
-     *
-     * @return The index for the provided SAM, or null if one was not found.
-     */
-    public static File findIndex(final File samFile) {
-        final File indexFile = lookForIndex(samFile); //try to find the index
-        if (indexFile == null) {
-            return unsymlinkAndLookForIndex(samFile);
-        } else {
-            return indexFile;
-        }
-    }
-
-    /**
-     * resolve the canonical path of samFile and attempt to find an index there.
-     * @return an index file or null if no index is found.
-     */
-    private static File unsymlinkAndLookForIndex(File samFile) {
-        try {
-            final File canonicalSamFile = samFile.getCanonicalFile();
-            final File canonicalIndexFile = lookForIndex(canonicalSamFile);
-            if ( canonicalIndexFile != null) {
-                System.err.println("The index file " + canonicalIndexFile.getPath()
-                        + " was found by resolving the canonical path of a symlink: "
-                        + samFile.getPath() + " -> " + samFile.getCanonicalPath());
-            }
-            return canonicalIndexFile;
-        } catch (IOException e) {
-            return null;
-        }
-    }
-
-    private static File lookForIndex(final File samFile) { // If input is foo.bam, look for foo.bai
-        File indexFile;
-        final String fileName = samFile.getName();
-        if (fileName.endsWith(BamFileIoUtils.BAM_FILE_EXTENSION)) {
-            final String bai = fileName.substring(0, fileName.length() - BamFileIoUtils.BAM_FILE_EXTENSION.length()) + BAMIndex.BAMIndexSuffix;
-            indexFile = new File(samFile.getParent(), bai);
-            if (indexFile.isFile()) {
-                return indexFile;
-            }
-
-
-        } else if (fileName.endsWith(CramIO.CRAM_FILE_EXTENSION)) {
-            final String crai = fileName.substring(0, fileName.length() - CramIO.CRAM_FILE_EXTENSION.length()) + CRAIIndex.CRAI_INDEX_SUFFIX;
-            indexFile = new File(samFile.getParent(), crai);
-            if (indexFile.isFile()) {
-                return indexFile;
-            }
-
-            indexFile = new File(samFile.getParent(), samFile.getName() + CRAIIndex.CRAI_INDEX_SUFFIX);
-            if (indexFile.isFile()) {
-                return indexFile;
-            }
-        }
-
-        // If foo.bai doesn't exist look for foo.bam.bai
-        indexFile = new File(samFile.getParent(), samFile.getName() + BAMIndex.BAMIndexSuffix);
-        if (indexFile.isFile()) {
-            return indexFile;
-        }
-
-        return null;
-    }
-}
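A short usage sketch for the lookup logic above; the file name is a placeholder, and findIndex() returns null when no index file exists on disk:

    import htsjdk.samtools.SamFiles;

    import java.io.File;

    public class FindIndexSketch {
        public static void main(final String[] args) {
            final File bam = new File("reads.bam");          // placeholder input
            // Looks for reads.bai, then reads.bam.bai (or .crai variants for CRAM),
            // retrying on the canonical path if the input is a symlink.
            final File index = SamFiles.findIndex(bam);
            System.out.println(index == null ? "no index found" : "index: " + index.getPath());
        }
    }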
diff --git a/src/java/htsjdk/samtools/SamInputResource.java b/src/java/htsjdk/samtools/SamInputResource.java
deleted file mode 100644
index 2692c6e..0000000
--- a/src/java/htsjdk/samtools/SamInputResource.java
+++ /dev/null
@@ -1,360 +0,0 @@
-package htsjdk.samtools;
-
-import htsjdk.samtools.seekablestream.SeekableFileStream;
-import htsjdk.samtools.seekablestream.SeekableStream;
-import htsjdk.samtools.seekablestream.SeekableStreamFactory;
-import htsjdk.samtools.sra.SRAAccession;
-import htsjdk.samtools.util.Lazy;
-import htsjdk.samtools.util.RuntimeIOException;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.MalformedURLException;
-import java.net.URL;
-
-/**
- * Describes a SAM-like resource, including its data (where the records are), and optionally an index.
- * <p/>
- * A data or index source may originate from a {@link java.io.File}, {@link java.io.InputStream}, {@link URL}, or
- * {@link htsjdk.samtools.seekablestream.SeekableStream}; look for the appropriate overload for
- * {@code htsjdk.samtools.SamInputResource#of()}.
- *
- * @author mccowan
- */
-public class SamInputResource {
-    private final InputResource source;
-    private InputResource index;
-
-    SamInputResource(final InputResource data) {
-        this(data, null);
-    }
-
-    SamInputResource(final InputResource source, final InputResource index) {
-        if (source == null) throw new NullPointerException("source");
-        this.source = source;
-        this.index = index;
-    }
-
-    /** The resource that is the SAM data (e.g., records) */
-    InputResource data() {
-        return source;
-    }
-
-    /**
-     * The resource that is the SAM index
-     *
-     * @return null, if no index is defined for this resource
-     */
-    InputResource indexMaybe() {
-        return index;
-    }
-
-    @Override
-    public String toString() {
-        return String.format("data=%s;index=%s", source, index);
-    }
-
-    /** Creates a {@link SamInputResource} reading from the provided resource, with no index. */
-    public static SamInputResource of(final File file) { return new SamInputResource(new FileInputResource(file)); }
-
-    /** Creates a {@link SamInputResource} reading from the provided resource, with no index. */
-    public static SamInputResource of(final InputStream inputStream) { return new SamInputResource(new InputStreamInputResource(inputStream)); }
-
-    /** Creates a {@link SamInputResource} reading from the provided resource, with no index. */
-    public static SamInputResource of(final URL url) { return new SamInputResource(new UrlInputResource(url)); }
-
-    /** Creates a {@link SamInputResource} reading from the provided resource, with no index. */
-    public static SamInputResource of(final SeekableStream seekableStream) { return new SamInputResource(new SeekableStreamInputResource(seekableStream)); }
-
-    public static SamInputResource of(final SRAAccession acc) { return new SamInputResource(new SRAInputResource(acc)); }
-
-    /** Creates a {@link SamInputResource} from a string specifying *either* a URL or a file path. */
-    public static SamInputResource of(final String string) { 
-      try {
-        URL url = new URL(string);    // this will throw if it's not a URL
-        return of(url); 
-      } catch (MalformedURLException e) {
-       // ignore
-      }
-      return of(new File(string));
-    }
-    
-    /** Updates the index to point at the provided resource, then returns itself. */
-    public SamInputResource index(final File file) {
-        this.index = new FileInputResource(file);
-        return this;
-    }
-
-    /** Updates the index to point at the provided resource, then returns itself. */
-    public SamInputResource index(final InputStream inputStream) {
-        this.index = new InputStreamInputResource(inputStream);
-        return this;
-    }
-
-    /** Updates the index to point at the provided resource, then returns itself. */
-    public SamInputResource index(final URL url) {
-        this.index = new UrlInputResource(url);
-        return this;
-    }
-
-    /** Updates the index to point at the provided resource, then returns itself. */
-    public SamInputResource index(final SeekableStream seekableStream) {
-        this.index = new SeekableStreamInputResource(seekableStream);
-        return this;
-    }
-
-}
-
-/**
- * Describes an arbitrary input source, which is something that can be accessed as either a
- * {@link htsjdk.samtools.seekablestream.SeekableStream} or {@link java.io.InputStream}.  A concrete implementation of this class exists for
- * each of {@link InputResource.Type}.
- */
-abstract class InputResource {
-    protected InputResource(final Type type) {this.type = type;}
-
-    enum Type {
-        FILE, URL, SEEKABLE_STREAM, INPUT_STREAM, SRA_ACCESSION
-    }
-
-    private final Type type;
-
-    final Type type() {
-        return type;
-    }
-
-    /** Returns null if this resource cannot be represented as a {@link File}. */
-    abstract File asFile();
-
-    /** Returns null if this resource cannot be represented as a {@link URL}. */
-    abstract URL asUrl();
-
-    /** Returns null if this resource cannot be represented as a {@link htsjdk.samtools.seekablestream.SeekableStream}. */
-    abstract SeekableStream asUnbufferedSeekableStream();
-
-    /** All resource types support {@link java.io.InputStream} generation. */
-    abstract InputStream asUnbufferedInputStream();
-
-    /** SRA archive resource */
-    abstract SRAAccession asSRAAccession();
-
-    @Override
-    public String toString() {
-        final String childToString;
-        switch (type()) {
-            case FILE:
-                childToString = asFile().toString();
-                break;
-            case INPUT_STREAM:
-                childToString = asUnbufferedInputStream().toString();
-                break;
-            case SEEKABLE_STREAM:
-                childToString = asUnbufferedSeekableStream().toString();
-                break;
-            case URL:
-                childToString = asUrl().toString();
-                break;
-            case SRA_ACCESSION:
-                childToString = asSRAAccession().toString();
-                break;
-            default:
-                throw new IllegalStateException();
-        }
-        return String.format("%s:%s", type(), childToString);
-    }
-}
-
-class FileInputResource extends InputResource {
-
-    final File fileResource;
-    final Lazy<SeekableStream> lazySeekableStream = new Lazy<SeekableStream>(new Lazy.LazyInitializer<SeekableStream>() {
-        @Override
-        public SeekableStream make() {
-            try {
-                return new SeekableFileStream(fileResource);
-            } catch (final FileNotFoundException e) {
-                throw new RuntimeIOException(e);
-            }
-        }
-    });
-
-
-    FileInputResource(final File fileResource) {
-        super(Type.FILE);
-        this.fileResource = fileResource;
-    }
-
-    @Override
-    public File asFile() {
-        return fileResource;
-    }
-
-    @Override
-    public URL asUrl() {
-        return null;
-    }
-
-    @Override
-    public SeekableStream asUnbufferedSeekableStream() {
-        return lazySeekableStream.get();
-    }
-
-    @Override
-    public InputStream asUnbufferedInputStream() {
-        return asUnbufferedSeekableStream();
-    }
-
-    @Override
-    public SRAAccession asSRAAccession() {
-        return null;
-    }
-}
-
-class UrlInputResource extends InputResource {
-
-    final URL urlResource;
-    final Lazy<SeekableStream> lazySeekableStream = new Lazy<SeekableStream>(new Lazy.LazyInitializer<SeekableStream>() {
-        @Override
-        public SeekableStream make() {
-            try { return SeekableStreamFactory.getInstance().getStreamFor(urlResource); }
-            catch (final IOException ioe) { throw new RuntimeIOException(ioe); }
-        }
-    });
-
-    UrlInputResource(final URL urlResource) {
-        super(Type.URL);
-        this.urlResource = urlResource;
-    }
-
-    @Override
-    public File asFile() {
-        return null;
-    }
-
-    @Override
-    public URL asUrl() {
-        return urlResource;
-    }
-
-    @Override
-    public SeekableStream asUnbufferedSeekableStream() {
-        return lazySeekableStream.get();
-    }
-
-    @Override
-    public InputStream asUnbufferedInputStream() {
-        return asUnbufferedSeekableStream();
-    }
-
-    @Override
-    public SRAAccession asSRAAccession() {
-        return null;
-    }
-}
-
-class SeekableStreamInputResource extends InputResource {
-
-    final SeekableStream seekableStreamResource;
-
-    SeekableStreamInputResource(final SeekableStream seekableStreamResource) {
-        super(Type.SEEKABLE_STREAM);
-        this.seekableStreamResource = seekableStreamResource;
-    }
-
-    @Override
-    File asFile() {
-        return null;
-    }
-
-    @Override
-    URL asUrl() {
-        return null;
-    }
-
-    @Override
-    SeekableStream asUnbufferedSeekableStream() {
-        return seekableStreamResource;
-    }
-
-    @Override
-    InputStream asUnbufferedInputStream() {
-        return asUnbufferedSeekableStream();
-    }
-
-    @Override
-    public SRAAccession asSRAAccession() {
-        return null;
-    }
-}
-
-class InputStreamInputResource extends InputResource {
-
-    final InputStream inputStreamResource;
-
-    InputStreamInputResource(final InputStream inputStreamResource) {
-        super(Type.INPUT_STREAM);
-        this.inputStreamResource = inputStreamResource;
-    }
-
-    @Override
-    File asFile() {
-        return null;
-    }
-
-    @Override
-    URL asUrl() {
-        return null;
-    }
-
-    @Override
-    SeekableStream asUnbufferedSeekableStream() {
-        return null;
-    }
-
-    @Override
-    InputStream asUnbufferedInputStream() {
-        return inputStreamResource;
-    }
-
-    @Override
-    public SRAAccession asSRAAccession() {
-        return null;
-    }
-}
-
-class SRAInputResource extends InputResource {
-
-    final SRAAccession accession;
-
-    SRAInputResource(final SRAAccession accession) {
-        super(Type.SRA_ACCESSION);
-        this.accession = accession;
-    }
-
-    @Override
-    File asFile() {
-        return null;
-    }
-
-    @Override
-    URL asUrl() {
-        return null;
-    }
-
-    @Override
-    SeekableStream asUnbufferedSeekableStream() {
-        return null;
-    }
-
-    @Override
-    InputStream asUnbufferedInputStream() {
-        return null;
-    }
-
-    @Override
-    public SRAAccession asSRAAccession() {
-        return accession;
-    }
-}
\ No newline at end of file
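A sketch of the fluent construction defined above, pairing URL-backed data with a file-backed index; the URL and path are placeholders:

    import htsjdk.samtools.SamInputResource;

    import java.io.File;
    import java.net.MalformedURLException;
    import java.net.URL;

    public class InputResourceSketch {
        public static void main(final String[] args) throws MalformedURLException {
            final SamInputResource resource =
                    SamInputResource.of(new URL("http://example.com/data.bam"))
                                    .index(new File("data.bam.bai"));
            // toString() reports each side as TYPE:value, e.g. "data=URL:...;index=FILE:..."
            System.out.println(resource);
        }
    }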
diff --git a/src/java/htsjdk/samtools/SamReaderFactory.java b/src/java/htsjdk/samtools/SamReaderFactory.java
deleted file mode 100644
index 2e62a53..0000000
--- a/src/java/htsjdk/samtools/SamReaderFactory.java
+++ /dev/null
@@ -1,507 +0,0 @@
-package htsjdk.samtools;
-
-import htsjdk.samtools.cram.ref.CRAMReferenceSource;
-import htsjdk.samtools.cram.ref.ReferenceSource;
-import htsjdk.samtools.seekablestream.SeekableStream;
-import htsjdk.samtools.sra.SRAAccession;
-import htsjdk.samtools.util.BlockCompressedInputStream;
-import htsjdk.samtools.util.BlockCompressedStreamConstants;
-import htsjdk.samtools.util.CloserUtil;
-import htsjdk.samtools.util.IOUtil;
-import htsjdk.samtools.util.Log;
-import htsjdk.samtools.util.RuntimeIOException;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Collections;
-import java.util.EnumSet;
-import java.util.zip.GZIPInputStream;
-
-/**
- * <p>Describes the functionality for producing {@link SamReader}, and offers a
- * handful of static generators.</p>
- * <pre>
- *     SamReaderFactory.makeDefault().open(new File("/my/bam.bam"));
- * </pre>
- * <p>Example: Configure a factory</p>
- * <pre>
- *      final {@link SamReaderFactory} factory =
- *          SamReaderFactory.makeDefault()
- *              .enable({@link Option#INCLUDE_SOURCE_IN_RECORDS}, {@link Option#VALIDATE_CRC_CHECKSUMS})
- *              .validationStringency({@link ValidationStringency#SILENT});
- *
- * </pre>
- * <p>Example: Open two bam files from different sources, using different options</p>
- * <pre>
- *     final {@link SamReaderFactory} factory =
- *          SamReaderFactory.makeDefault()
- *              .enable({@link Option#INCLUDE_SOURCE_IN_RECORDS}, {@link Option#VALIDATE_CRC_CHECKSUMS})
- *              .validationStringency({@link ValidationStringency#SILENT});
- *
- *     // File-based bam
- *     final {@link SamReader} fileReader = factory.open(new File("/my/bam.bam"));
- *
- *     // HTTP-hosted BAM with index from an arbitrary stream
- *     final SeekableStream myBamIndexStream = ...
- *     final {@link SamInputResource} resource =
- *          {@link SamInputResource}.of(new URL("http://example.com/data.bam")).index(myBamIndexStream);
- *     final {@link SamReader} complicatedReader = factory.open(resource);
- * </pre>
- *
- * @author mccowan
- */
-public abstract class SamReaderFactory {
-
-    private static ValidationStringency defaultValidationStringency = ValidationStringency.DEFAULT_STRINGENCY;
-    
-    abstract public SamReader open(final File file);
-
-    abstract public SamReader open(final SamInputResource resource);
-
-    abstract public ValidationStringency validationStringency();
-
-    abstract public CRAMReferenceSource referenceSource();
-
-    /** Set this factory's {@link htsjdk.samtools.SAMRecordFactory} to the provided one, then returns itself. */
-    abstract public SamReaderFactory samRecordFactory(final SAMRecordFactory samRecordFactory);
-
-    /** Enables the provided {@link Option}s, then returns itself. */
-    abstract public SamReaderFactory enable(final Option... options);
-
-    /** Disables the provided {@link Option}s, then returns itself. */
-    abstract public SamReaderFactory disable(final Option... options);
-
-    /** Sets a specific Option to a boolean value. */
-    abstract public SamReaderFactory setOption(final Option option, boolean value);
-
-    /** Sets the specified reference sequence. */
-    abstract public SamReaderFactory referenceSequence(File referenceSequence);
-
-    /** Sets the specified CRAM reference source. */
-    abstract public SamReaderFactory referenceSource(CRAMReferenceSource referenceSequence);
-
-    /** Utility method to open the file, get the header, and close the file. */
-    abstract public SAMFileHeader getFileHeader(File samFile);
-
-    /** Reapplies any changed options to the reader. */
-    abstract public void reapplyOptions(SamReader reader);
-
-    /** Set this factory's {@link ValidationStringency} to the provided one, then returns itself. */
-    abstract public SamReaderFactory validationStringency(final ValidationStringency validationStringency);
-
-    /** Set whether readers created by this factory will use asynchronous IO.
- *     If this method is not called, this flag will default to the value of {@link Defaults#USE_ASYNC_IO_FOR_SAMTOOLS}.
-     * Note that this option may not be applicable to all readers returned from this factory.
-     * Returns the factory itself. */
-    abstract public SamReaderFactory setUseAsyncIo(final boolean asynchronousIO);
-
-    private static SamReaderFactoryImpl DEFAULT =
-            new SamReaderFactoryImpl(Option.DEFAULTS, defaultValidationStringency, DefaultSAMRecordFactory.getInstance());
-
-    public static void setDefaultValidationStringency(final ValidationStringency defaultValidationStringency) {
-        SamReaderFactory.defaultValidationStringency = defaultValidationStringency;
-        // The default may have changed, so reset the default SamReader
-        DEFAULT = new SamReaderFactoryImpl(Option.DEFAULTS, defaultValidationStringency, DefaultSAMRecordFactory.getInstance());
-    }
-
-    /** Creates a copy of the default {@link SamReaderFactory}. */
-    public static SamReaderFactory makeDefault() {
-        return SamReaderFactoryImpl.copyOf(DEFAULT);
-    }
-
-    /**
-     * Creates an "empty" factory with no enabled {@link Option}s, {@link ValidationStringency#DEFAULT_STRINGENCY}, and
-     * {@link htsjdk.samtools.DefaultSAMRecordFactory}.
-     */
-    public static SamReaderFactory make() {
-        return new SamReaderFactoryImpl(EnumSet.noneOf(Option.class), ValidationStringency.DEFAULT_STRINGENCY, DefaultSAMRecordFactory.getInstance());
-    }
-
-    private static class SamReaderFactoryImpl extends SamReaderFactory {
-        private final static Log LOG = Log.getInstance(SamReaderFactory.class);
-        private final EnumSet<Option> enabledOptions;
-        private ValidationStringency validationStringency;
-        private boolean asynchronousIO = Defaults.USE_ASYNC_IO_FOR_SAMTOOLS;
-        private SAMRecordFactory samRecordFactory;
-        private CustomReaderFactory customReaderFactory;
-        private CRAMReferenceSource referenceSource;
-
-        private SamReaderFactoryImpl(final EnumSet<Option> enabledOptions, final ValidationStringency validationStringency, final SAMRecordFactory samRecordFactory) {
-            this.enabledOptions = EnumSet.copyOf(enabledOptions);
-            this.samRecordFactory = samRecordFactory;
-            this.validationStringency = validationStringency;
-            this.customReaderFactory = CustomReaderFactory.getInstance();
-        }
-   
-        @Override
-        public SamReader open(final File file) {
-            final SamInputResource r = SamInputResource.of(file);
-            final File indexMaybe = SamFiles.findIndex(file);
-            if (indexMaybe != null) r.index(indexMaybe);
-            return open(r);
-        }
-
-
-        @Override
-        public ValidationStringency validationStringency() {
-            return validationStringency;
-        }
-
-        @Override
-        public CRAMReferenceSource referenceSource() {
-            return referenceSource;
-        }
-
-        @Override
-        public SamReaderFactory samRecordFactory(final SAMRecordFactory samRecordFactory) {
-            this.samRecordFactory = samRecordFactory;
-            return this;
-        }
-
-        @Override
-        public SamReaderFactory enable(final Option... options) {
-            Collections.addAll(this.enabledOptions, options);
-            return this;
-        }
-
-        @Override
-        public SamReaderFactory disable(final Option... options) {
-            for (final Option option : options) {
-                this.enabledOptions.remove(option);
-            }
-            return this;
-        }
-
-        @Override
-        public SamReaderFactory setOption(final Option option, final boolean value) {
-            if (value) {
-                return enable(option);
-            } else {
-                return disable(option);
-            }
-        }
-
-        @Override
-        public SamReaderFactory referenceSequence(final File referenceSequence) {
-            this.referenceSource = new ReferenceSource(referenceSequence);
-            return this;
-        }
-
-        @Override
-        public SamReaderFactory referenceSource(final CRAMReferenceSource referenceSource) {
-            this.referenceSource = referenceSource;
-            return this;
-        }
-
-        @Override
-        public SAMFileHeader getFileHeader(final File samFile) {
-            final SamReader reader = open(samFile);
-            final SAMFileHeader header = reader.getFileHeader();
-            CloserUtil.close(reader);
-            return header;
-        }
-
-        @Override
-        public void reapplyOptions(final SamReader reader) {
-            for (final Option option : enabledOptions) {
-                option.applyTo((SamReader.PrimitiveSamReaderToSamReaderAdapter) reader);
-            }
-        }
-
-        @Override
-        public SamReaderFactory validationStringency(final ValidationStringency validationStringency) {
-            this.validationStringency = validationStringency;
-            return this;
-        }
-
-        @Override
-        public SamReaderFactory setUseAsyncIo(final boolean asynchronousIO){
-            this.asynchronousIO = asynchronousIO;
-            return this;
-        }
-
-        @Override
-        public SamReader open(final SamInputResource resource) {
-            final SamReader.PrimitiveSamReader primitiveSamReader;
-            try {
-                final InputResource data = resource.data();
-                final InputResource indexMaybe = resource.indexMaybe();
-                final boolean indexDefined = indexMaybe != null;
-
-                final InputResource.Type type = data.type();
-                if (type == InputResource.Type.URL) {
-                  SamReader reader = customReaderFactory.maybeOpen(
-                      data.asUrl());
-                  if (reader != null) {
-                    return reader;
-                  }
-                }
-                if (type == InputResource.Type.SEEKABLE_STREAM || type == InputResource.Type.URL) {
-                    if (SamStreams.sourceLikeBam(data.asUnbufferedSeekableStream())) {
-                        final SeekableStream bufferedIndexStream;
-                        if (indexDefined && indexMaybe.asUnbufferedSeekableStream() != null) {
-                            bufferedIndexStream = IOUtil.maybeBufferedSeekableStream(indexMaybe.asUnbufferedSeekableStream());
-                        } else {
-                            // TODO: Throw an exception here?  An index _may_ have been provided, but we're ignoring it
-                            bufferedIndexStream = null;
-                        }
-                        primitiveSamReader = new BAMFileReader(
-                                IOUtil.maybeBufferedSeekableStream(data.asUnbufferedSeekableStream()),
-                                bufferedIndexStream,
-                                false,
-                                asynchronousIO,
-                                validationStringency,
-                                this.samRecordFactory
-                        );
-                    } else {
-                        throw new SAMFormatException("Unrecognized file format: " + data.asUnbufferedSeekableStream());
-                    }
-                } else if (type == InputResource.Type.SRA_ACCESSION) {
-                    primitiveSamReader = new SRAFileReader(data.asSRAAccession());
-                } else {
-                    InputStream bufferedStream =
-                            IOUtil.maybeBufferInputStream(
-                                    data.asUnbufferedInputStream(),
-                                    Math.max(Defaults.BUFFER_SIZE, BlockCompressedStreamConstants.MAX_COMPRESSED_BLOCK_SIZE)
-                            );
-                    File sourceFile = data.asFile();
-                    final File indexFile = indexMaybe == null ? null : indexMaybe.asFile();
-                    if (SamStreams.isBAMFile(bufferedStream)) {
-                        if (sourceFile == null || !sourceFile.isFile()) {
-                            // Handle case in which file is a named pipe, e.g. /dev/stdin or created by mkfifo
-                            primitiveSamReader = new BAMFileReader(bufferedStream, indexFile, false, asynchronousIO, validationStringency, this.samRecordFactory);
-                        } else {
-                            bufferedStream.close();
-                            primitiveSamReader = new BAMFileReader(sourceFile, indexFile, false, asynchronousIO, validationStringency, this.samRecordFactory);
-                        }
-                    } else if (BlockCompressedInputStream.isValidFile(bufferedStream)) {
-                        primitiveSamReader = new SAMTextReader(new BlockCompressedInputStream(bufferedStream), validationStringency, this.samRecordFactory);
-                    } else if (SamStreams.isGzippedSAMFile(bufferedStream)) {
-                        primitiveSamReader = new SAMTextReader(new GZIPInputStream(bufferedStream), validationStringency, this.samRecordFactory);
-                    } else if (SamStreams.isCRAMFile(bufferedStream)) {
-                        if (referenceSource == null) {
-                            referenceSource = ReferenceSource.getDefaultCRAMReferenceSource();
-                        }
-                        if (sourceFile == null || !sourceFile.isFile()) {
-                            primitiveSamReader = new CRAMFileReader(bufferedStream, indexFile, referenceSource, validationStringency);
-                        } else {
-                            bufferedStream.close();
-                            primitiveSamReader = new CRAMFileReader(sourceFile, indexFile, referenceSource, validationStringency);
-                        }
-                    } else if (sourceFile != null && isSra(sourceFile)) {
-                        if (bufferedStream != null) {
-                            bufferedStream.close();
-                        }
-                        primitiveSamReader = new SRAFileReader(new SRAAccession(sourceFile.getPath()));
-                    } else {
-                        if (indexDefined) {
-                            bufferedStream.close();
-                            throw new RuntimeException("Cannot use index file with textual SAM file");
-                        }
-                        primitiveSamReader = new SAMTextReader(bufferedStream, sourceFile, validationStringency, this.samRecordFactory);
-                    }
-                }
-
-                // Apply the options defined by this factory to this reader
-                final SamReader.PrimitiveSamReaderToSamReaderAdapter reader =
-                        new SamReader.PrimitiveSamReaderToSamReaderAdapter(primitiveSamReader, resource);
-
-                for (final Option option : enabledOptions) {
-                    option.applyTo(reader);
-                }
-
-                return reader;
-            } catch (final IOException e) {
-                throw new RuntimeIOException(e);
-            }
-        }
-
-        /** Attempts to detect whether the file is an SRA accessioned file. If SRA support is not available, returns false. */
-        private boolean isSra(final File sourceFile) {
-            try {
-                return SRAAccession.isValid(sourceFile.getPath());
-            } catch (final Exception e) {
-                return false;
-            }
-        }
-
-        public static SamReaderFactory copyOf(final SamReaderFactoryImpl target) {
-            return new SamReaderFactoryImpl(target.enabledOptions, target.validationStringency, target.samRecordFactory);
-        }
-    }
-
-    /** A collection of binary {@link SamReaderFactory} options. */
-    public enum Option {
-        /**
-         * The factory's {@link SamReader}s will produce populated (non-null) values when calling {@link SAMRecord#getFileSource()}.
-         * <p/>
-         * This option increases memory footprint slightly per {@link htsjdk.samtools.SAMRecord}.
-         */
-        INCLUDE_SOURCE_IN_RECORDS {
-            @Override
-            void applyTo(final BAMFileReader underlyingReader, final SamReader reader) {
-                underlyingReader.enableFileSource(reader, true);
-            }
-
-            @Override
-            void applyTo(final SAMTextReader underlyingReader, final SamReader reader) {
-                underlyingReader.enableFileSource(reader, true);
-            }
-
-            @Override
-            void applyTo(final CRAMFileReader underlyingReader, final SamReader reader) {
-                underlyingReader.enableFileSource(reader, true);
-            }
-
-            @Override
-            void applyTo(final SRAFileReader underlyingReader, final SamReader reader) {
-                underlyingReader.enableFileSource(reader, true);
-            }
-        },
-
-        /**
-         * The factory's {@link SamReader}s' {@link SamReader#indexing()}'s calls to {@link SamReader.Indexing#getIndex()} will produce
-         * {@link BAMIndex}es that do some caching in memory instead of reading the index from the disk for each query operation.
-         *
-         * @see SamReader#indexing()
-         * @see htsjdk.samtools.SamReader.Indexing#getIndex()
-         */
-        CACHE_FILE_BASED_INDEXES {
-            @Override
-            void applyTo(final BAMFileReader underlyingReader, final SamReader reader) {
-                underlyingReader.enableIndexCaching(true);
-            }
-
-            @Override
-            void applyTo(final SAMTextReader underlyingReader, final SamReader reader) {
-                logDebugIgnoringOption(reader, this);
-            }
-
-            @Override
-            void applyTo(final CRAMFileReader underlyingReader, final SamReader reader) {
-                underlyingReader.enableIndexCaching(true);
-            }
-
-            @Override
-            void applyTo(final SRAFileReader underlyingReader, final SamReader reader) {
-                underlyingReader.enableIndexCaching(true);
-            }
-        },
-
-        /**
-         * The factory's {@link SamReader}s will not use memory mapping for accessing index files (which is used by default).  This is
-         * slower but more scalable when accessing large numbers of BAM files sequentially.
-         *
-         * @see SamReader#indexing()
-         * @see htsjdk.samtools.SamReader.Indexing#getIndex()
-         */
-        DONT_MEMORY_MAP_INDEX {
-            @Override
-            void applyTo(final BAMFileReader underlyingReader, final SamReader reader) {
-                underlyingReader.enableIndexMemoryMapping(false);
-            }
-
-            @Override
-            void applyTo(final SAMTextReader underlyingReader, final SamReader reader) {
-                logDebugIgnoringOption(reader, this);
-            }
-
-            @Override
-            void applyTo(final CRAMFileReader underlyingReader, final SamReader reader) {
-                underlyingReader.enableIndexMemoryMapping(false);
-            }
-
-            @Override
-            void applyTo(final SRAFileReader underlyingReader, final SamReader reader) {
-                underlyingReader.enableIndexMemoryMapping(false);
-            }
-        },
-
-        /**
-         * Eagerly decode {@link htsjdk.samtools.SamReader}'s {@link htsjdk.samtools.SAMRecord}s, which can reduce memory footprint if many
-         * fields are being read per record, or if fields are going to be updated.
-         */
-        EAGERLY_DECODE {
-            @Override
-            void applyTo(final BAMFileReader underlyingReader, final SamReader reader) {
-                underlyingReader.setEagerDecode(true);
-            }
-
-            @Override
-            void applyTo(final SAMTextReader underlyingReader, final SamReader reader) {
-                logDebugIgnoringOption(reader, this);
-            }
-
-            @Override
-            void applyTo(final CRAMFileReader underlyingReader, final SamReader reader) {
-                logDebugIgnoringOption(reader, this);
-            }
-
-            @Override
-            void applyTo(final SRAFileReader underlyingReader, final SamReader reader) {
-                logDebugIgnoringOption(reader, this);
-            }
-        },
-
-        /**
-         * For {@link htsjdk.samtools.SamReader}s backed by block-compressed streams, enable CRC validation of those streams.  This is an
-         * expensive operation, but serves to ensure validity of the stream.
-         */
-        VALIDATE_CRC_CHECKSUMS {
-            @Override
-            void applyTo(final BAMFileReader underlyingReader, final SamReader reader) {
-                underlyingReader.enableCrcChecking(true);
-            }
-
-            @Override
-            void applyTo(final SAMTextReader underlyingReader, final SamReader reader) {
-                logDebugIgnoringOption(reader, this);
-            }
-
-            @Override
-            void applyTo(final CRAMFileReader underlyingReader, final SamReader reader) {
-                logDebugIgnoringOption(reader, this);
-            }
-
-            @Override
-            void applyTo(final SRAFileReader underlyingReader, final SamReader reader) {
-                logDebugIgnoringOption(reader, this);
-            }
-
-        };
-
-        public static EnumSet<Option> DEFAULTS = EnumSet.noneOf(Option.class);
-
-        /** Applies this option to the provided reader, if applicable. */
-        void applyTo(final SamReader.PrimitiveSamReaderToSamReaderAdapter reader) {
-            final SamReader.PrimitiveSamReader underlyingReader = reader.underlyingReader();
-            if (underlyingReader instanceof BAMFileReader) {
-                applyTo((BAMFileReader) underlyingReader, reader);
-            } else if (underlyingReader instanceof SAMTextReader) {
-                applyTo((SAMTextReader) underlyingReader, reader);
-            } else if (underlyingReader instanceof CRAMFileReader) {
-                applyTo((CRAMFileReader) underlyingReader, reader);
-            } else if (underlyingReader instanceof SRAFileReader) {
-                applyTo((SRAFileReader) underlyingReader, reader);
-            } else {
-                throw new IllegalArgumentException(String.format("Unrecognized reader type: %s.", underlyingReader.getClass()));
-            }
-
-        }
-
-        private static void logDebugIgnoringOption(final SamReader r, final Option option) {
-            LOG.debug(String.format("Ignoring %s option; does not apply to %s readers.", option, r.getClass().getSimpleName()));
-        }
-
-        private final static Log LOG = Log.getInstance(Option.class);
-
-        abstract void applyTo(final BAMFileReader underlyingReader, final SamReader reader);
-
-        abstract void applyTo(final SAMTextReader underlyingReader, final SamReader reader);
-
-        abstract void applyTo(final CRAMFileReader underlyingReader, final SamReader reader);
-
-        abstract void applyTo(final SRAFileReader underlyingReader, final SamReader reader);
-    }
-}
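
For reference, the Option values removed above are applied through SamReaderFactory when a reader is opened. A minimal sketch of how a caller might enable them follows; the input path is a placeholder, and the makeDefault/enable/open calls mirror the PrintReadsExample usage that appears later in this diff.

    import htsjdk.samtools.SamReader;
    import htsjdk.samtools.SamReaderFactory;
    import htsjdk.samtools.ValidationStringency;

    import java.io.File;
    import java.io.IOException;

    public class ReaderOptionsSketch {
        public static void main(final String[] args) throws IOException {
            // Options are forwarded to the underlying BAM/CRAM/SRA reader when the file is opened;
            // readers that cannot honor an option (e.g. SAM text) just log and ignore it.
            final SamReaderFactory factory = SamReaderFactory.makeDefault()
                    .validationStringency(ValidationStringency.SILENT)
                    .enable(SamReaderFactory.Option.CACHE_FILE_BASED_INDEXES)
                    .enable(SamReaderFactory.Option.DONT_MEMORY_MAP_INDEX);

            try (final SamReader reader = factory.open(new File(args[0]))) {
                System.out.println(reader.getFileHeader().getSortOrder());
            }
        }
    }
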
diff --git a/src/java/htsjdk/samtools/SamStreams.java b/src/java/htsjdk/samtools/SamStreams.java
deleted file mode 100644
index a6710bc..0000000
--- a/src/java/htsjdk/samtools/SamStreams.java
+++ /dev/null
@@ -1,99 +0,0 @@
-package htsjdk.samtools;
-
-import htsjdk.samtools.cram.structure.CramHeader;
-import htsjdk.samtools.seekablestream.SeekableStream;
-import htsjdk.samtools.util.BlockCompressedInputStream;
-import htsjdk.samtools.util.BlockCompressedStreamConstants;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Arrays;
-import java.util.zip.GZIPInputStream;
-
-/**
- * Utilities related to processing of {@link java.io.InputStream}s encoding SAM data
- * 
- * @author mccowan
- */
-public class SamStreams {
-    private static int readBytes(final InputStream stream, final byte[] buffer, final int offset, final int length)
-            throws IOException {
-        int bytesRead = 0;
-        while (bytesRead < length) {
-            final int count = stream.read(buffer, offset + bytesRead, length - bytesRead);
-            if (count <= 0) {
-                break;
-            }
-            bytesRead += count;
-        }
-        return bytesRead;
-    }
-
-    public static boolean isCRAMFile(final InputStream stream) throws IOException {
-        stream.mark(4);
-        final int buffSize = CramHeader.MAGIC.length;
-        final byte[] buffer = new byte[buffSize];
-        readBytes(stream, buffer, 0, buffSize);
-        stream.reset();
-
-        return Arrays.equals(buffer, CramHeader.MAGIC);
-    }
-
-    /**
-     * @param stream stream.markSupported() must be true
-     * @return true if this looks like a BAM file.
-     */
-    public static boolean isBAMFile(final InputStream stream)
-            throws IOException {
-        if (!BlockCompressedInputStream.isValidFile(stream)) {
-            return false;
-        }
-        final int buffSize = BlockCompressedStreamConstants.MAX_COMPRESSED_BLOCK_SIZE;
-        stream.mark(buffSize);
-        final byte[] buffer = new byte[buffSize];
-        readBytes(stream, buffer, 0, buffSize);
-        stream.reset();
-        try(final BlockCompressedInputStream bcis = new BlockCompressedInputStream(new ByteArrayInputStream(buffer))){
-            final byte[] magicBuf = new byte[4];
-            final int magicLength = readBytes(bcis, magicBuf, 0, 4);
-            return magicLength == BAMFileConstants.BAM_MAGIC.length && Arrays.equals(BAMFileConstants.BAM_MAGIC, magicBuf);
-        }
-    }
-
-    /**
-     * Checks whether the stream contains gzip-compressed data (e.g. a gzipped SAM file).
-     * Returns true if it does and false otherwise.
-     */
-    public static boolean isGzippedSAMFile(final InputStream stream) {
-        if (!stream.markSupported()) {
-            throw new IllegalArgumentException("Cannot test a stream that doesn't support marking.");
-        }
-        stream.mark(8000);
-
-        try {
-            final GZIPInputStream gunzip = new GZIPInputStream(stream);
-            final int ch = gunzip.read();
-            return true;
-        } catch (final IOException ioe) {
-            return false;
-        } finally {
-            try {
-                stream.reset();
-            } catch (final IOException ioe) {
-                throw new IllegalStateException("Could not reset stream.");
-            }
-        }
-    }
-
-    // It's too expensive to examine the remote file to determine its type,
-    // so rely on the file extension instead.
-    public static boolean sourceLikeBam(final SeekableStream strm) {
-        String source = strm.getSource();
-        if (source == null) return true;
-        source = source.toLowerCase();
-        // Source will typically be a file path or URL.
-        // If it's a URL, accept it when ".bam" ends the path or appears in a query parameter.
-        return source.endsWith(".bam") || source.contains(".bam?") || source.contains(".bam&") || source.contains(".bam%26");
-    }
-}
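
The sniffing helpers in SamStreams all require a mark-supporting stream. A small sketch of how they might be combined to guess a stream's format; the file path is a placeholder.

    import htsjdk.samtools.SamStreams;

    import java.io.BufferedInputStream;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    public class FormatSniffSketch {
        public static void main(final String[] args) throws IOException {
            // BufferedInputStream provides the mark()/reset() support the sniffers rely on.
            try (final InputStream in = new BufferedInputStream(new FileInputStream(args[0]))) {
                if (SamStreams.isBAMFile(in)) {
                    System.out.println("looks like BAM");
                } else if (SamStreams.isCRAMFile(in)) {
                    System.out.println("looks like CRAM");
                } else if (SamStreams.isGzippedSAMFile(in)) {
                    System.out.println("looks like gzipped SAM");
                } else {
                    System.out.println("probably plain SAM or something else");
                }
            }
        }
    }
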
diff --git a/src/java/htsjdk/samtools/cram/CRAIEntry.java b/src/java/htsjdk/samtools/cram/CRAIEntry.java
deleted file mode 100644
index 777dd13..0000000
--- a/src/java/htsjdk/samtools/cram/CRAIEntry.java
+++ /dev/null
@@ -1,177 +0,0 @@
-package htsjdk.samtools.cram;
-
-import htsjdk.samtools.cram.structure.Container;
-import htsjdk.samtools.cram.structure.Slice;
-import htsjdk.samtools.util.RuntimeIOException;
-
-import java.io.IOException;
-import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.Comparator;
-import java.util.List;
-
-/**
- * A class representing a CRAI index entry: file and alignment offsets for each slice.
- * Created by vadim on 10/08/2015.
- */
-public class CRAIEntry implements Comparable<CRAIEntry>, Cloneable {
-    public int sequenceId;
-    public int alignmentStart;
-    public int alignmentSpan;
-    public long containerStartOffset;
-    public int sliceOffset;
-    public int sliceSize;
-    public int sliceIndex;
-
-    private static int CRAI_INDEX_COLUMNS = 6;
-    private static String entryFormat = "%d\t%d\t%d\t%d\t%d\t%d";
-
-    public CRAIEntry() {
-    }
-
-    /**
-     * Create a CRAI Entry from a serialized CRAI index line.
-     *
-     * @param line string formatted as a CRAI index entry
-     * @throws CRAIIndex.CRAIIndexException
-     */
-    public CRAIEntry(final String line) throws CRAIIndex.CRAIIndexException {
-        final String[] chunks = line.split("\t");
-        if (chunks.length != CRAI_INDEX_COLUMNS) {
-            throw new CRAIIndex.CRAIIndexException(
-                    "Malformed CRAI index entry: expecting " + CRAI_INDEX_COLUMNS + " columns but got " + chunks.length);
-        }
-
-        try {
-            sequenceId = Integer.parseInt(chunks[0]);
-            alignmentStart = Integer.parseInt(chunks[1]);
-            alignmentSpan = Integer.parseInt(chunks[2]);
-            containerStartOffset = Long.parseLong(chunks[3]);
-            sliceOffset = Integer.parseInt(chunks[4]);
-            sliceSize = Integer.parseInt(chunks[5]);
-        } catch (final NumberFormatException e) {
-            throw new CRAIIndex.CRAIIndexException(e);
-        }
-    }
-
-    /**
-     * Serialize the entry to a CRAI index stream.
-     * @param os stream to write to
-     */
-    public void writeToStream(OutputStream os) {
-        try {
-            os.write(serializeToString().getBytes());
-            os.write('\n');
-        }
-        catch (IOException e) {
-            throw new RuntimeIOException(e);
-        }
-    }
-
-    /**
-     * Format the entry as a string suitable for serialization in the CRAI index
-     */
-    private String serializeToString() {
-        return String.format(entryFormat,
-                sequenceId, alignmentStart, alignmentSpan,
-                containerStartOffset, sliceOffset, sliceSize);
-    }
-
-    @Override
-    public String toString() { return serializeToString(); }
-
-    public static List<CRAIEntry> fromContainer(final Container container) {
-        final List<CRAIEntry> entries = new ArrayList<>(container.slices.length);
-        for (int i = 0; i < container.slices.length; i++) {
-            final Slice s = container.slices[i];
-            final CRAIEntry e = new CRAIEntry();
-            e.sequenceId = s.sequenceId;
-            e.alignmentStart = s.alignmentStart;
-            e.alignmentSpan = s.alignmentSpan;
-            e.containerStartOffset = s.containerOffset;
-            e.sliceOffset = container.landmarks[i];
-            e.sliceSize = s.size;
-
-            e.sliceIndex = i;
-            entries.add(e);
-        }
-        return entries;
-    }
-
-    @Override
-    public int compareTo(final CRAIEntry o) {
-        if (o == null) {
-            return 1;
-        }
-        if (sequenceId != o.sequenceId) {
-            return sequenceId - o.sequenceId;
-        }
-        if (alignmentStart != o.alignmentStart) {
-            return alignmentStart - o.alignmentStart;
-        }
-
-        return (int) (containerStartOffset - o.containerStartOffset);
-    }
-
-    @Override
-    public CRAIEntry clone() throws CloneNotSupportedException {
-        super.clone();
-        final CRAIEntry entry = new CRAIEntry();
-        entry.sequenceId = sequenceId;
-        entry.alignmentStart = alignmentStart;
-        entry.alignmentSpan = alignmentSpan;
-        entry.containerStartOffset = containerStartOffset;
-        entry.sliceOffset = sliceOffset;
-        entry.sliceSize = sliceSize;
-        return entry;
-    }
-
-    public static Comparator<CRAIEntry> byEnd = new Comparator<CRAIEntry>() {
-
-        @Override
-        public int compare(final CRAIEntry o1, final CRAIEntry o2) {
-            if (o1.sequenceId != o2.sequenceId) {
-                return o2.sequenceId - o1.sequenceId;
-            }
-            if (o1.alignmentStart + o1.alignmentSpan != o2.alignmentStart + o2.alignmentSpan) {
-                return o1.alignmentStart + o1.alignmentSpan - o2.alignmentStart - o2.alignmentSpan;
-            }
-
-            return (int) (o1.containerStartOffset - o2.containerStartOffset);
-        }
-    };
-
-    public static final Comparator<CRAIEntry> byStart = new Comparator<CRAIEntry>() {
-
-        @Override
-        public int compare(final CRAIEntry o1, final CRAIEntry o2) {
-            if (o1.sequenceId != o2.sequenceId) {
-                return o2.sequenceId - o1.sequenceId;
-            }
-            if (o1.alignmentStart != o2.alignmentStart) {
-                return o1.alignmentStart - o2.alignmentStart;
-            }
-
-            return (int) (o1.containerStartOffset - o2.containerStartOffset);
-        }
-    };
-
-
-    public static boolean intersect(final CRAIEntry e0, final CRAIEntry e1) {
-        if (e0.sequenceId != e1.sequenceId) {
-            return false;
-        }
-        if (e0.sequenceId < 0) {
-            return false;
-        }
-
-        final int a0 = e0.alignmentStart;
-        final int a1 = e1.alignmentStart;
-
-        final int b0 = a0 + e0.alignmentSpan;
-        final int b1 = a1 + e1.alignmentSpan;
-
-        return Math.abs(a0 + b0 - a1 - b1) < (e0.alignmentSpan + e1.alignmentSpan);
-
-    }
-}
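
A CRAI line is six tab-separated integers (sequence id, alignment start, alignment span, container offset, slice offset, slice size). A short sketch of parsing two hypothetical lines and checking the intersect() predicate, whose midpoint-distance test is equivalent to ordinary interval overlap; the numbers below are made up.

    import htsjdk.samtools.cram.CRAIEntry;

    public class CraiEntrySketch {
        public static void main(final String[] args) {
            // Columns: sequenceId, alignmentStart, alignmentSpan, containerStartOffset, sliceOffset, sliceSize.
            final CRAIEntry first = new CRAIEntry("0\t1000\t500\t12345\t100\t2000");
            final CRAIEntry second = new CRAIEntry("0\t1300\t400\t67890\t150\t1800");

            System.out.println(first);                                // re-serializes the six columns
            System.out.println(CRAIEntry.intersect(first, second));   // true: [1000,1500) overlaps [1300,1700)
        }
    }
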
diff --git a/src/java/htsjdk/samtools/cram/CRAIIndex.java b/src/java/htsjdk/samtools/cram/CRAIIndex.java
deleted file mode 100644
index 8d9e28b..0000000
--- a/src/java/htsjdk/samtools/cram/CRAIIndex.java
+++ /dev/null
@@ -1,161 +0,0 @@
-package htsjdk.samtools.cram;
-
-import htsjdk.samtools.CRAMBAIIndexer;
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.cram.structure.Slice;
-import htsjdk.samtools.seekablestream.SeekableMemoryStream;
-import htsjdk.samtools.seekablestream.SeekableStream;
-
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Scanner;
-import java.util.zip.GZIPInputStream;
-
-/**
- * A collection of static methods to read, write and convert a CRAI index.
- */
-public class CRAIIndex {
-    public static final String CRAI_INDEX_SUFFIX = ".crai";
-
-    public static void writeIndex(final OutputStream os, final List<CRAIEntry> index) throws IOException {
-        index.stream().forEach(e -> e.writeToStream(os));
-    }
-
-    public static List<CRAIEntry> readIndex(final InputStream is) throws CRAIIndexException {
-        final List<CRAIEntry> list = new LinkedList<CRAIEntry>();
-        final Scanner scanner = new Scanner(is);
-
-        try {
-            while (scanner.hasNextLine()) {
-                final String line = scanner.nextLine();
-                final CRAIEntry entry = new CRAIEntry(line);
-                list.add(entry);
-            }
-        } finally {
-            scanner.close();
-        }
-
-        return list;
-    }
-
-    public static List<CRAIEntry> find(final List<CRAIEntry> list, final int seqId, final int start, final int span) {
-        final boolean whole = start < 1 || span < 1;
-        final CRAIEntry query = new CRAIEntry();
-        query.sequenceId = seqId;
-        query.alignmentStart = start < 1 ? 1 : start;
-        query.alignmentSpan = span < 1 ? Integer.MAX_VALUE : span;
-        query.containerStartOffset = Long.MAX_VALUE;
-        query.sliceOffset = Integer.MAX_VALUE;
-        query.sliceSize = Integer.MAX_VALUE;
-
-        final List<CRAIEntry> l = new ArrayList<CRAIEntry>();
-        for (final CRAIEntry e : list) {
-            if (e.sequenceId != seqId) {
-                continue;
-            }
-            if (whole || CRAIEntry.intersect(e, query)) {
-                l.add(e);
-            }
-        }
-        Collections.sort(l, CRAIEntry.byStart);
-        return l;
-    }
-
-    public static CRAIEntry getLeftmost(final List<CRAIEntry> list) {
-        if (list == null || list.isEmpty()) {
-            return null;
-        }
-        CRAIEntry left = list.get(0);
-
-        for (final CRAIEntry e : list) {
-            if (e.alignmentStart < left.alignmentStart) {
-                left = e;
-            }
-        }
-
-        return left;
-    }
-
-    /**
-     * Find the index of the last aligned entry in the list. Assumes the index is sorted by coordinate
-     * and that unmapped entries (with sequence id = -1) follow the mapped entries.
-     *
-     * @param list a list of CRAI entries
-     * @return integer index of the last entry with sequence id not equal to -1
-     */
-    public static int findLastAlignedEntry(final List<CRAIEntry> list) {
-        if (list.isEmpty()) {
-            return -1;
-        }
-
-        int low = 0;
-        int high = list.size() - 1;
-
-        while (low <= high) {
-            final int mid = (low + high) >>> 1;
-            final CRAIEntry midVal = list.get(mid);
-
-            if (midVal.sequenceId >= 0) {
-                low = mid + 1;
-            } else {
-                high = mid - 1;
-            }
-        }
-        if (low >= list.size()) {
-            return list.size() - 1;
-        }
-        for (; low >= 0 && list.get(low).sequenceId == -1; low--) {
-        }
-        return low;
-    }
-
-    public static SeekableStream openCraiFileAsBaiStream(final File cramIndexFile, final SAMSequenceDictionary dictionary) throws IOException {
-        return openCraiFileAsBaiStream(new FileInputStream(cramIndexFile), dictionary);
-    }
-
-    public static SeekableStream openCraiFileAsBaiStream(final InputStream indexStream, final SAMSequenceDictionary dictionary) throws IOException, CRAIIndexException {
-        final List<CRAIEntry> full = CRAIIndex.readIndex(new GZIPInputStream(indexStream));
-        Collections.sort(full);
-
-        final SAMFileHeader header = new SAMFileHeader();
-        header.setSequenceDictionary(dictionary);
-
-        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        final CRAMBAIIndexer indexer = new CRAMBAIIndexer(baos, header);
-
-        for (final CRAIEntry entry : full) {
-            final Slice slice = new Slice();
-            slice.containerOffset = entry.containerStartOffset;
-            slice.alignmentStart = entry.alignmentStart;
-            slice.alignmentSpan = entry.alignmentSpan;
-            slice.sequenceId = entry.sequenceId;
-            slice.nofRecords = entry.sliceSize;
-            slice.index = entry.sliceIndex;
-            slice.offset = entry.sliceOffset;
-
-            indexer.processSingleReferenceSlice(slice);
-        }
-        indexer.finish();
-
-        return new SeekableMemoryStream(baos.toByteArray(), null);
-    }
-
-    public static class CRAIIndexException extends RuntimeException {
-
-        public CRAIIndexException(final String s) {
-            super(s);
-        }
-
-        public CRAIIndexException(final NumberFormatException e) {
-            super(e);
-        }
-    }
-}
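
On disk a .crai file is gzip-compressed text (openCraiFileAsBaiStream above gunzips it before conversion), so a caller gunzips before handing the text to readIndex(). A sketch of loading an index and querying a region; the path and the region are placeholders.

    import htsjdk.samtools.cram.CRAIEntry;
    import htsjdk.samtools.cram.CRAIIndex;

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.util.List;
    import java.util.zip.GZIPInputStream;

    public class CraiQuerySketch {
        public static void main(final String[] args) throws IOException {
            final List<CRAIEntry> index;
            try (final GZIPInputStream in = new GZIPInputStream(new FileInputStream(args[0]))) {
                index = CRAIIndex.readIndex(in);
            }

            // All slices overlapping the first megabase of sequence 0, sorted by alignment start.
            final List<CRAIEntry> hits = CRAIIndex.find(index, 0, 1, 1_000_000);
            // findLastAlignedEntry assumes the coordinate-sorted layout a CRAM writer produces.
            System.out.println(hits.size() + " slices overlap; last aligned entry is at index "
                    + CRAIIndex.findLastAlignedEntry(index));
        }
    }
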
diff --git a/src/java/htsjdk/samtools/cram/common/CramVersions.java b/src/java/htsjdk/samtools/cram/common/CramVersions.java
deleted file mode 100644
index 5e1b726..0000000
--- a/src/java/htsjdk/samtools/cram/common/CramVersions.java
+++ /dev/null
@@ -1,11 +0,0 @@
-package htsjdk.samtools.cram.common;
-
-public class CramVersions {
-    public static final Version CRAM_v2_1 = new Version(2, 1, 0);
-    public static final Version CRAM_v3 = new Version(3, 0, 0);
-
-    /**
-     * The default CRAM version when creating a new CRAM output file or stream.
-     */
-    public static final Version DEFAULT_CRAM_VERSION = CRAM_v2_1;
-}
diff --git a/src/java/htsjdk/samtools/cram/io/ExternalCompression.java b/src/java/htsjdk/samtools/cram/io/ExternalCompression.java
deleted file mode 100644
index 61a8357..0000000
--- a/src/java/htsjdk/samtools/cram/io/ExternalCompression.java
+++ /dev/null
@@ -1,151 +0,0 @@
-package htsjdk.samtools.cram.io;
-
-import htsjdk.samtools.cram.encoding.rans.RANS;
-import htsjdk.samtools.util.IOUtil;
-import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream;
-import org.apache.commons.compress.compressors.xz.XZCompressorInputStream;
-import org.apache.commons.compress.compressors.xz.XZCompressorOutputStream;
-import org.apache.tools.bzip2.CBZip2InputStream;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.zip.GZIPInputStream;
-import java.util.zip.GZIPOutputStream;
-
-/**
- * Methods to provide CRAM external compression/decompression features.
- */
-public class ExternalCompression {
-    private static final int GZIP_COMPRESSION_LEVEL = Integer.valueOf(System.getProperty("gzip.compression.level", "5"));
-
-    /**
-     * Compress a byte array into a GZIP blob. The method honors the {@link ExternalCompression#GZIP_COMPRESSION_LEVEL} compression level.
-     *
-     * @param data byte array to compress
-     * @return compressed blob
-     */
-    public static byte[] gzip(final byte[] data) throws IOException {
-        final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
-        final GZIPOutputStream gos = new GZIPOutputStream(byteArrayOutputStream) {
-            {
-                def.setLevel(GZIP_COMPRESSION_LEVEL);
-            }
-        };
-        IOUtil.copyStream(new ByteArrayInputStream(data), gos);
-        gos.close();
-
-        return byteArrayOutputStream.toByteArray();
-    }
-
-    /**
-     * Uncompress a GZIP data blob into a new byte array.
-     *
-     * @param data compressed data blob
-     * @return uncompressed data
-     * @throws IOException as per java IO contract
-     */
-    public static byte[] gunzip(final byte[] data) throws IOException {
-        final GZIPInputStream gzipInputStream = new GZIPInputStream(new ByteArrayInputStream(data));
-        return InputStreamUtils.readFully(gzipInputStream);
-    }
-
-    /**
-     * Compress a byte array into a BZIP2 blob.
-     *
-     * @param data byte array to compress
-     * @return compressed blob
-     * @throws IOException as per java IO contract
-     */
-    public static byte[] bzip2(final byte[] data) throws IOException {
-        final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
-        // Write the data through a BZIP2 compressor into an in-memory buffer.
-        try (final BZip2CompressorOutputStream bos = new BZip2CompressorOutputStream(byteArrayOutputStream)) {
-            bos.write(data);
-        }
-        return byteArrayOutputStream.toByteArray();
-    }
-
-    /**
-     * Uncompress a BZIP2 data blob into a new byte array.
-     *
-     * @param data compressed data blob
-     * @return uncompressed data
-     * @throws IOException as per java IO contract
-     */
-    @SuppressWarnings("ResultOfMethodCallIgnored")
-    public static byte[] unbzip2(final byte[] data) throws IOException {
-        final ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(data);
-        // Skip the two-byte "BZ" magic: ant's CBZip2InputStream expects the stream to start after it.
-        byteArrayInputStream.read();
-        byteArrayInputStream.read();
-        return InputStreamUtils.readFully(new CBZip2InputStream(byteArrayInputStream));
-    }
-
-    /**
-     * Compress a byte array into an rANS blob.
-     *
-     * @param data  byte array to compress
-     * @param order rANS order
-     * @return compressed blob
-     */
-    public static byte[] rans(final byte[] data, final RANS.ORDER order) {
-        final ByteBuffer buffer = RANS.compress(ByteBuffer.wrap(data), order, null);
-        return toByteArray(buffer);
-    }
-
-    /**
-     * Compress a byte array into an rANS blob.
-     *
-     * @param data  byte array to compress
-     * @param order rANS order
-     * @return compressed blob
-     */
-    public static byte[] rans(final byte[] data, final int order) {
-        final ByteBuffer buffer = RANS.compress(ByteBuffer.wrap(data), RANS.ORDER.fromInt(order), null);
-        return toByteArray(buffer);
-    }
-
-    /**
-     * Uncompress a rANS data blob into a new byte array.
-     *
-     * @param data compressed data blob
-     * @return uncompressed data
-     */
-    public static byte[] unrans(final byte[] data) {
-        final ByteBuffer buf = RANS.uncompress(ByteBuffer.wrap(data), null);
-        return toByteArray(buf);
-    }
-
-
-    /**
-     * Compress a byte array into an XZ blob.
-     *
-     * @param data byte array to compress
-     * @return compressed blob
-     */
-    public static byte[] xz(final byte[] data) throws IOException {
-        final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(data.length * 2);
-        final XZCompressorOutputStream xzCompressorOutputStream = new XZCompressorOutputStream(byteArrayOutputStream);
-        xzCompressorOutputStream.write(data);
-        xzCompressorOutputStream.close();
-        return byteArrayOutputStream.toByteArray();
-    }
-
-
-    /**
-     * Uncompress an XZ data blob into a new byte array.
-     *
-     * @param data compressed data blob
-     * @return uncompressed data
-     * @throws IOException as per java IO contract
-     */
-    public static byte[] unxz(final byte[] data) throws IOException {
-        final XZCompressorInputStream xzCompressorInputStream = new XZCompressorInputStream(new ByteArrayInputStream(data));
-        return InputStreamUtils.readFully(xzCompressorInputStream);
-    }
-
-
-    private static byte[] toByteArray(final ByteBuffer buffer) {
-        if (buffer.hasArray() && buffer.arrayOffset() == 0 && buffer.array().length == buffer.limit()) return buffer.array();
-
-        final byte[] bytes = new byte[buffer.remaining()];
-        buffer.get(bytes);
-        return bytes;
-    }
-}
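
A quick round-trip sketch for two of the codecs above; the sample bytes are arbitrary, and the rANS order is passed as the int 0 through the fromInt() overload.

    import htsjdk.samtools.cram.io.ExternalCompression;

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    public class ExternalCompressionSketch {
        public static void main(final String[] args) throws IOException {
            final byte[] data = "ACGTACGTACGTACGTACGT".getBytes(StandardCharsets.US_ASCII);

            // GZIP round trip (level taken from the gzip.compression.level property, default 5).
            final byte[] gz = ExternalCompression.gzip(data);
            System.out.println(Arrays.equals(data, ExternalCompression.gunzip(gz)));

            // rANS order-0 round trip.
            final byte[] r = ExternalCompression.rans(data, 0);
            System.out.println(Arrays.equals(data, ExternalCompression.unrans(r)));
        }
    }
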
diff --git a/src/java/htsjdk/samtools/cram/ref/ReferenceSource.java b/src/java/htsjdk/samtools/cram/ref/ReferenceSource.java
deleted file mode 100644
index ab531b9..0000000
--- a/src/java/htsjdk/samtools/cram/ref/ReferenceSource.java
+++ /dev/null
@@ -1,274 +0,0 @@
-/**
- * ****************************************************************************
- * Copyright 2013 EMBL-EBI
- * <p/>
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ****************************************************************************
- */
-package htsjdk.samtools.cram.ref;
-
-import htsjdk.samtools.Defaults;
-import htsjdk.samtools.SAMException;
-import htsjdk.samtools.SAMSequenceRecord;
-import htsjdk.samtools.cram.io.InputStreamUtils;
-import htsjdk.samtools.reference.ReferenceSequence;
-import htsjdk.samtools.reference.ReferenceSequenceFile;
-import htsjdk.samtools.reference.ReferenceSequenceFileFactory;
-import htsjdk.samtools.util.Log;
-import htsjdk.samtools.util.SequenceUtil;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.lang.ref.WeakReference;
-import java.net.URL;
-import java.nio.file.Path;
-import java.security.NoSuchAlgorithmException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.regex.Pattern;
-
-/**
- * Used to represent a CRAM reference, the backing source for which can either be
- * a file or the EBI ENA reference service.
- *
- * NOTE: In a future release, this class will be renamed and the functionality it
- * contains will be refactored and distributed into one or more separate reference
- * source implementations, each corresponding to the type of resource backing the
- * reference.
- */
-public class ReferenceSource implements CRAMReferenceSource {
-    private static final Log log = Log.getInstance(ReferenceSource.class);
-    private ReferenceSequenceFile rsFile;
-    private int downloadTriesBeforeFailing = 2;
-
-    private final Map<String, WeakReference<byte[]>> cacheW = new HashMap<String, WeakReference<byte[]>>();
-
-    private ReferenceSource() {
-    }
-
-    public ReferenceSource(final File file) {
-        this(file == null ? null : file.toPath());
-    }
-
-    public ReferenceSource(final Path path) {
-        if (path != null)
-            rsFile = ReferenceSequenceFileFactory.getReferenceSequenceFile(path);
-    }
-
-    public ReferenceSource(final ReferenceSequenceFile rsFile) {
-        this.rsFile = rsFile;
-    }
-
-    /**
-     * Attempts to construct a default CRAMReferenceSource for use with CRAM files when
-     * one has not been explicitly provided.
-     *
-     * @return CRAMReferenceSource if one can be acquired. Guaranteed to not be null if none
-     * of the listed exceptions is thrown.
-     * @throws IllegalStateException if no default reference source can be acquired
-     * @throws IllegalArgumentException if the reference_fasta system property refers to a
-     * file that doesn't exist
-     *<p>
-     * Construct a default reference source to use when an explicit reference has not been
-     * provided by checking for fallback sources in this order:
-     *<p><ul>
-     * <li>Defaults.REFERENCE_FASTA - the value of the system property "reference_fasta". If set,
-     * must refer to a valid reference file.</li>
-     * <li>ENA Reference Service if it is enabled</li>
-     * </ul>
-     */
-     public static CRAMReferenceSource getDefaultCRAMReferenceSource() {
-        if (null != Defaults.REFERENCE_FASTA) {
-            if (Defaults.REFERENCE_FASTA.exists()) {
-                return new ReferenceSource(Defaults.REFERENCE_FASTA);
-            }
-            else {
-                throw new IllegalArgumentException(
-                        "The file specified by the reference_fasta property does not exist: " + Defaults.REFERENCE_FASTA.getName());
-            }
-        }
-        else if (Defaults.USE_CRAM_REF_DOWNLOAD) {
-            return new ReferenceSource();
-        }
-        else {
-            throw new IllegalStateException(
-                    "A valid CRAM reference was not supplied and one cannot be acquired via the property settings reference_fasta or use_cram_ref_download");
-        }
-    }
-
-    public void clearCache() {
-        cacheW.clear();
-    }
-
-    private byte[] findInCache(final String name) {
-        final WeakReference<byte[]> weakReference = cacheW.get(name);
-        if (weakReference != null) {
-            final byte[] bytes = weakReference.get();
-            if (bytes != null)
-                return bytes;
-        }
-        return null;
-    }
-
-    public synchronized byte[] getReferenceBases(final SAMSequenceRecord record,
-                                                 final boolean tryNameVariants) {
-        { // check cache by sequence name:
-            final String name = record.getSequenceName();
-            final byte[] bases = findInCache(name);
-            if (bases != null)
-                return bases;
-        }
-
-        final String md5 = record.getAttribute(SAMSequenceRecord.MD5_TAG);
-        { // check cache by md5:
-            if (md5 != null) {
-                byte[] bases = findInCache(md5);
-                if (bases != null)
-                    return bases;
-                bases = findInCache(md5.toLowerCase());
-                if (bases != null)
-                    return bases;
-                bases = findInCache(md5.toUpperCase());
-                if (bases != null)
-                    return bases;
-            }
-        }
-
-        byte[] bases;
-
-        { // try to fetch sequence by name:
-            bases = findBasesByName(record.getSequenceName(), tryNameVariants);
-            if (bases != null) {
-                SequenceUtil.upperCase(bases);
-                cacheW.put(record.getSequenceName(), new WeakReference<byte[]>(
-                        bases));
-                return bases;
-            }
-        }
-
-        {
-            if (Defaults.USE_CRAM_REF_DOWNLOAD) { // try to fetch sequence by md5:
-                if (md5 != null) {
-                    try {
-                        bases = findBasesByMD5(md5.toLowerCase());
-                    } catch (final Exception e) {
-                        throw new RuntimeException(e);
-                    }
-                }
-                if (bases != null) {
-                    SequenceUtil.upperCase(bases);
-                    cacheW.put(md5, new WeakReference<byte[]>(bases));
-                    return bases;
-                }
-            }
-        }
-
-        // sequence not found, give up:
-        return null;
-    }
-
-    byte[] findBasesByName(final String name, final boolean tryVariants) {
-        if (rsFile == null || !rsFile.isIndexed())
-            return null;
-
-        ReferenceSequence sequence = null;
-        try {
-            sequence = rsFile.getSequence(name);
-        } catch (final SAMException e) {
-            // the only way to test if rsFile contains the sequence is to try and catch exception.
-        }
-        if (sequence != null)
-            return sequence.getBases();
-
-        if (tryVariants) {
-            for (final String variant : getVariants(name)) {
-                try {
-                    sequence = rsFile.getSequence(variant);
-                } catch (final SAMException e) {
-                    log.warn("Sequence not found: " + variant);
-                }
-                if (sequence != null)
-                    return sequence.getBases();
-            }
-        }
-        return null;
-    }
-
-    byte[] findBasesByMD5(final String md5) throws
-            IOException {
-        final String url = String.format(Defaults.EBI_REFERENCE_SEVICE_URL_MASK, md5);
-
-        for (int i = 0; i < downloadTriesBeforeFailing; i++) {
-            final InputStream is = new URL(url).openStream();
-            if (is == null)
-                return null;
-
-            log.debug("Downloading reference sequence: " + url);
-            final byte[] data = InputStreamUtils.readFully(is);
-            log.debug("Downloaded " + data.length + " bytes for md5 " + md5);
-            is.close();
-
-            try {
-                final String downloadedMD5 = SequenceUtil.calculateMD5String(data);
-                if (md5.equals(downloadedMD5)) {
-                    return data;
-                } else {
-                    final String message = String
-                            .format("Downloaded sequence is corrupt: requested md5=%s, received md5=%s",
-                                    md5, downloadedMD5);
-                    log.error(message);
-                }
-            } catch (final NoSuchAlgorithmException e) {
-                throw new RuntimeException(e);
-            }
-        }
-        throw new RuntimeException("Giving up on downloading sequence for md5 "
-                + md5);
-    }
-
-    private static final Pattern chrPattern = Pattern.compile("chr.*",
-            Pattern.CASE_INSENSITIVE);
-
-    List<String> getVariants(final String name) {
-        final List<String> variants = new ArrayList<String>();
-
-        if (name.equals("M"))
-            variants.add("MT");
-
-        if (name.equals("MT"))
-            variants.add("M");
-
-        final boolean chrPatternMatch = chrPattern.matcher(name).matches();
-        if (chrPatternMatch)
-            variants.add(name.substring(3));
-        else
-            variants.add("chr" + name);
-
-        if ("chrM".equals(name)) {
-            // chrM case:
-            variants.add("MT");
-        }
-        return variants;
-    }
-
-    public int getDownloadTriesBeforeFailing() {
-        return downloadTriesBeforeFailing;
-    }
-
-    public void setDownloadTriesBeforeFailing(final int downloadTriesBeforeFailing) {
-        this.downloadTriesBeforeFailing = downloadTriesBeforeFailing;
-    }
-}
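
A sketch of resolving reference bases from a local FASTA. It assumes the FASTA is indexed (findBasesByName() returns null for unindexed files), and the sequence name and length below are purely illustrative.

    import htsjdk.samtools.SAMSequenceRecord;
    import htsjdk.samtools.cram.ref.ReferenceSource;

    import java.io.File;

    public class ReferenceSourceSketch {
        public static void main(final String[] args) {
            // args[0] should point at an indexed FASTA (with a .fai alongside).
            final ReferenceSource source = new ReferenceSource(new File(args[0]));

            // Normally the record comes from a SAM/CRAM header; the name and length here are made up.
            final SAMSequenceRecord record = new SAMSequenceRecord("chr20", 63_025_520);
            final byte[] bases = source.getReferenceBases(record, true);
            System.out.println(bases == null ? "sequence not found" : bases.length + " bases loaded");
        }
    }
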
diff --git a/src/java/htsjdk/samtools/cram/structure/ContainerHeaderIO.java b/src/java/htsjdk/samtools/cram/structure/ContainerHeaderIO.java
deleted file mode 100644
index 613e555..0000000
--- a/src/java/htsjdk/samtools/cram/structure/ContainerHeaderIO.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/**
- * ****************************************************************************
- * Copyright 2013 EMBL-EBI
- * <p/>
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ****************************************************************************
- */
-package htsjdk.samtools.cram.structure;
-
-import htsjdk.samtools.cram.io.CRC32OutputStream;
-import htsjdk.samtools.cram.io.CramArray;
-import htsjdk.samtools.cram.io.CramInt;
-import htsjdk.samtools.cram.io.ITF8;
-import htsjdk.samtools.cram.io.LTF8;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
-class ContainerHeaderIO {
-
-    public boolean readContainerHeader(final Container container, final InputStream inputStream)
-            throws IOException {
-        return readContainerHeader(2, container, inputStream);
-    }
-
-    public boolean readContainerHeader(final int major, final Container container, final InputStream inputStream)
-            throws IOException {
-        final byte[] peek = new byte[4];
-        int character = inputStream.read();
-        if (character == -1)
-            return false;
-
-        peek[0] = (byte) character;
-        for (int i = 1; i < peek.length; i++) {
-            character = inputStream.read();
-            if (character == -1)
-                throw new RuntimeException("Incomplete or broken stream.");
-            peek[i] = (byte) character;
-        }
-
-        container.containerByteSize = CramInt.int32(peek);
-        container.sequenceId = ITF8.readUnsignedITF8(inputStream);
-        container.alignmentStart = ITF8.readUnsignedITF8(inputStream);
-        container.alignmentSpan = ITF8.readUnsignedITF8(inputStream);
-        container.nofRecords = ITF8.readUnsignedITF8(inputStream);
-        container.globalRecordCounter = LTF8.readUnsignedLTF8(inputStream);
-        container.bases = LTF8.readUnsignedLTF8(inputStream);
-        container.blockCount = ITF8.readUnsignedITF8(inputStream);
-        container.landmarks = CramArray.array(inputStream);
-        if (major >= 3)
-            container.checksum = CramInt.int32(inputStream);
-
-        return true;
-    }
-
-    public int writeContainerHeader(final int major, final Container container, final OutputStream outputStream)
-            throws IOException {
-        final CRC32OutputStream crc32OutputStream = new CRC32OutputStream(outputStream);
-
-        int length = (CramInt.writeInt32(container.containerByteSize, crc32OutputStream) + 7) / 8;
-        length += (ITF8.writeUnsignedITF8(container.sequenceId, crc32OutputStream) + 7) / 8;
-        length += (ITF8.writeUnsignedITF8(container.alignmentStart, crc32OutputStream) + 7) / 8;
-        length += (ITF8.writeUnsignedITF8(container.alignmentSpan, crc32OutputStream) + 7) / 8;
-        length += (ITF8.writeUnsignedITF8(container.nofRecords, crc32OutputStream) + 7) / 8;
-        length += (LTF8.writeUnsignedLTF8(container.globalRecordCounter, crc32OutputStream) + 7) / 8;
-        length += (LTF8.writeUnsignedLTF8(container.bases, crc32OutputStream) + 7) / 8;
-        length += (ITF8.writeUnsignedITF8(container.blockCount, crc32OutputStream) + 7) / 8;
-        length += (CramArray.write(container.landmarks, crc32OutputStream) + 7) / 8;
-
-        if (major >= 3) {
-            outputStream.write(crc32OutputStream.getCrc32_LittleEndian());
-            length += 4 * 8;
-        }
-
-        return length;
-    }
-}
diff --git a/src/java/htsjdk/samtools/example/PrintReadsExample.java b/src/java/htsjdk/samtools/example/PrintReadsExample.java
deleted file mode 100755
index b8b4106..0000000
--- a/src/java/htsjdk/samtools/example/PrintReadsExample.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2016 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- */
-package htsjdk.samtools.example;
-
-import htsjdk.samtools.*;
-import htsjdk.samtools.util.Log;
-import htsjdk.samtools.util.ProgressLogger;
-import htsjdk.samtools.util.zip.DeflaterFactory;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.InetAddress;
-import java.util.Arrays;
-import java.util.List;
-import java.util.stream.Collectors;
-
-
-/**
- * This is an example program showing how to use SAM readers and (optionally) writers.
- * It's also useful for measuring time.
- * An example invocation is:
- * java -cp dist/htsjdk-2.1.1.jar htsjdk.samtools.example.PrintReadsExample in.bam false a.bam
- * <p>
- * or (to test the IntelDeflator)
- * java -Dsamjdk.intel_deflater_so_path=$PWD/lib/jni/libIntelDeflater.so -cp dist/htsjdk-2.1.1.jar htsjdk.samtools.example.PrintReadsExample in.bam false a.bam
- * <p>
- * Arguments:
- * - the first argument is the input file (SAM or BAM)
- * - the second argument is a boolean (true or false) that indicates whether reads are to be eagerly decoded (useful for benchmarking)
- * - the third argument is optional and is the name of the output file (nothing gets written if this argument is missing)
- */
-public final class PrintReadsExample {
-    private PrintReadsExample() {
-    }
-
-    private static final Log log = Log.getInstance(PrintReadsExample.class);
-
-    public static void main(String[] args) throws IOException {
-        if (args.length < 2) {
-            System.out.println("Usage: " + PrintReadsExample.class.getCanonicalName() + " inFile eagerDecode [outFile]");
-            System.exit(1);
-        }
-        final File inputFile = new File(args[0]);
-        final boolean eagerDecode = Boolean.parseBoolean(args[1]); //useful to test (realistic) scenarios in which every record is always fully decoded.
-        final File outputFile = args.length >= 3 ? new File(args[2]) : null;
-
-        final long start = System.currentTimeMillis();
-
-        log.info("Start with args:" + Arrays.toString(args));
-        printConfigurationInfo();
-
-        SamReaderFactory readerFactory = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT);
-        if (eagerDecode) {
-            readerFactory = readerFactory.enable(SamReaderFactory.Option.EAGERLY_DECODE);
-        }
-
-        try (final SamReader reader = readerFactory.open(inputFile)) {
-            final SAMFileHeader header = reader.getFileHeader();
-            try (final SAMFileWriter writer = outputFile != null ? new SAMFileWriterFactory().makeBAMWriter(header, true, outputFile) : null) {
-                final ProgressLogger pl = new ProgressLogger(log, 1000000);
-                for (final SAMRecord record : reader) {
-                    if (writer != null) {
-                        writer.addAlignment(record);
-                    }
-                    pl.record(record);
-                }
-            }
-        }
-        final long end = System.currentTimeMillis();
-        log.info(String.format("Done. Elapsed time %.3f seconds", (end - start) / 1000.0));
-    }
-
-    private static void printConfigurationInfo() throws IOException {
-        log.info("Executing as " +
-                System.getProperty("user.name") + '@' + InetAddress.getLocalHost().getHostName() +
-                " on " + System.getProperty("os.name") + ' ' + System.getProperty("os.version") +
-                ' ' + System.getProperty("os.arch") + "; " + System.getProperty("java.vm.name") +
-                ' ' + System.getProperty("java.runtime.version") +
-                ' ' + (DeflaterFactory.usingIntelDeflater() ? "IntelDeflater" : "JdkDeflater"));
-
-        final List<String> list = Defaults.allDefaults().entrySet().stream().map(e -> e.getKey() + ':' + e.getValue()).collect(Collectors.toList());
-        log.info(String.join(" ", list));
-    }
-}
diff --git a/src/java/htsjdk/samtools/fastq/FastqWriterFactory.java b/src/java/htsjdk/samtools/fastq/FastqWriterFactory.java
deleted file mode 100644
index 3f7a006..0000000
--- a/src/java/htsjdk/samtools/fastq/FastqWriterFactory.java
+++ /dev/null
@@ -1,31 +0,0 @@
-package htsjdk.samtools.fastq;
-
-import htsjdk.samtools.Defaults;
-
-import java.io.File;
-
-/**
- * Factory class for creating FastqWriter objects.
- *
- * @author Tim Fennell
- */
-public class FastqWriterFactory {
-    boolean useAsyncIo = Defaults.USE_ASYNC_IO_FOR_SAMTOOLS;
-    boolean createMd5  = Defaults.CREATE_MD5;
-
-    /** Sets whether or not to use async io (i.e. a dedicated thread per writer). */
-    public void setUseAsyncIo(final boolean useAsyncIo) { this.useAsyncIo = useAsyncIo; }
-
-    /** If true, compute MD5 and write appropriately-named file when file is closed. */
-    public void setCreateMd5(final boolean createMd5) { this.createMd5 = createMd5; }
-
-    public FastqWriter newWriter(final File out) {
-        final FastqWriter writer = new BasicFastqWriter(out, createMd5);
-        if (useAsyncIo) {
-            return new AsyncFastqWriter(writer, AsyncFastqWriter.DEFAULT_QUEUE_SIZE);
-        }
-        else {
-            return writer;
-        }
-    }
-}
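
A minimal sketch of the factory in use; the output name and the record contents are placeholders. With createMd5 enabled the writer also emits a companion .md5 file when it is closed.

    import htsjdk.samtools.fastq.FastqRecord;
    import htsjdk.samtools.fastq.FastqWriter;
    import htsjdk.samtools.fastq.FastqWriterFactory;

    import java.io.File;

    public class FastqWriteSketch {
        public static void main(final String[] args) {
            final FastqWriterFactory factory = new FastqWriterFactory();
            factory.setUseAsyncIo(false);   // write on the calling thread instead of a dedicated one
            factory.setCreateMd5(true);     // also write an .md5 companion file on close

            final FastqWriter writer = factory.newWriter(new File("example.fastq"));
            writer.write(new FastqRecord("read1", "ACGTACGT", "", "FFFFFFFF"));
            writer.close();
        }
    }
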
diff --git a/src/java/htsjdk/samtools/liftover/LiftOver.java b/src/java/htsjdk/samtools/liftover/LiftOver.java
deleted file mode 100644
index deb9067..0000000
--- a/src/java/htsjdk/samtools/liftover/LiftOver.java
+++ /dev/null
@@ -1,292 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools.liftover;
-
-import htsjdk.samtools.SAMException;
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.util.IOUtil;
-import htsjdk.samtools.util.Interval;
-import htsjdk.samtools.util.Log;
-import htsjdk.samtools.util.OverlapDetector;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Java port of UCSC liftOver.  Only the most basic liftOver functionality is implemented.
- * Internally coordinates are 0-based, half-open. The API is standard Picard 1-based, inclusive.
- *
- * @author alecw at broadinstitute.org
- */
-public class LiftOver {
-    private static final Log LOG = Log.getInstance(LiftOver.class);
-    
-    public static final double DEFAULT_LIFTOVER_MINMATCH = 0.95;
-
-    private double liftOverMinMatch = DEFAULT_LIFTOVER_MINMATCH;
-    private final OverlapDetector<Chain> chains;
-
-    /**
-     * Load a UCSC chain file in order to lift over Intervals.
-     */
-    public LiftOver(File chainFile) {
-        IOUtil.assertFileIsReadable(chainFile);
-        chains = Chain.loadChains(chainFile);
-    }
-
-    /**
-     * Throw an exception if any of the "to" sequence names in the chains is not found in the given sequence dictionary.
-     */
-    public void validateToSequences(final SAMSequenceDictionary sequenceDictionary) {
-        for (final Chain chain : chains.getAll()) {
-            if (sequenceDictionary.getSequence(chain.toSequenceName) == null) {
-                throw new SAMException("Sequence " + chain.toSequenceName + " from chain file is not found in sequence dictionary.");
-            }
-        }
-
-    }
-
-    /**
-     * Lift over the given interval to the new genome build using the liftOverMinMatch set for this
-     * LiftOver object.
-     * @param interval Interval to be lifted over.
-     * @return Interval in the output build coordinates, or null if it cannot be lifted over.
-     */
-    public Interval liftOver(final Interval interval) {
-        return liftOver(interval, liftOverMinMatch);
-    }
-
-    /**
-     * Lift over the given interval to the new genome build.
-     * @param interval Interval to be lifted over.
-     * @param liftOverMinMatch Minimum fraction of bases that must remap.
-     * @return Interval in the output build coordinates, or null if it cannot be lifted over.
-     */
-    public Interval liftOver(final Interval interval, final double liftOverMinMatch) {
-        if (interval.length() == 0) {
-            throw new IllegalArgumentException("Zero-length interval cannot be lifted over.  Interval: " +
-                    interval.getName());
-        }
-        Chain chainHit = null;
-        TargetIntersection targetIntersection = null;
-        // Number of bases in interval that can be lifted over must be >= this.
-        double minMatchSize = liftOverMinMatch * interval.length();
-
-        // Find the appropriate Chain, and the part of the chain corresponding to the interval to be lifted over.
-        for (final Chain chain : chains.getOverlaps(interval)) {
-            final TargetIntersection candidateIntersection = targetIntersection(chain, interval);
-            if (candidateIntersection != null && candidateIntersection.intersectionLength >= minMatchSize) {
-                if (chainHit != null) {
-                    // In basic liftOver, multiple hits are not allowed.
-                    return null;
-                }
-                chainHit = chain;
-                targetIntersection = candidateIntersection;
-            } else if (candidateIntersection != null) {
-                LOG.info("Interval " + interval.getName() + " failed to match chain " + chain.id +
-                " because intersection length " + candidateIntersection.intersectionLength + " < minMatchSize "
-                + minMatchSize +
-                " (" + (candidateIntersection.intersectionLength/(float)interval.length()) + " < " + liftOverMinMatch + ")");
-            }
-        }
-        if (chainHit == null) {
-            // Can't be lifted over.
-            return null;
-        }
-
-        return createToInterval(interval.getName(), interval.isNegativeStrand(), targetIntersection);
-    }
-
-    public List<PartialLiftover> diagnosticLiftover(final Interval interval) {
-        final List<PartialLiftover> ret = new ArrayList<PartialLiftover>();
-        if (interval.length() == 0) {
-            throw new IllegalArgumentException("Zero-length interval cannot be lifted over.  Interval: " +
-                    interval.getName());
-        }
-        for (final Chain chain : chains.getOverlaps(interval)) {
-            Interval intersectingChain = interval.intersect(chain.interval);
-            final TargetIntersection targetIntersection = targetIntersection(chain, intersectingChain);
-            if (targetIntersection == null) {
-                ret.add(new PartialLiftover(intersectingChain, chain.id));
-            } else {
-                Interval toInterval = createToInterval(interval.getName(), interval.isNegativeStrand(), targetIntersection);
-                float percentLiftedOver = targetIntersection.intersectionLength/(float)interval.length();
-                ret.add(new PartialLiftover(intersectingChain, toInterval, targetIntersection.chain.id, percentLiftedOver));
-            }
-        }
-        return ret;
-    }
-
-    private static Interval createToInterval(final String intervalName, final boolean sourceNegativeStrand, final TargetIntersection targetIntersection) {
-        // Compute the query interval given the offsets of the target interval start and end into the first and
-        // last ContinuousBlocks.
-        int toStart = targetIntersection.chain.getBlock(targetIntersection.firstBlockIndex).toStart + targetIntersection.startOffset;
-        int toEnd = targetIntersection.chain.getBlock(targetIntersection.lastBlockIndex).getToEnd() - targetIntersection.offsetFromEnd;
-        if (toEnd <= toStart || toStart < 0) {
-            throw new SAMException("Something strange lifting over interval " + intervalName);
-        }
-
-        if (targetIntersection.chain.toOppositeStrand) {
-            // Flip if query is negative.
-            int negativeStart = targetIntersection.chain.toSequenceSize - toEnd;
-            int negativeEnd = targetIntersection.chain.toSequenceSize - toStart;
-            toStart = negativeStart;
-            toEnd = negativeEnd;
-        }
-        // Convert to 1-based, inclusive.
-        final boolean negativeStrand = targetIntersection.chain.toOppositeStrand ? !sourceNegativeStrand : sourceNegativeStrand;
-        return new Interval(targetIntersection.chain.toSequenceName, toStart+1, toEnd, negativeStrand, intervalName);
-    }
-
-    /**
-     * Add up the overlap between the blocks in this chain and the given interval.
-     * @return Length of overlap, offsets into first and last ContinuousBlocks, and indices of first and
-     * last ContinuousBlocks.
-     */
-    private static TargetIntersection targetIntersection(final Chain chain, final Interval interval) {
-        int intersectionLength = 0;
-        // Convert interval to 0-based, half-open
-        int start = interval.getStart() - 1;
-        int end = interval.getEnd();
-        int firstBlockIndex = -1;
-        int lastBlockIndex = -1;
-        int startOffset = -1;
-        int offsetFromEnd = -1;
-        List<Chain.ContinuousBlock> blockList = chain.getBlocks();
-        for (int i = 0; i < blockList.size(); ++i) {
-            final Chain.ContinuousBlock block = blockList.get(i);
-            if (block.fromStart >= end) {
-                break;
-            } else if (block.getFromEnd() <= start) {
-                continue;
-            }
-            if (firstBlockIndex == -1) {
-                firstBlockIndex = i;
-                if (start > block.fromStart) {
-                    startOffset = start - block.fromStart;
-                } else {
-                    startOffset = 0;
-                }
-            }
-            lastBlockIndex = i;
-            if (block.getFromEnd() > end) {
-                offsetFromEnd = block.getFromEnd() - end;
-            } else {
-                offsetFromEnd = 0;
-            }
-            int thisIntersection = Math.min(end, block.getFromEnd()) - Math.max(start, block.fromStart);
-            if (thisIntersection <= 0) {
-                throw new SAMException("Should have been some intersection.");
-            }
-            intersectionLength += thisIntersection;
-        }
-        if (intersectionLength == 0) {
-            return null;
-        }
-        return new TargetIntersection(chain, intersectionLength, startOffset, offsetFromEnd, firstBlockIndex, lastBlockIndex);
-    }
-
-    /**
-     * Get minimum fraction of bases that must remap.
-     */
-    public double getLiftOverMinMatch() {
-        return liftOverMinMatch;
-    }
-
-    /**
-     * Set minimum fraction of bases that must remap.
-     */
-    public void setLiftOverMinMatch(final double liftOverMinMatch) {
-        this.liftOverMinMatch = liftOverMinMatch;
-    }
-
-    /**
-    * Value class returned by targetIntersection()
-    */
-    private static class TargetIntersection {
-        /** Chain used for this intersection */
-        final Chain chain;
-        /** Total intersection length. */
-        final int intersectionLength;
-        /** Offset of target interval start in first block. */
-        final int startOffset;
-        /** Distance from target interval end to end of last block. */
-        final int offsetFromEnd;
-        /** Index of first ContinuousBlock matching interval. */
-        final int firstBlockIndex;
-        /** Index of last ContinuousBlock matching interval. */
-        final int lastBlockIndex;
-
-        TargetIntersection(final Chain chain,final int intersectionLength, final int startOffset,
-                           final int offsetFromEnd, final int firstBlockIndex, final int lastBlockIndex) {
-            this.chain = chain;
-            this.intersectionLength = intersectionLength;
-            this.startOffset = startOffset;
-            this.offsetFromEnd = offsetFromEnd;
-            this.firstBlockIndex = firstBlockIndex;
-            this.lastBlockIndex = lastBlockIndex;
-        }
-    }
-
-    /**
-     * Represents a portion of a liftover operation, for use in diagnosing liftover failures.
-     */
-    public static class PartialLiftover {
-        /** Intersection between "from" interval and "from" region of a chain. */
-        final Interval fromInterval;
-        /**
-         * Result of lifting over fromInterval (with no percentage mapped requirement).  This is null
-         * if fromInterval falls entirely within a gap of the chain. */
-        final Interval toInterval;
-        /** id of chain used for this liftover */
-        final int chainId;
-        /** Percentage of bases in fromInterval that were lifted over.  0 if fromInterval is not covered by any chain. */
-        final float percentLiftedOver;
-
-        PartialLiftover(final Interval fromInterval, final Interval toInterval, final int chainId, final float percentLiftedOver) {
-            this.fromInterval = fromInterval;
-            this.toInterval = toInterval;
-            this.chainId = chainId;
-            this.percentLiftedOver = percentLiftedOver;
-        }
-
-        PartialLiftover(final Interval fromInterval, final int chainId) {
-            this.fromInterval = fromInterval;
-            this.toInterval = null;
-            this.chainId = chainId;
-            this.percentLiftedOver = 0.0f;
-        }
-
-        public String toString() {
-            if (toInterval == null) {
-                // Matched a chain, but entirely within a gap.
-                return fromInterval.toString() + " (len " + fromInterval.length() + ")=>null using chain " + chainId;
-            }
-            final String strand = toInterval.isNegativeStrand()? "-": "+";
-            return fromInterval.toString() + " (len " + fromInterval.length() + ")=>" + toInterval + "(" + strand
-                    + ") using chain " + chainId + " ; pct matched " + percentLiftedOver;
-        }
-    }
-}
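For reference, the strand handling in the liftOver helper above converts a 0-based, half-open [toStart, toEnd) span on the target sequence to the opposite strand and only then to the 1-based, inclusive coordinates expected by Interval. A minimal illustrative sketch of that arithmetic, using made-up values:

    // Illustrative only: mirrors the flip-then-shift arithmetic above.
    public class StrandFlipExample {
        public static void main(String[] args) {
            int toSequenceSize = 1000;   // length of the target sequence
            int toStart = 100;           // 0-based, half-open start
            int toEnd = 150;             // 0-based, half-open end

            // Flip to the opposite strand, staying 0-based and half-open.
            int negativeStart = toSequenceSize - toEnd;   // 850
            int negativeEnd = toSequenceSize - toStart;   // 900

            // Convert to 1-based, inclusive for the Interval constructor.
            System.out.println("lifted span: " + (negativeStart + 1) + "-" + negativeEnd);
        }
    }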
diff --git a/src/java/htsjdk/samtools/metrics/MetricsFile.java b/src/java/htsjdk/samtools/metrics/MetricsFile.java
deleted file mode 100644
index fabac6e..0000000
--- a/src/java/htsjdk/samtools/metrics/MetricsFile.java
+++ /dev/null
@@ -1,592 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-
-package htsjdk.samtools.metrics;
-
-import htsjdk.samtools.SAMException;
-import htsjdk.samtools.util.CloserUtil;
-import htsjdk.samtools.util.FormatUtil;
-import htsjdk.samtools.util.Histogram;
-import htsjdk.samtools.util.IOUtil;
-import htsjdk.samtools.util.StringUtil;
-
-import java.io.*;
-import java.lang.reflect.Field;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.TreeSet;
-
-/**
- * Contains a set of metrics that can be written to a file and parsed back
- * again. The set of metrics is composed of zero or more instances of a class,
- * BEAN, that extends {@link MetricBase} (all instances must be of the same type)
- * and may optionally include one or more histograms that share the same key set.
- *
- * @author Tim Fennell
- */
-public class MetricsFile<BEAN extends MetricBase, HKEY extends Comparable> implements Serializable {
-    public static final String MAJOR_HEADER_PREFIX = "## ";
-    public static final String MINOR_HEADER_PREFIX = "# ";
-    public static final String SEPARATOR = "\t";
-    public static final String HISTO_HEADER = "## HISTOGRAM\t";
-    public static final String METRIC_HEADER = "## METRICS CLASS\t";
-
-    private final Set<String> columnLabels = new HashSet<String>();
-    private final List<Header> headers = new ArrayList<Header>();
-    private final List<BEAN> metrics = new ArrayList<BEAN>();
-    private final List<Histogram<HKEY>> histograms = new ArrayList<Histogram<HKEY>>();
-
-    /** Adds a header to the collection of metrics. */
-    public void addHeader(Header h) { this.headers.add(h); }
-
-    /** Returns the list of headers. */
-    public List<Header> getHeaders() { return Collections.unmodifiableList(this.headers); }
-
-    /** Adds a bean to the collection of metrics. */
-    public void addMetric(final BEAN bean) { this.metrics.add(bean); }
-
-    /** Add multiple metric beans at once. */
-    public void addAllMetrics(final Iterable<BEAN> beanz) {
-        for (final BEAN bean : beanz) { this.addMetric(bean); }
-    }
-
-    /** Returns the list of metric beans. */
-    public List<BEAN> getMetrics() { return Collections.unmodifiableList(this.metrics); }
-
-    public Set<String> getMetricsColumnLabels() { return Collections.unmodifiableSet(this.columnLabels); }
-
-    /** Returns the histogram contained in the metrics file if any. */
-    public Histogram<HKEY> getHistogram() {
-        if (!histograms.isEmpty()) return this.histograms.get(0);
-        else return null;
-    }
-
-    /** Sets the histogram contained in the metrics file. */
-    public void setHistogram(final Histogram<HKEY> histogram) {
-        if (this.histograms.isEmpty()) {
-            if (histogram != null) this.histograms.add(histogram);
-        }
-        else {
-            this.histograms.set(0, histogram);
-        }
-    }
-
-    /** Adds a histogram to the list of histograms in the metrics file. */
-    public void addHistogram(final Histogram<HKEY> histogram) {
-        this.histograms.add(histogram);
-    }
-
-    /** Returns an unmodifiable version of the histogram list. */
-    public List<Histogram<HKEY>> getAllHistograms() {
-        return Collections.unmodifiableList(histograms);
-    }
-
-    /** Returns the number of histograms added to the metrics file. */
-    public int getNumHistograms() {
-        return this.histograms.size();
-    }
-
-    /** Returns the list of headers with the specified type. */
-    public List<Header> getHeaders(final Class<? extends Header> type) {
-        List<Header> tmp = new ArrayList<Header>();
-        for (final Header h : this.headers) {
-            if (h.getClass().equals(type)) {
-                tmp.add(h);
-            }
-        }
-
-        return tmp;
-    }
-
-    /**
-     * Writes out the metrics file to the supplied file. The file is written out
-     * headers first, metrics second and histogram third.
-     *
-     * @param f a File into which to write the metrics
-     */
-    public void write(final File f) {
-        FileWriter w = null;
-        try {
-            w = new FileWriter(f);
-            write(w);
-        }
-        catch (IOException ioe) {
-            throw new SAMException("Could not write metrics to file: " + f.getAbsolutePath(), ioe);
-        }
-        finally {
-            if (w != null) {
-                try {
-                    w.close();
-                } catch (IOException e) {
-                }
-            }
-        }
-    }
-
-    /**
-     * Writes out the metrics file to the supplied writer. The file is written out
-     * headers first, metrics second and histogram third.
-     *
-     * @param w a Writer into which to write the metrics
-     */
-    public void write(final Writer w) {
-        try {
-            final FormatUtil formatter = new FormatUtil();
-            final BufferedWriter out = new BufferedWriter(w);
-            printHeaders(out);
-            out.newLine();
-
-            printBeanMetrics(out, formatter);
-            out.newLine();
-
-            printHistogram(out, formatter);
-            out.newLine();
-            out.flush();
-        }
-        catch (IOException ioe) {
-            throw new SAMException("Could not write metrics file.", ioe);
-        }
-    }
-
-    /** Prints the headers into the provided PrintWriter. */
-    private void printHeaders(final BufferedWriter out) throws IOException {
-        for (final Header h : this.headers) {
-            out.append(MAJOR_HEADER_PREFIX);
-            out.append(h.getClass().getName());
-            out.newLine();
-            out.append(MINOR_HEADER_PREFIX);
-            out.append(h.toString());
-            out.newLine();
-        }
-    }
-
-    /** Prints each of the metrics entries into the provided PrintWriter. */
-    private void printBeanMetrics(final BufferedWriter out, final FormatUtil formatter) throws IOException {
-        if (this.metrics.isEmpty()) {
-            return;
-        }
-
-        // Write out a header row with the type of the metric class
-        out.append(METRIC_HEADER + getBeanType().getName());
-        out.newLine();
-
-        // Look up the metric fields via reflection
-        final Field[] fields = getBeanType().getFields();
-        final int fieldCount = fields.length;
-
-        // Write out the column headers
-        for (int i=0; i<fieldCount; ++i) {
-            out.append(fields[i].getName());
-            if (i < fieldCount - 1) {
-                out.append(MetricsFile.SEPARATOR);
-            }
-            else {
-                out.newLine();
-            }
-        }
-
-        // Write out each of the data rows
-        for (final BEAN bean : this.metrics) {
-            for (int i=0; i<fieldCount; ++i) {
-                try {
-                    final Object value = fields[i].get(bean);
-                    out.append(StringUtil.assertCharactersNotInString(formatter.format(value), '\t', '\n'));
-
-                    if (i < fieldCount - 1) {
-                        out.append(MetricsFile.SEPARATOR);
-                    }
-                    else {
-                        out.newLine();
-                    }
-                }
-                catch (IllegalAccessException iae) {
-                    throw new SAMException("Could not read property " + fields[i].getName()
-                            + " from class of type " + bean.getClass());
-                }
-            }
-        }
-
-        out.flush();
-    }
-
-    /** Prints the histogram if one is present. */
-    private void printHistogram(final BufferedWriter out, final FormatUtil formatter) throws IOException {
-        final List<Histogram<HKEY>> nonEmptyHistograms = new ArrayList<Histogram<HKEY>>();
-        for (final Histogram<HKEY> histo : this.histograms) {
-            if (!histo.isEmpty()) nonEmptyHistograms.add(histo);
-        }
-
-        if (nonEmptyHistograms.isEmpty()) {
-            return;
-        }
-
-        // Build a combined key set.  Assume comparator is the same for all Histograms
-        final java.util.Set<HKEY> keys = new TreeSet<HKEY>(nonEmptyHistograms.get(0).comparator());
-        for (final Histogram<HKEY> histo : nonEmptyHistograms) {
-            if (histo != null) keys.addAll(histo.keySet());
-        }
-
-        // Add a header for the histogram key type
-        out.append(HISTO_HEADER + nonEmptyHistograms.get(0).keySet().iterator().next().getClass().getName());
-        out.newLine();
-
-        // Output a header row
-        out.append(StringUtil.assertCharactersNotInString(nonEmptyHistograms.get(0).getBinLabel(), '\t', '\n'));
-        for (final Histogram<HKEY> histo : nonEmptyHistograms) {
-            out.append(SEPARATOR);
-            out.append(StringUtil.assertCharactersNotInString(histo.getValueLabel(), '\t', '\n'));
-        }
-        out.newLine();
-
-        for (final HKEY key : keys) {
-            out.append(key.toString());
-
-            for (final Histogram<HKEY> histo : nonEmptyHistograms) {
-                final Histogram<HKEY>.Bin bin = histo.get(key);
-                final double value = (bin == null ? 0 : bin.getValue());
-
-                out.append(SEPARATOR);
-                out.append(formatter.format(value));
-            }
-
-            out.newLine();
-        }
-    }
-
-    /** Gets the type of the metrics bean being used. */
-    private Class<?> getBeanType() {
-        if (this.metrics.isEmpty()) {
-            return null;
-        } else {
-            return this.metrics.get(0).getClass();
-        }
-    }
-
-    /** Reads the Metrics in from the given reader. */
-    public void read(final Reader r) {
-        final BufferedReader in = new BufferedReader(r);
-        final FormatUtil formatter = new FormatUtil();
-        String line = null;
-
-        try {
-            // First read the headers
-            Header header = null;
-            while ((line = in.readLine()) != null) {
-                line = line.trim();
-                if ("".equals(line)) {
-                    // Do nothing! Nothing to be done!
-                }
-                else if (line.startsWith(METRIC_HEADER) || line.startsWith(HISTO_HEADER)) {
-                    // A line that starts with "## METRICS CLASS" heralds the start of the actual
-                    // data. Bounce our butts out of header parsing without reading the next line.
-                    // This isn't in the while loop's conditional because we want to trim() first.
-                    break;
-                }
-                else if (line.startsWith(MAJOR_HEADER_PREFIX)) {
-                    if (header != null) {
-                        throw new IllegalStateException("Consecutive header class lines encountered.");
-                    }
-                    
-                    final String className = line.substring(MAJOR_HEADER_PREFIX.length()).trim();
-                    try {
-                        header = (Header) loadClass(className, true).newInstance();
-                    }
-                    catch (final Exception e) {
-                        throw new SAMException("Error load and/or instantiating an instance of " + className, e);
-                    }
-                }
-                else if (line.startsWith(MINOR_HEADER_PREFIX)) {
-                    if (header == null) {
-                        throw new IllegalStateException("Header class must precede header value:" + line);
-                    }
-                    header.parse(line.substring(MINOR_HEADER_PREFIX.length()));
-                    this.headers.add(header);
-                    header = null;
-                }
-                else {
-                    throw new SAMException("Illegal state. Found following string in metrics file header: " + line);
-                }
-            }
-
-            // Read space between headers and metrics, if any
-            while (line != null && ! line.trim().startsWith(MAJOR_HEADER_PREFIX)) {
-                line = in.readLine();
-            }
-
-
-            if (line != null) {
-                line = line.trim();
-            
-                // Then read the metrics if there are any
-                if (line.startsWith(METRIC_HEADER)) {
-                    // Get the metric class from the header
-                    final String className = line.split(SEPARATOR)[1];
-                    Class<?> type = null;
-                    try {
-                        type = loadClass(className, true);
-                    }
-                    catch (final ClassNotFoundException cnfe) {
-                        throw new SAMException("Could not locate class with name " + className, cnfe);
-                    }
-
-                    // Read the next line with the column headers
-                    final String[] fieldNames = in.readLine().split(SEPARATOR);
-                    Collections.addAll(columnLabels, fieldNames);
-                    final Field[] fields = new Field[fieldNames.length];
-                    for (int i=0; i<fieldNames.length; ++i) {
-                        try {
-                            fields[i] = type.getField(fieldNames[i]);
-                        }
-                        catch (final Exception e) {
-                            throw new SAMException("Could not get field with name " + fieldNames[i] +
-                                " from class " + type.getName());
-                        }
-                    }
-
-                    // Now read the values
-                    while ((line = in.readLine()) != null) {
-                        if ("".equals(line.trim())) {
-                            break;
-                        }
-                        else {
-                            final String[] values = line.split(SEPARATOR, -1);
-                            BEAN bean = null;
-
-                            try { bean = (BEAN) type.newInstance(); }
-                            catch (final Exception e) { throw new SAMException("Error instantiating a " + type.getName(), e); }
-
-                            for (int i=0; i<fields.length; ++i) {
-                                Object value = null;
-                                if (values[i] != null && !values[i].isEmpty()) {
-                                    value = formatter.parseObject(values[i], fields[i].getType());
-                                }
-
-                                try { fields[i].set(bean, value); }
-                                catch (final Exception e) {
-                                    throw new SAMException("Error setting field " + fields[i].getName() +
-                                            " on class of type " + type.getName(), e);
-                                }
-                            }
-
-                            this.metrics.add(bean);
-                        }
-                    }
-                }
-            }
-
-            // Read away any blank lines between metrics and histograms
-            while (line != null && ! line.trim().startsWith(MAJOR_HEADER_PREFIX)) {
-                line = in.readLine();
-            }
-
-            // Then read the histograms if any are present
-            if (line != null) {
-                line = line.trim();
-
-                if (line.startsWith(HISTO_HEADER)) {
-                    // Get the key type of the histogram
-                    final String keyClassName = line.split(SEPARATOR)[1].trim();
-                    Class<?> keyClass = null;
-
-                    try { keyClass = loadClass(keyClassName, true); }
-                    catch (final ClassNotFoundException cnfe) { throw new SAMException("Could not load class with name " + keyClassName); }
-
-                    // Read the next line with the bin and value labels
-                    final String[] labels = in.readLine().split(SEPARATOR);
-                    for (int i=1; i<labels.length; ++i) {
-                        this.histograms.add(new Histogram<HKEY>(labels[0], labels[i]));
-                    }
-
-                    // Read the entries in the histograms
-                    while ((line = in.readLine()) != null && !"".equals(line)) {
-                        final String[] fields = line.trim().split(SEPARATOR);
-                        final HKEY key = (HKEY) formatter.parseObject(fields[0], keyClass);
-
-                        for (int i=1; i<fields.length; ++i) {
-                            final double value = formatter.parseDouble(fields[i]);
-                            this.histograms.get(i-1).increment(key, value);
-                        }
-                    }
-                }
-            }
-        }
-        catch (final IOException ioe) {
-            throw new SAMException("Could not read metrics from reader.", ioe);
-        }
-        finally{
-            CloserUtil.close(in);
-        }
-    }
-
-    /** Attempts to load a class, taking into account that some classes have "migrated" between packages (e.g. from the edu.mit.broad packages to the later picard/htsjdk ones). */
-    private Class<?> loadClass(final String className, final boolean tryOtherPackages) throws ClassNotFoundException {
-        // List of alternative packages to check in case classes moved around
-        final String[] packages = new String[] {
-                "edu.mit.broad.picard.genotype.concordance",
-                "edu.mit.broad.picard.genotype.fingerprint",
-                "edu.mit.broad.picard.ic",
-                "edu.mit.broad.picard.illumina",
-                "edu.mit.broad.picard.jumping",
-                "edu.mit.broad.picard.quality",
-                "edu.mit.broad.picard.samplevalidation",
-                "htsjdk.samtools.analysis",
-                "htsjdk.samtools.analysis.directed",
-                "htsjdk.samtools.sam",
-                "htsjdk.samtools.metrics",
-                "picard.sam",
-                "picard.metrics",
-                "picard.illumina",
-                "picard.analysis",
-                "picard.analysis.directed",
-                "picard.vcf"
-        };
-
-        try { return Class.forName(className); }
-        catch (ClassNotFoundException cnfe) {
-            if (tryOtherPackages) {
-                for (final String p : packages) {
-                    try {
-                        return loadClass(p + className.substring(className.lastIndexOf('.')), false);
-                    }
-                    catch (ClassNotFoundException cnf2) {/* do nothing */}
-                    // If it was an inner class, try to see if it's a stand-alone class now
-                    if (className.indexOf('$') > -1) {
-                        try {
-                            return loadClass(p + "." + className.substring(className.lastIndexOf('$') + 1), false);
-                        }
-                        catch (ClassNotFoundException cnf2) {/* do nothing */}
-                    }
-                }
-            }
-
-            throw cnfe;
-        }
-    }
-
-    /** Checks that the headers, metrics and histogram are all equal. */
-    @Override
-    public boolean equals(final Object o) {
-        if (o == null) {
-            return false;
-        }
-        if (getClass() != o.getClass()) {
-            return false;
-        }
-        final MetricsFile that = (MetricsFile) o;
-
-        if (!areHeadersEqual(that)) {
-            return false;
-        }
-        if (!areMetricsEqual(that)) {
-            return false;
-        }
-        if (!areHistogramsEqual(that)) {
-            return false;
-        }
-
-        return true;
-    }
-
-    public boolean areHeadersEqual(final MetricsFile that) {
-        return this.headers.equals(that.headers);
-    }
-
-    public boolean areMetricsEqual(final MetricsFile that) {
-        return this.metrics.equals(that.metrics);
-    }
-
-    public boolean areHistogramsEqual(final MetricsFile that) {
-        return this.histograms.equals(that.histograms);
-    }
-
-    @Override
-    public int hashCode() {
-        int result = headers.hashCode();
-        result = 31 * result + metrics.hashCode();
-        return result;
-    }
-
-    /**
-     * Convenience method to read all the Metric beans from a metrics file.
-     * @param file to be read.
-     * @return list of beans from the file.
-     */
-    public static <T extends MetricBase> List<T> readBeans(final File file) {
-        final MetricsFile<T, Comparable<?>> metricsFile = new MetricsFile<T, Comparable<?>>();
-        final Reader in = IOUtil.openFileForBufferedReading(file);
-        metricsFile.read(in);
-        CloserUtil.close(in);
-        return metricsFile.getMetrics();
-    }
-
-    /**
-     * Method to read the header from a metrics file.
-     */
-    public static List<Header> readHeaders(final File file) {
-        try {
-            final MetricsFile<MetricBase, Comparable<?>> metricsFile = new MetricsFile<MetricBase, Comparable<?>>();
-            metricsFile.read(new FileReader(file));
-            return metricsFile.getHeaders();
-        } catch (FileNotFoundException e) {
-            throw new SAMException(e.getMessage(), e);
-        }
-    }
-
-    /**
-     * Compare the metrics in two files, ignoring headers and histograms.
-     */
-    public static boolean areMetricsEqual(final File file1, final File file2) {
-        try {
-            final MetricsFile<MetricBase, Comparable<?>> mf1 = new MetricsFile<MetricBase, Comparable<?>>();
-            final MetricsFile<MetricBase, Comparable<?>> mf2 = new MetricsFile<MetricBase, Comparable<?>>();
-            mf1.read(new FileReader(file1));
-            mf2.read(new FileReader(file2));
-            return mf1.areMetricsEqual(mf2);
-        } catch (FileNotFoundException e) {
-            throw new SAMException(e.getMessage(), e);
-        }
-
-    }
-
-    /**
-     * Compare the metrics and histograms in two files, ignoring headers.
-     */
-    public static boolean areMetricsAndHistogramsEqual(final File file1, final File file2) {
-        try {
-            final MetricsFile<MetricBase, Comparable<?>> mf1 = new MetricsFile<MetricBase, Comparable<?>>();
-            final MetricsFile<MetricBase, Comparable<?>> mf2 = new MetricsFile<MetricBase, Comparable<?>>();
-            mf1.read(new FileReader(file1));
-            mf2.read(new FileReader(file2));
-
-            return mf1.areMetricsEqual(mf2) && mf1.areHistogramsEqual(mf2);
-
-        } catch (FileNotFoundException e) {
-            throw new SAMException(e.getMessage(), e);
-        }
-    }
-}
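The MetricsFile class removed above is a small serialization layer: the public fields of a MetricBase subclass become tab-separated columns, and any histograms share a common key column. A hedged usage sketch, assuming a hypothetical MyMetric bean and using only the MetricsFile methods shown in the removed source:

    import htsjdk.samtools.metrics.MetricBase;
    import htsjdk.samtools.metrics.MetricsFile;

    import java.io.File;
    import java.io.FileReader;
    import java.io.IOException;

    public class MetricsFileExample {
        /** Hypothetical bean: each public field becomes one column. */
        public static class MyMetric extends MetricBase {
            public String SAMPLE;
            public long READ_COUNT;
        }

        public static void main(String[] args) throws IOException {
            final MetricsFile<MyMetric, Integer> out = new MetricsFile<MyMetric, Integer>();
            final MyMetric metric = new MyMetric();
            metric.SAMPLE = "sample1";
            metric.READ_COUNT = 42L;
            out.addMetric(metric);

            final File file = File.createTempFile("example", ".metrics");
            out.write(file);                                 // headers first, metrics second, histogram third

            final MetricsFile<MyMetric, Integer> in = new MetricsFile<MyMetric, Integer>();
            in.read(new FileReader(file));                   // parses the beans back via reflection
            System.out.println(in.getMetrics().get(0).READ_COUNT);   // 42
        }
    }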
diff --git a/src/java/htsjdk/samtools/seekablestream/SeekableMemoryStream.java b/src/java/htsjdk/samtools/seekablestream/SeekableMemoryStream.java
deleted file mode 100644
index a6efc91..0000000
--- a/src/java/htsjdk/samtools/seekablestream/SeekableMemoryStream.java
+++ /dev/null
@@ -1,64 +0,0 @@
-package htsjdk.samtools.seekablestream;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-
-public class SeekableMemoryStream extends SeekableStream {
-    private final ByteBuffer buf;
-    private final String source;
-
-    public SeekableMemoryStream(final byte[] data, final String source) {
-        this.buf = ByteBuffer.wrap(data);
-        this.source = source;
-    }
-
-    @Override
-    public void close() throws IOException {
-        buf.clear();
-    }
-
-    @Override
-    public boolean eof() throws IOException {
-        return buf.position() == buf.limit();
-    }
-
-    @Override
-    public String getSource() {
-        return source;
-    }
-
-    @Override
-    public long length() {
-        return buf.array().length - buf.arrayOffset();
-    }
-
-    @Override
-    public int read(final byte[] buffer, final int offset, final int length) throws IOException {
-        int availableLength = Math.min(length, buf.remaining());
-        if (availableLength < 1) {
-            return -1;
-        }
-        buf.get(buffer, offset, availableLength);
-        return availableLength;
-    }
-
-    @Override
-    public void seek(final long position) throws IOException {
-        buf.position((int) position);
-    }
-
-    @Override
-    public int read() throws IOException {
-        if (buf.position() < buf.limit()) {
-            return buf.get();
-        } else {
-            return -1;
-        }
-    }
-
-    @Override
-    public long position() throws IOException {
-        return buf.position();
-    }
-
-}
diff --git a/src/java/htsjdk/samtools/seekablestream/SeekableStreamFactory.java b/src/java/htsjdk/samtools/seekablestream/SeekableStreamFactory.java
deleted file mode 100644
index 66b8589..0000000
--- a/src/java/htsjdk/samtools/seekablestream/SeekableStreamFactory.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2013 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools.seekablestream;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.URL;
-
-/**
- * Singleton class for getting {@link SeekableStream}s from URLs/paths.
- * Applications using this library can set their own factory.
- * @author jrobinso
- * @date Nov 30, 2009
- */
-public class SeekableStreamFactory{
-
-    private static final ISeekableStreamFactory DEFAULT_FACTORY;
-    private static ISeekableStreamFactory currentFactory;
-
-    static{
-        DEFAULT_FACTORY = new DefaultSeekableStreamFactory();
-        currentFactory = DEFAULT_FACTORY;
-    }
-
-    private SeekableStreamFactory(){}
-
-    public static void setInstance(final ISeekableStreamFactory factory){
-        currentFactory = factory;
-    }
-
-    public static ISeekableStreamFactory getInstance(){
-        return currentFactory;
-    }
-
-    /**
-     * Does this path point to a regular file on disk and not something like a URL?
-     * @param path the path to test
-     * @return true if the path is to a file on disk
-     */
-    public static boolean isFilePath(final String path) {
-        return ! ( path.startsWith("http:") || path.startsWith("https:") || path.startsWith("ftp:") );
-    }
-
-    private static class DefaultSeekableStreamFactory implements ISeekableStreamFactory {
-
-        public SeekableStream getStreamFor(final URL url) throws IOException {
-            return getStreamFor(url.toExternalForm());
-        }
-
-        public SeekableStream getStreamFor(final String path) throws IOException {
-            // todo -- add support for SeekableBlockInputStream
-
-            if (path.startsWith("http:") || path.startsWith("https:")) {
-                final URL url = new URL(path);
-                return new SeekableHTTPStream(url);
-            } else if (path.startsWith("ftp:")) {
-                return new SeekableFTPStream(new URL(path));
-            } else {
-                return new SeekableFileStream(new File(path));
-            }
-        }
-
-        public SeekableStream getBufferedStream(SeekableStream stream){
-            return getBufferedStream(stream, SeekableBufferedStream.DEFAULT_BUFFER_SIZE);
-        }
-
-        public SeekableStream getBufferedStream(SeekableStream stream, int bufferSize){
-            if (bufferSize == 0) return stream;
-            else return new SeekableBufferedStream(stream, bufferSize);
-        }
-
-    }
-
-}
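The factory above dispatches on the path prefix: http(s) and ftp URLs get network-backed streams, everything else is treated as a local file. A hedged sketch of the intended call pattern (the path is hypothetical and must exist for the call to succeed):

    import htsjdk.samtools.seekablestream.SeekableStream;
    import htsjdk.samtools.seekablestream.SeekableStreamFactory;

    import java.io.IOException;

    public class SeekableStreamFactoryExample {
        public static void main(String[] args) throws IOException {
            String path = "/tmp/example.bam";   // hypothetical local path
            if (SeekableStreamFactory.isFilePath(path)) {
                // Resolves to a SeekableFileStream for plain paths,
                // or to HTTP/FTP-backed streams for URL-style paths.
                SeekableStream stream = SeekableStreamFactory.getInstance().getStreamFor(path);
                stream.close();
            }
        }
    }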
diff --git a/src/java/htsjdk/samtools/sra/ReferenceCache.java b/src/java/htsjdk/samtools/sra/ReferenceCache.java
deleted file mode 100644
index de6e27b..0000000
--- a/src/java/htsjdk/samtools/sra/ReferenceCache.java
+++ /dev/null
@@ -1,79 +0,0 @@
-package htsjdk.samtools.sra;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.SAMSequenceRecord;
-import ngs.ErrorMsg;
-import ngs.ReadCollection;
-import ngs.Reference;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * A thread-safe wrapper for a list of cached Reference objects.
- * Those objects can be used from different threads without issues; however, to load and store a Reference object we
- * need to acquire a lock.
- *
- * Created by andrii.nikitiuk on 10/28/15.
- */
-public class ReferenceCache {
-    private ReadCollection run;
-    private SAMFileHeader virtualHeader;
-    private final List<Reference> cachedReferences;
-
-    public ReferenceCache(ReadCollection run, SAMFileHeader virtualHeader) {
-        this.run = run;
-        this.virtualHeader = virtualHeader;
-        cachedReferences = initializeReferenceCache();
-    }
-
-    /**
-     * This method returns Reference objects by reference index in the SAM header.
-     * Those objects can be used from different threads.
-     *
-     * This method maintains thread safety: if the Reference object is already set, it can be returned
-     * without taking a lock. However, if the Reference object is null, we need to acquire a lock, load the object
-     * and save it in the array.
-     *
-     * @param referenceIndex reference index in the SAM header
-     * @return a Reference object
-     */
-    public Reference get(int referenceIndex) {
-        Reference reference = cachedReferences.get(referenceIndex);
-
-        if (reference != null) {
-            return reference;
-        }
-
-        // maintain thread safety
-        synchronized (this) {
-            reference = cachedReferences.get(referenceIndex);
-            if (reference == null) {
-                try {
-                    reference = run.getReference(virtualHeader.getSequence(referenceIndex).getSequenceName());
-                } catch (ErrorMsg e) {
-                    throw new RuntimeException(e);
-                }
-                cachedReferences.set(referenceIndex, reference);
-            }
-        }
-
-
-        return reference;
-    }
-
-    private List<Reference> initializeReferenceCache() {
-        if (virtualHeader == null) {
-            throw new RuntimeException("Cannot cache references - header is uninitialized");
-        }
-
-        SAMSequenceDictionary sequenceDictionary = virtualHeader.getSequenceDictionary();
-        List<Reference> references = new ArrayList<Reference>(sequenceDictionary.size());
-        for (SAMSequenceRecord sequence : sequenceDictionary.getSequences()) {
-            references.add(null);
-        }
-
-        return references;
-    }
-}
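ReferenceCache above uses a check/lock/re-check pattern: read the slot without a lock, and only synchronize when the slot still needs to be loaded. A generic sketch of the same idea, shown with an AtomicReferenceArray so the unsynchronized read is also safe; the loader here is only a stand-in for the real run.getReference(...) call:

    import java.util.concurrent.atomic.AtomicReferenceArray;

    public class LazyArrayCache {
        private final AtomicReferenceArray<String> slots;

        public LazyArrayCache(int size) {
            slots = new AtomicReferenceArray<String>(size);
        }

        public String get(int index) {
            String value = slots.get(index);
            if (value != null) {
                return value;               // fast path: already loaded, no lock needed
            }
            synchronized (this) {
                value = slots.get(index);   // re-check under the lock
                if (value == null) {
                    value = expensiveLoad(index);
                    slots.set(index, value);
                }
                return value;
            }
        }

        private String expensiveLoad(int index) {
            return "reference-" + index;    // stand-in for the real reference lookup
        }

        public static void main(String[] args) {
            LazyArrayCache cache = new LazyArrayCache(3);
            System.out.println(cache.get(1));   // loads and caches "reference-1"
            System.out.println(cache.get(1));   // served from the cache
        }
    }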
diff --git a/src/java/htsjdk/samtools/sra/SRAAccession.java b/src/java/htsjdk/samtools/sra/SRAAccession.java
deleted file mode 100644
index 1f2dbe0..0000000
--- a/src/java/htsjdk/samtools/sra/SRAAccession.java
+++ /dev/null
@@ -1,136 +0,0 @@
-/*===========================================================================
-*
-*                            PUBLIC DOMAIN NOTICE
-*               National Center for Biotechnology Information
-*
-*  This software/database is a "United States Government Work" under the
-*  terms of the United States Copyright Act.  It was written as part of
-*  the author's official duties as a United States Government employee and
-*  thus cannot be copyrighted.  This software/database is freely available
-*  to the public for use. The National Library of Medicine and the U.S.
-*  Government have not placed any restriction on its use or reproduction.
-*
-*  Although all reasonable efforts have been taken to ensure the accuracy
-*  and reliability of the software and data, the NLM and the U.S.
-*  Government do not and cannot warrant the performance or results that
-*  may be obtained by using this software or data. The NLM and the U.S.
-*  Government disclaim all warranties, express or implied, including
-*  warranties of performance, merchantability or fitness for any particular
-*  purpose.
-*
-*  Please cite the author in any work or product based on this material.
-*
-* ===========================================================================
-*
-*/
-
-package htsjdk.samtools.sra;
-
-import htsjdk.samtools.util.Log;
-import gov.nih.nlm.ncbi.ngs.NGS;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.Serializable;
-import java.util.Arrays;
-
-/**
- * Describes a single SRA accession for an SRA read collection.
- * Also provides app-string functionality and allows checking whether SRA is supported on the running platform.
- *
- * Important: due to checks performed in SRAAccession.isValid(), we won't recognise any accessions other
- * than ones that follow the pattern "^[SED]RR[0-9]{6,9}$", e.g. SRR000123
- */
-public class SRAAccession implements Serializable {
-    private static final Log log = Log.getInstance(SRAAccession.class);
-
-    private static Boolean isSupportedCached = null;
-    private static String appVersionString = null;
-    private final static String defaultAppVersionString = "[unknown software]";
-    private final static String htsJdkVersionString = "HTSJDK-NGS";
-
-    private String acc;
-
-    /**
-     * Sets an app version string which will let SRA know which software uses it.
-     * @param appVersionString a string that describes running application
-     */
-    public static void setAppVersionString(String appVersionString) {
-        SRAAccession.appVersionString = appVersionString;
-    }
-
-    /**
-     * Returns true if SRA is supported on the running platform
-     * @return true if SRA engine was successfully loaded and operational, false otherwise
-     */
-    public static boolean isSupported() {
-        if (isSupportedCached == null) {
-            log.debug("Checking if SRA module is supported in that environment");
-            isSupportedCached = NGS.isSupported();
-            if (!isSupportedCached) {
-                log.info("SRA is not supported. Will not be able to read from SRA");
-            } else {
-                NGS.setAppVersionString(getFullVersionString());
-            }
-        }
-        return isSupportedCached;
-    }
-
-    /**
-     * @param acc accession
-     * @return true if a string is a valid SRA accession
-     */
-    public static boolean isValid(String acc) {
-        boolean looksLikeSRA = false;
-        File f = new File(acc);
-        if (f.isFile()) {
-            byte[] buffer = new byte[8];
-            byte[] signature1 = "NCBI.sra".getBytes();
-            byte[] signature2 = "NCBInenc".getBytes();
-
-            try (InputStream is = new FileInputStream(f)) {
-                int numRead = is.read(buffer);
-
-                looksLikeSRA = numRead == buffer.length &&
-                        (Arrays.equals(buffer, signature1) || Arrays.equals(buffer, signature2));
-            } catch (IOException e) {
-                looksLikeSRA = false;
-            }
-        } else if (f.exists()) {
-            // anything else local other than a file is not an SRA archive
-            looksLikeSRA = false;
-        } else {
-            looksLikeSRA = acc.toUpperCase().matches ( "^[SED]RR[0-9]{6,9}$" );
-        }
-
-        if (!looksLikeSRA) return false;
-
-        return isSupported() && NGS.isValid(acc);
-    }
-
-    /**
-     * @param acc accession
-     */
-    public SRAAccession(String acc) {
-        this.acc = acc;
-    }
-
-    public String toString() {
-        return acc;
-    }
-
-    /**
-     * @return true if contained string is an SRA accession
-     */
-    public boolean isValid() {
-        return SRAAccession.isValid(acc);
-    }
-
-    private static String getFullVersionString() {
-        String versionString = appVersionString == null ? defaultAppVersionString : appVersionString;
-        versionString += " through " + htsJdkVersionString;
-        return versionString;
-    }
-}
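isValid() above falls back to a pattern match when the accession is not a local file. A tiny sketch of what that pattern accepts and rejects:

    public class AccessionPatternExample {
        public static void main(String[] args) {
            // The same pattern SRAAccession.isValid() applies to non-file inputs.
            String pattern = "^[SED]RR[0-9]{6,9}$";
            System.out.println("SRR000123".matches(pattern));   // true
            System.out.println("XRR000123".matches(pattern));   // false: bad prefix
            System.out.println("SRR12345".matches(pattern));    // false: only five digits
        }
    }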
diff --git a/src/java/htsjdk/samtools/sra/SRAAlignmentIterator.java b/src/java/htsjdk/samtools/sra/SRAAlignmentIterator.java
deleted file mode 100644
index 2ebade1..0000000
--- a/src/java/htsjdk/samtools/sra/SRAAlignmentIterator.java
+++ /dev/null
@@ -1,194 +0,0 @@
-/*===========================================================================
-*
-*                            PUBLIC DOMAIN NOTICE
-*               National Center for Biotechnology Information
-*
-*  This software/database is a "United States Government Work" under the
-*  terms of the United States Copyright Act.  It was written as part of
-*  the author's official duties as a United States Government employee and
-*  thus cannot be copyrighted.  This software/database is freely available
-*  to the public for use. The National Library of Medicine and the U.S.
-*  Government have not placed any restriction on its use or reproduction.
-*
-*  Although all reasonable efforts have been taken to ensure the accuracy
-*  and reliability of the software and data, the NLM and the U.S.
-*  Government do not and cannot warrant the performance or results that
-*  may be obtained by using this software or data. The NLM and the U.S.
-*  Government disclaim all warranties, express or implied, including
-*  warranties of performance, merchantability or fitness for any particular
-*  purpose.
-*
-*  Please cite the author in any work or product based on this material.
-*
-* ===========================================================================
-*
-*/
-
-package htsjdk.samtools.sra;
-
-
-import htsjdk.samtools.Chunk;
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.SRAIterator;
-import htsjdk.samtools.ValidationStringency;
-import ngs.Alignment;
-import ngs.AlignmentIterator;
-import ngs.ErrorMsg;
-import ngs.ReadCollection;
-import ngs.Reference;
-import ngs.ReferenceIterator;
-
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-import java.util.NoSuchElementException;
-
-
-/**
- * Iterator for aligned reads.
- * It is used from SRAIterator.
- * Created by andrii.nikitiuk on 9/3/15.
- */
-public class SRAAlignmentIterator implements Iterator<SAMRecord> {
-    private ValidationStringency validationStringency;
-
-    private SRAAccession accession;
-    private ReadCollection run;
-    private SAMFileHeader header;
-    private ReferenceCache cachedReferences;
-    private List<Long> referencesLengths;
-    private Iterator<Chunk> referencesChunksIterator;
-    private int currentReference = -1;
-
-    private boolean hasMoreReferences = true;
-
-    private AlignmentIterator alignedIterator;
-    private Boolean hasMoreAlignments = false;
-
-    private SRALazyRecord lastRecord;
-
-    /**
-     * @param run opened read collection
-     * @param header sam header
-     * @param cachedReferences list of cached references shared among all iterators from a single SRAFileReader
-     * @param recordRangeInfo info about record ranges within the SRA archive
-     * @param chunk used to determine which alignments the iterator should return
-     */
-    public SRAAlignmentIterator(SRAAccession accession, final ReadCollection run, final SAMFileHeader header, ReferenceCache cachedReferences,
-                                final SRAIterator.RecordRangeInfo recordRangeInfo, final Chunk chunk) {
-        this.accession = accession;
-        this.run = run;
-        this.header = header;
-        this.cachedReferences = cachedReferences;
-        this.referencesLengths = recordRangeInfo.getReferenceLengthsAligned();
-
-        referencesChunksIterator = getReferenceChunks(chunk).iterator();
-
-        try {
-            nextReference();
-        } catch (final Exception e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    @Override
-    public boolean hasNext() {
-        // check aligned
-        if (hasMoreAlignments == null) {
-            try {
-                lastRecord.detachFromIterator();
-                hasMoreAlignments = alignedIterator.nextAlignment();
-            } catch (ErrorMsg e) {
-                throw new RuntimeException(e);
-            }
-        }
-        while (!hasMoreAlignments && hasMoreReferences) {
-            nextReference();
-        }
-
-        return hasMoreAlignments;
-    }
-
-    @Override
-    public SAMRecord next() {
-        if (!hasNext()) {
-            throw new NoSuchElementException("No more alignments are available");
-        }
-
-        return nextAlignment();
-    }
-
-    @Override
-    public void remove() {
-        throw new UnsupportedOperationException("Removal of records not implemented.");
-    }
-
-    public void setValidationStringency(ValidationStringency validationStringency) {
-        this.validationStringency = validationStringency;
-    }
-
-    private SAMRecord nextAlignment() {
-        try {
-            lastRecord = new SRALazyRecord(header, accession, run, alignedIterator, alignedIterator.getReadId(), alignedIterator.getAlignmentId());
-        } catch (ErrorMsg e) {
-            throw new RuntimeException(e);
-        }
-        if (validationStringency != null) {
-            lastRecord.setValidationStringency(validationStringency);
-        }
-
-        hasMoreAlignments = null;
-
-        return lastRecord;
-    }
-
-    private void nextReference() {
-        if (!hasMoreReferences) {
-            throw new NoSuchElementException("Cannot get next reference - already at last one");
-        }
-
-        try {
-            hasMoreReferences = referencesChunksIterator.hasNext();
-            if (!hasMoreReferences) {
-                hasMoreAlignments = false;
-                return;
-            }
-
-            currentReference++;
-            Chunk refChunk = referencesChunksIterator.next();
-            if (refChunk == null) {
-                hasMoreAlignments = false;
-                return;
-            }
-
-            Reference reference = cachedReferences.get(currentReference);
-
-            alignedIterator = reference.getFilteredAlignmentSlice(
-                    refChunk.getChunkStart(), refChunk.getChunkEnd() - refChunk.getChunkStart(),
-                    Alignment.all, Alignment.startWithinSlice | Alignment.passDuplicates | Alignment.passFailed, 0);
-
-            hasMoreAlignments = alignedIterator.nextAlignment();
-        } catch (ErrorMsg e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    private List<Chunk> getReferenceChunks(final Chunk chunk) {
-        List<Chunk> referencesChunks = new ArrayList<Chunk>();
-        long refOffset = 0;
-        for (Long refLen : referencesLengths) {
-            if (chunk.getChunkStart() - refOffset >= refLen || chunk.getChunkEnd() - refOffset <= 0) {
-                referencesChunks.add(null);
-            } else {
-                long refChunkStart = Math.max(chunk.getChunkStart() - refOffset, 0);
-                long refChunkEnd = Math.min(chunk.getChunkEnd() - refOffset, refLen);
-                referencesChunks.add(new Chunk(refChunkStart, refChunkEnd));
-            }
-
-            refOffset += refLen;
-        }
-
-        return referencesChunks;
-    }
-}
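getReferenceChunks() above splits one global record chunk into per-reference chunks by walking the cumulative reference lengths; references outside the requested range get a null placeholder. A worked example of that arithmetic with made-up lengths:

    public class ChunkSplitExample {
        public static void main(String[] args) {
            long[] referenceLengths = {100, 200, 300};   // made-up aligned lengths
            long chunkStart = 150, chunkEnd = 450;       // global [start, end) range

            long refOffset = 0;
            for (long refLen : referenceLengths) {
                if (chunkStart - refOffset >= refLen || chunkEnd - refOffset <= 0) {
                    System.out.println("null");          // reference not touched by the chunk
                } else {
                    long start = Math.max(chunkStart - refOffset, 0);
                    long end = Math.min(chunkEnd - refOffset, refLen);
                    System.out.println("[" + start + ", " + end + ")");
                }
                refOffset += refLen;
            }
            // Prints: null, [50, 200), [0, 150)
        }
    }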
diff --git a/src/java/htsjdk/samtools/sra/SRAIndexedSequenceFile.java b/src/java/htsjdk/samtools/sra/SRAIndexedSequenceFile.java
deleted file mode 100644
index 567bce0..0000000
--- a/src/java/htsjdk/samtools/sra/SRAIndexedSequenceFile.java
+++ /dev/null
@@ -1,121 +0,0 @@
-package htsjdk.samtools.sra;
-
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.SAMSequenceRecord;
-import htsjdk.samtools.reference.ReferenceSequence;
-import htsjdk.samtools.reference.ReferenceSequenceFile;
-import htsjdk.samtools.sra.SRAAccession;
-import ngs.ErrorMsg;
-import ngs.ReadCollection;
-import ngs.Reference;
-import ngs.ReferenceIterator;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-
-/**
- * Allows reading Reference data from SRA
- */
-public class SRAIndexedSequenceFile implements ReferenceSequenceFile {
-    private SRAAccession acc;
-    private ReadCollection run;
-    private ThreadLocal<HashMap<Integer, Reference>> cachedReferences = new ThreadLocal<HashMap<Integer, Reference>>();
-
-    private Iterator<SAMSequenceRecord> sequenceRecordIterator;
-
-    protected SAMSequenceDictionary sequenceDictionary;
-
-    /**
-     * @param acc accession
-     */
-    public SRAIndexedSequenceFile(SRAAccession acc) {
-        this.acc = acc;
-
-        if (!acc.isValid()) {
-            throw new RuntimeException("Passed an invalid SRA accession into SRA reader: " + acc);
-        }
-
-        try {
-            run = gov.nih.nlm.ncbi.ngs.NGS.openReadCollection(acc.toString());
-            sequenceDictionary = loadSequenceDictionary();
-        } catch (final ErrorMsg e) {
-            throw new RuntimeException(e);
-        }
-
-        reset();
-    }
-
-    @Override
-    public SAMSequenceDictionary getSequenceDictionary() {
-        return sequenceDictionary;
-    }
-
-    @Override
-    public ReferenceSequence nextSequence() {
-        SAMSequenceRecord sequence = sequenceRecordIterator.next();
-        return getSubsequenceAt(sequence.getSequenceName(), 1L, sequence.getSequenceLength());
-    }
-
-    @Override
-    public void reset() {
-        sequenceRecordIterator = sequenceDictionary.getSequences().iterator();
-    }
-
-    @Override
-    public boolean isIndexed() {
-        return true;
-    }
-
-    @Override
-    public ReferenceSequence getSequence(String contig) {
-        return getSubsequenceAt(contig, 1L, sequenceDictionary.getSequence(contig).getSequenceLength());
-    }
-
-    @Override
-    public ReferenceSequence getSubsequenceAt(String contig, long start, long stop) {
-        SAMSequenceRecord sequence = sequenceDictionary.getSequence(contig);
-        int referenceIndex = sequence.getSequenceIndex();
-
-        byte[] bases;
-
-        try {
-            HashMap<Integer, Reference> localRefs = cachedReferences.get();
-            if (localRefs == null) {
-                localRefs = new HashMap<Integer, Reference>();
-                cachedReferences.set(localRefs);
-            }
-            Reference reference = localRefs.get(referenceIndex);
-            if (reference == null) {
-                reference = run.getReference(contig);
-                localRefs.put(referenceIndex, reference);
-            }
-
-            bases = reference.getReferenceBases(start - 1, stop - (start - 1)).getBytes();
-        } catch (ErrorMsg e) {
-            throw new RuntimeException(e);
-        }
-
-        return new ReferenceSequence(contig, referenceIndex, bases);
-    }
-
-    @Override
-    public void close() throws IOException {
-
-    }
-
-    protected SAMSequenceDictionary loadSequenceDictionary() throws ErrorMsg {
-        SAMSequenceDictionary dict = new SAMSequenceDictionary();
-
-        ReferenceIterator itRef = run.getReferences();
-        while (itRef.nextReference()) {
-            dict.addSequence(new SAMSequenceRecord(itRef.getCanonicalName(), (int) itRef.getLength()));
-        }
-
-        return dict;
-    }
-}
\ No newline at end of file
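getSubsequenceAt() above translates the 1-based, inclusive [start, stop] request of ReferenceSequenceFile into a 0-based offset and a length for the ngs Reference call shown in the removed code. The conversion on example numbers:

    public class SubsequenceCoordinatesExample {
        public static void main(String[] args) {
            long start = 101, stop = 200;          // 1-based, inclusive request
            long offset = start - 1;               // 0-based offset passed to getReferenceBases: 100
            long length = stop - (start - 1);      // number of bases requested: 100
            System.out.println(offset + " " + length);
        }
    }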
diff --git a/src/java/htsjdk/samtools/util/BlockCompressedOutputStream.java b/src/java/htsjdk/samtools/util/BlockCompressedOutputStream.java
deleted file mode 100644
index 138a975..0000000
--- a/src/java/htsjdk/samtools/util/BlockCompressedOutputStream.java
+++ /dev/null
@@ -1,336 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools.util;
-
-import htsjdk.samtools.util.zip.DeflaterFactory;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.util.zip.CRC32;
-import java.util.zip.Deflater;
-
-/**
- * Writer for a file that is a series of gzip blocks (BGZF format).  The caller just treats it as an
- * OutputStream, and under the covers a gzip block is written when the amount of uncompressed as-yet-unwritten
- * bytes reaches a threshold.
- *
- * The advantage of BGZF over conventional gzip is that BGZF allows for seeking without having to scan through
- * the entire file up to the position being sought.
- *
- * Note that the flush() method should not be called by the client
- * unless you know what you're doing, because it forces a gzip block to be written even if the
- * number of buffered bytes has not reached threshold.  close(), on the other hand, must be called
- * when done writing in order to force the last gzip block to be written.
- *
- * c.f. http://samtools.sourceforge.net/SAM1.pdf for details of BGZF file format.
- */
-public class BlockCompressedOutputStream
-        extends OutputStream
-        implements LocationAware
-{
-    private static int defaultCompressionLevel = BlockCompressedStreamConstants.DEFAULT_COMPRESSION_LEVEL;
-
-    /**
-     * Sets the GZip compression level for subsequent BlockCompressedOutputStream object creations
-     * that do not specify the compression level.
-     * @param compressionLevel 1 <= compressionLevel <= 9
-     */
-    public static void setDefaultCompressionLevel(final int compressionLevel) {
-        if (compressionLevel < Deflater.NO_COMPRESSION || compressionLevel > Deflater.BEST_COMPRESSION) {
-            throw new IllegalArgumentException("Invalid compression level: " + compressionLevel);
-        }
-        defaultCompressionLevel = compressionLevel;
-    }
-
-    public static int getDefaultCompressionLevel() {
-        return defaultCompressionLevel;
-    }
-
-    private final BinaryCodec codec;
-    private final byte[] uncompressedBuffer = new byte[BlockCompressedStreamConstants.DEFAULT_UNCOMPRESSED_BLOCK_SIZE];
-    private int numUncompressedBytes = 0;
-    private final byte[] compressedBuffer =
-            new byte[BlockCompressedStreamConstants.MAX_COMPRESSED_BLOCK_SIZE -
-                    BlockCompressedStreamConstants.BLOCK_HEADER_LENGTH];
-    private final Deflater deflater;
-
-    // A second deflater is created for the very unlikely case where the regular deflation actually makes
-    // things bigger, and the compressed block is too big.  It should be possible to downshift the
-    // primary deflater to NO_COMPRESSION level, recompress, and then restore it to its original setting,
-    // but in practice that doesn't work.
-    // The motivation for deflating at NO_COMPRESSION level is that it will predictably produce compressed
-    // output that is 10 bytes larger than the input, and the threshold at which a block is generated is such that
-    // the size of the final gzip block will always be <= 64K.  This is preferred over the previous method,
-    // which would attempt to compress up to 64K bytes, and if the resulting compressed block was too large,
-    // try compressing fewer input bytes (aka "downshifting").  The problem with downshifting is that
-    // getFilePointer might return an inaccurate value.
-    // I assume (AW 29-Oct-2013) that there is no value in using hardware-assisted deflater for no-compression mode,
-    // so just use JDK standard.
-    private final Deflater noCompressionDeflater = new Deflater(Deflater.NO_COMPRESSION, true);
-    private final CRC32 crc32 = new CRC32();
-    private File file = null;
-    private long mBlockAddress = 0;
-
-
-    // Really a local variable, but allocate once to reduce GC burden.
-    private final byte[] singleByteArray = new byte[1];
-
-    /**
-     * Uses the default compression level, which is 5 unless changed by setDefaultCompressionLevel.
-     */
-    public BlockCompressedOutputStream(final String filename) {
-        this(filename, defaultCompressionLevel);
-    }
-
-    /**
-     * Uses the default compression level, which is 5 unless changed by setDefaultCompressionLevel.
-     */
-    public BlockCompressedOutputStream(final File file) {
-        this(file, defaultCompressionLevel);
-    }
-
-    /**
-     * Prepare to compress at the given compression level
-     * @param compressionLevel 1 <= compressionLevel <= 9
-     */
-    public BlockCompressedOutputStream(final String filename, final int compressionLevel) {
-        this(new File(filename), compressionLevel);
-    }
-
-    /**
-     * Prepare to compress at the given compression level
-     * @param compressionLevel 1 <= compressionLevel <= 9
-     */
-    public BlockCompressedOutputStream(final File file, final int compressionLevel) {
-        this.file = file;
-        codec = new BinaryCodec(file, true);
-        deflater = DeflaterFactory.makeDeflater(compressionLevel, true);
-    }
-
-    /**
-     * Constructors that take output streams
-     * file may be null
-     */
-    public BlockCompressedOutputStream(final OutputStream os, final File file) {
-        this(os, file, defaultCompressionLevel);
-    }
-
-    public BlockCompressedOutputStream(final OutputStream os, final File file, final int compressionLevel) {
-        this.file = file;
-        codec = new BinaryCodec(os);
-        if (file != null) {
-            codec.setOutputFileName(file.getAbsolutePath());
-        }
-        deflater = DeflaterFactory.makeDeflater(compressionLevel, true);
-    }
-
-    /**
-     *
-     * @param location May be null.  Used for error messages, and for checking file termination.
-     * @param output May or may not already be a BlockCompressedOutputStream.
-     * @return A BlockCompressedOutputStream, either by wrapping the given OutputStream, or by casting if it already
-     *         is a BCOS.
-     */
-    public static BlockCompressedOutputStream maybeBgzfWrapOutputStream(final File location, OutputStream output) {
-        if (!(output instanceof BlockCompressedOutputStream)) {
-            return new BlockCompressedOutputStream(output, location);
-        } else {
-            return (BlockCompressedOutputStream)output;
-        }
-    }
-
-    /**
-     * Writes b.length bytes from the specified byte array to this output stream. The general contract for write(b)
-     * is that it should have exactly the same effect as the call write(b, 0, b.length).
-     * @param bytes the data
-     */
-    @Override
-    public void write(final byte[] bytes) throws IOException {
-        write(bytes, 0, bytes.length);
-    }
-
-    /**
-     * Writes len bytes from the specified byte array starting at offset off to this output stream. The general
-     * contract for write(b, off, len) is that some of the bytes in the array b are written to the output stream in order;
-     * element b[off] is the first byte written and b[off+len-1] is the last byte written by this operation.
-     *
-     * @param bytes the data
-     * @param startIndex the start offset in the data
-     * @param numBytes the number of bytes to write
-     */
-    @Override
-    public void write(final byte[] bytes, int startIndex, int numBytes) throws IOException {
-        assert(numUncompressedBytes < uncompressedBuffer.length);
-        while (numBytes > 0) {
-            final int bytesToWrite = Math.min(uncompressedBuffer.length - numUncompressedBytes, numBytes);
-            System.arraycopy(bytes, startIndex, uncompressedBuffer, numUncompressedBytes, bytesToWrite);
-            numUncompressedBytes += bytesToWrite;
-            startIndex += bytesToWrite;
-            numBytes -= bytesToWrite;
-            assert(numBytes >= 0);
-            if (numUncompressedBytes == uncompressedBuffer.length) {
-                deflateBlock();
-            }
-        }
-    }
-
-    /**
-     * WARNING: flush() affects the output format, because it causes the current contents of uncompressedBuffer
-     * to be compressed and written, even if it isn't full.  Unless you know what you're doing, don't call flush().
-     * Instead, call close(), which will flush any unwritten data before closing the underlying stream.
-     *
-     */
-    @Override
-    public void flush() throws IOException {
-        while (numUncompressedBytes > 0) {
-            deflateBlock();
-        }
-        codec.getOutputStream().flush();
-    }
-
-    /**
-     * close() must be called in order to flush any remaining buffered bytes.  An unclosed file will likely be
-     * defective.
-     *
-     */
-    @Override
-    public void close() throws IOException {
-        flush();
-        // For debugging...
-        // if (numberOfThrottleBacks > 0) {
-        //     System.err.println("In BlockCompressedOutputStream, had to throttle back " + numberOfThrottleBacks +
-        //                        " times for file " + codec.getOutputFileName());
-        // }
-        codec.writeBytes(BlockCompressedStreamConstants.EMPTY_GZIP_BLOCK);
-        codec.close();
-        // Can't re-open something that is not a regular file, e.g. a named pipe or an output stream
-        if (this.file == null || !this.file.isFile()) return;
-        if (BlockCompressedInputStream.checkTermination(this.file) !=
-                BlockCompressedInputStream.FileTermination.HAS_TERMINATOR_BLOCK) {
-            throw new IOException("Terminator block not found after closing BGZF file " + this.file);
-        }
-    }
-
-    /**
-     * Writes the specified byte to this output stream. The general contract for write is that one byte is written
-     * to the output stream. The byte to be written is the eight low-order bits of the argument b.
-     * The 24 high-order bits of b are ignored.
-     * @param bite the byte to write; only the eight low-order bits are used
-     * @throws IOException if an I/O error occurs while writing
-     */
-    public void write(final int bite) throws IOException {
-        singleByteArray[0] = (byte)bite;
-        write(singleByteArray);
-    }
-
-    /** Encodes the current position as a virtual file pointer.
-     * The upper 48 bits are the byte offset of the current block in the compressed stream.
-     * The lower 16 bits are the byte offset into the uncompressed data within that block.
-     */
-    public long getFilePointer(){
-        return BlockCompressedFilePointerUtil.makeFilePointer(mBlockAddress, numUncompressedBytes);
-    }
-
-    @Override
-    public long getPosition() {
-        return getFilePointer();
-    }
-
-    /**
-     * Attempt to write the data in uncompressedBuffer to the underlying file in a gzip block.
-     * If the entire uncompressedBuffer does not fit in the maximum allowed size, reduce the amount
-     * of data to be compressed, and slide the excess down in uncompressedBuffer so it can be picked
-     * up in the next deflate event.
-     * @return size of gzip block that was written.
-     */
-    private int deflateBlock() {
-        if (numUncompressedBytes == 0) {
-            return 0;
-        }
-        final int bytesToCompress = numUncompressedBytes;
-        // Compress the input
-        deflater.reset();
-        deflater.setInput(uncompressedBuffer, 0, bytesToCompress);
-        deflater.finish();
-        int compressedSize = deflater.deflate(compressedBuffer, 0, compressedBuffer.length);
-
-        // If it didn't all fit in compressedBuffer.length, set compression level to NO_COMPRESSION
-        // and try again.  This should always fit.
-        if (!deflater.finished()) {
-            noCompressionDeflater.reset();
-            noCompressionDeflater.setInput(uncompressedBuffer, 0, bytesToCompress);
-            noCompressionDeflater.finish();
-            compressedSize = noCompressionDeflater.deflate(compressedBuffer, 0, compressedBuffer.length);
-            if (!noCompressionDeflater.finished()) {
-                throw new IllegalStateException("unpossible");
-            }
-        }
-        // Data compressed small enough, so write it out.
-        crc32.reset();
-        crc32.update(uncompressedBuffer, 0, bytesToCompress);
-
-        final int totalBlockSize = writeGzipBlock(compressedSize, bytesToCompress, crc32.getValue());
-        assert(bytesToCompress <= numUncompressedBytes);
-
-        // Clear out from uncompressedBuffer the data that was written
-        if (bytesToCompress == numUncompressedBytes) {
-            numUncompressedBytes = 0;
-        } else {
-            System.arraycopy(uncompressedBuffer, bytesToCompress, uncompressedBuffer, 0,
-                    numUncompressedBytes - bytesToCompress);
-            numUncompressedBytes -= bytesToCompress;
-        }
-        mBlockAddress += totalBlockSize;
-        return totalBlockSize;
-    }
-
-    /**
-     * Writes the entire gzip block, assuming the compressed data is stored in compressedBuffer
-     * @return  size of gzip block that was written.
-     */
-    private int writeGzipBlock(final int compressedSize, final int uncompressedSize, final long crc) {
-        // Init gzip header
-        codec.writeByte(BlockCompressedStreamConstants.GZIP_ID1);
-        codec.writeByte(BlockCompressedStreamConstants.GZIP_ID2);
-        codec.writeByte(BlockCompressedStreamConstants.GZIP_CM_DEFLATE);
-        codec.writeByte(BlockCompressedStreamConstants.GZIP_FLG);
-        codec.writeInt(0); // Modification time
-        codec.writeByte(BlockCompressedStreamConstants.GZIP_XFL);
-        codec.writeByte(BlockCompressedStreamConstants.GZIP_OS_UNKNOWN);
-        codec.writeShort(BlockCompressedStreamConstants.GZIP_XLEN);
-        codec.writeByte(BlockCompressedStreamConstants.BGZF_ID1);
-        codec.writeByte(BlockCompressedStreamConstants.BGZF_ID2);
-        codec.writeShort(BlockCompressedStreamConstants.BGZF_LEN);
-        final int totalBlockSize = compressedSize + BlockCompressedStreamConstants.BLOCK_HEADER_LENGTH +
-                BlockCompressedStreamConstants.BLOCK_FOOTER_LENGTH;
-
-        // The spec stores the block size minus 1 (BSIZE), so that the maximum 65536-byte block still fits in a uint16.
-        codec.writeShort((short)(totalBlockSize - 1));
-        codec.writeBytes(compressedBuffer, 0, compressedSize);
-        codec.writeInt((int)crc);
-        codec.writeInt(uncompressedSize);
-        return totalBlockSize;
-    }
-}
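
For orientation, a minimal usage sketch of the class deleted above.  The output path and
record contents are invented; the constructor, write(), getFilePointer() and close() calls
are those defined in the source.

    import htsjdk.samtools.util.BlockCompressedOutputStream;
    import java.io.File;
    import java.io.IOException;

    public class BgzfWriteExample {
        public static void main(final String[] args) throws IOException {
            final File out = new File("example.bgzf");                 // hypothetical path
            final BlockCompressedOutputStream bcos = new BlockCompressedOutputStream(out);
            try {
                bcos.write("first record\n".getBytes());
                // Virtual file pointer: upper 48 bits = offset of the current block in the
                // compressed stream, lower 16 bits = offset within the uncompressed block.
                final long virtualPointer = bcos.getFilePointer();
                bcos.write("second record\n".getBytes());
                System.out.println("virtual file pointer = " + virtualPointer);
            } finally {
                bcos.close();  // flushes the last block and writes the BGZF terminator block
            }
        }
    }
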
diff --git a/src/java/htsjdk/samtools/util/Histogram.java b/src/java/htsjdk/samtools/util/Histogram.java
deleted file mode 100644
index f69408c..0000000
--- a/src/java/htsjdk/samtools/util/Histogram.java
+++ /dev/null
@@ -1,480 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-
-package htsjdk.samtools.util;
-
-import htsjdk.samtools.util.Histogram.Bin;
-import java.io.Serializable;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.Iterator;
-import java.util.List;
-import java.util.TreeMap;
-
-import static java.lang.Math.*;
-
-/**
- * Class for computing and accessing histogram type data.  Stored internally in
- * a sorted Map so that keys can be iterated in order.
- *
- * @author Tim Fennell
- */
-public class Histogram<K extends Comparable> extends TreeMap<K, Bin> {
-    private String binLabel   = "BIN";
-    private String valueLabel = "VALUE";
-
-    /** Constructs a new Histogram with default bin and value labels. */
-    public Histogram() { }
-
-    /** Constructs a new Histogram with supplied bin and value labels. */
-    public Histogram(final String binLabel, final String valueLabel) {
-        this.binLabel = binLabel;
-        this.valueLabel = valueLabel;
-    }
-
-    /** Constructs a new Histogram that'll use the supplied comparator to sort keys. */
-    public Histogram(final Comparator<K> comparator) {
-        super(comparator);
-    }
-
-    /** Constructor that takes labels for the bin and values and a comparator to sort the bins. */
-    public Histogram(final String binLabel, final String valueLabel, final Comparator<K> comparator) {
-        this(comparator);
-        this.binLabel = binLabel;
-        this.valueLabel = valueLabel;
-    }
-
-    /** Copy constructor for a histogram. */
-    public Histogram(final Histogram<K> in) {
-        super(in);
-        this.binLabel = in.binLabel;
-        this.valueLabel = in.valueLabel;
-    }
-
-    /** Represents a bin in the Histogram. */
-    public class Bin implements Serializable{
-        private final K id;
-        private double value = 0;
-
-        /** Constructs a new bin with the given ID. */
-        private Bin(final K id) { this.id = id; }
-
-        /** Gets the ID of this bin. */
-        public K getId() { return id; }
-
-        /** Gets the value in the bin. */
-        public double getValue() { return value; }
-
-        /** Returns the String format for the value in the bin. */
-        public String toString() { return String.valueOf(this.value); }
-
-        /** Checks the equality of the bin by ID and value. */
-        public boolean equals(final Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-
-            final Bin bin = (Bin) o;
-
-            if (Double.compare(bin.value, value) != 0) return false;
-            if (!id.equals(bin.id)) return false;
-
-            return true;
-        }
-
-        @Override
-        public int hashCode() {
-            int result;
-            final long temp;
-            result = id.hashCode();
-            temp = value != +0.0d ? Double.doubleToLongBits(value) : 0L;
-            result = 31 * result + (int) (temp ^ (temp >>> 32));
-            return result;
-        }
-
-        public double getIdValue() {
-            if (id instanceof Number) {
-                return ((Number) id).doubleValue();
-            } else {
-                throw new UnsupportedOperationException("getIdValue only supported for Histogram<? extends Number>");
-            }
-        }
-    }
-
-    /** Prefill the histogram with the supplied set of bins. */
-    public void prefillBins(final K... ids) {
-        for (final K id : ids) {
-            put(id, new Bin(id));
-        }
-    }
-
-    /** Increments the value in the designated bin by 1. */
-    public void increment(final K id) {
-        increment(id, 1d);
-    }
-
-    /** Increments the value in the designated bin by the supplied increment. */
-    public void increment(final K id, final double increment) {
-        Bin bin = get(id);
-        if (bin == null) {
-            bin = new Bin(id);
-            put(id, bin);
-        }
-
-        bin.value += increment;
-    }
-
-    public String getBinLabel() { return binLabel; }
-    public void setBinLabel(final String binLabel) { this.binLabel = binLabel; }
-
-    public String getValueLabel() { return valueLabel; }
-    public void setValueLabel(final String valueLabel) { this.valueLabel = valueLabel; }
-
-    /** Checks that the labels and values in the two histograms are identical. */
-    public boolean equals(final Object o) {
-        return o != null &&
-                (o instanceof Histogram) &&
-                ((Histogram) o).binLabel.equals(this.binLabel) &&
-                ((Histogram) o).valueLabel.equals(this.valueLabel) &&
-                super.equals(o);
-    }
-
-    /**
-     * Assuming that the key type for the histogram is a Number type, returns the mean of
-     * all the items added to the histogram.
-     */
-    public double getMean() {
-        // Could use simply getSum() / getCount(), but that would require iterating over the
-        // values() set twice, which seems inefficient given how simple the computation is.
-        double product=0, totalCount=0;
-        for (final Bin bin : values()) {
-            final double idValue = bin.getIdValue();
-            final double count   = bin.getValue();
-
-            product += idValue * count;
-            totalCount += count;
-        }
-
-        return product / totalCount;
-    }
-
-    /**
-     * Returns the sum of the products of the histogram bin ids and the number of entries in each bin.
-     */
-    public double getSum() {
-        double total = 0;
-        for (final Bin bin : values()) {
-            total += bin.getValue() * bin.getIdValue();
-        }
-
-        return total;
-    }
-
-    /**
-     * Returns the sum of the number of entries in each bin.
-     */
-    public double getSumOfValues() {
-        double total = 0;
-        for (final Bin bin : values()) {
-            total += bin.getValue();
-        }
-
-        return total;
-    }
-
-    public double getStandardDeviation() {
-        final double mean = getMean();
-
-        double count = 0;
-        double total = 0;
-
-        for (final Bin bin : values()) {
-            final double localCount = bin.getValue();
-            final double value = bin.getIdValue();
-
-            count += localCount;
-            total += localCount * pow(value - mean, 2);
-        }
-
-        return Math.sqrt(total / (count-1));
-    }
-
-    /**
-     * Calculates the mean bin size
-     */
-    public double getMeanBinSize() {
-        return (getSumOfValues() / size());
-    }
-
-    /**
-     * Calculates the median bin size
-     */
-    public double getMedianBinSize() {
-        if (size() == 0) {
-            return 0;
-        }
-
-        final List<Double> binValues = new ArrayList<Double>();
-        for (final Bin bin : values()) {
-            binValues.add(bin.getValue());
-        }
-        Collections.sort(binValues);
-
-        final int midPoint = binValues.size() / 2;
-        double median = binValues.get(midPoint);
-        if (binValues.size() % 2 == 0) {
-            median = (median + binValues.get(midPoint-1)) / 2;
-        }
-
-        return median;
-    }
-
-    /**
-     * Calculates the standard deviation of the bin size
-     */
-    public double getStandardDeviationBinSize(final double mean) {
-        double total = 0;
-        for(final Bin bin : values()) {
-            total += Math.pow(bin.getValue() - mean, 2);
-        }
-        return Math.sqrt(total / (Math.max(1,values().size()-1)));
-    }
-
-    /**
-     * Gets the bin in which the given percentile falls.
-     *
-     * @param percentile a value between 0 and 1
-     * @return the bin value in which the percentile falls
-     */
-    public double getPercentile(double percentile) {
-        if (percentile <= 0) throw new IllegalArgumentException("Cannot query percentiles of 0 or below");
-        if (percentile >= 1) throw new IllegalArgumentException("Cannot query percentiles of 1 or above");
-
-        double total = getCount();
-        double sofar = 0;
-        for (Bin bin : values()) {
-            sofar += bin.getValue();
-            if (sofar / total >= percentile) return bin.getIdValue();
-        }
-
-        throw new IllegalStateException("Could not find percentile: " + percentile);
-    }
-
-    /**
-     * Returns the cumulative probability of observing a value <= v when sampling the
-     * distribution represented by this histogram.
-     */
-    public double getCumulativeProbability(final double v) {
-        double count = 0;
-        double total = 0;
-
-        for (final Bin bin : values()) {
-            final double binValue = bin.getIdValue();
-            if (binValue <= v) count += bin.getValue();
-            total += bin.getValue();
-        }
-
-        return count / total;
-    }
-
-    public double getMedian() {
-        double total = 0;
-        double count = getCount();
-
-        // Base cases
-        if (count == 0) return 0;
-        if (count == 1) return values().iterator().next().getIdValue();
-
-        final double midLow, midHigh;
-        if (count % 2 == 0) {
-            midLow = count / 2;
-            midHigh = midLow + 1;
-        }
-        else {
-            midLow = Math.ceil(count / 2);
-            midHigh = midLow;
-        }
-
-        Double midLowValue  = null;
-        Double midHighValue = null;
-        for (final Bin bin : values()) {
-            total += bin.getValue();
-            if (midLowValue  == null && total >= midLow)  midLowValue  = bin.getIdValue();
-            if (midHighValue == null && total >= midHigh) midHighValue = bin.getIdValue();
-            if (midLowValue != null && midHighValue != null) break;
-        }
-
-        return (midLowValue + midHighValue) / 2;
-    }
-
-    /** Gets the median absolute deviation of the distribution. */
-    public double getMedianAbsoluteDeviation() {
-        final double median = getMedian();
-        final Histogram<Double> deviations = new Histogram<Double>();
-        for (final Bin bin : values()) {
-            final double dev = abs(bin.getIdValue() - median);
-            deviations.increment(dev, bin.getValue());
-        }
-
-        return deviations.getMedian();
-    }
-
-    /**
-     * Returns a value that is intended to estimate the standard deviation of the distribution, if the distribution is
-     * essentially normal, by using the median absolute deviation to remove the effect of
-     * erroneous massive outliers.
-     */
-    public double estimateSdViaMad() {
-        return 1.4826 * getMedianAbsoluteDeviation();
-    }
-
-    /** Returns id of the Bin that's the mode of the distribution (i.e. the largest bin). */
-    public double getMode() {
-
-        return getModeBin().getIdValue();
-    }
-
-    /** Returns the Bin that's the mode of the distribution (i.e. the largest bin). */
-    private Bin getModeBin() {
-        Bin modeBin = null;
-
-        for (final Bin bin : values()) {
-            if (modeBin == null || modeBin.value < bin.value) {
-                modeBin = bin;
-            }
-        }
-
-        return modeBin;
-    }
-
-
-    public double getMin() {
-        return firstEntry().getValue().getIdValue();
-    }
-
-    public double getMax() {
-        return lastEntry().getValue().getIdValue();
-    }
-
-    public double getCount() {
-        double count = 0;
-        for (final Bin bin : values()) {
-            count += bin.value;
-        }
-
-        return count;
-    }
-
-    /** Gets the geometric mean of the distribution. */
-    public double getGeometricMean() {
-        double total = 0;
-        double count = 0;
-        for (final Bin bin : values()) {
-            total += bin.value * log(bin.getIdValue());
-            count += bin.value;
-        }
-
-        return exp(total / count);
-    }
-
-    /**
-     * Trims the histogram when the bins in the tail of the distribution contain fewer than mode/tailLimit items
-     */
-    public void trimByTailLimit(final int tailLimit) {
-        if (isEmpty()) {
-            return;
-        }
-
-        final Bin modeBin = getModeBin();
-        final double mode = modeBin.getIdValue();
-        final double sizeOfModeBin = modeBin.getValue();
-        final double minimumBinSize = sizeOfModeBin/tailLimit;
-        Histogram<K>.Bin lastBin = null;
-
-        final List<K> binsToKeep = new ArrayList<K>();
-        for (Histogram<K>.Bin bin : values()) {
-            double binId = ((Number)bin.getId()).doubleValue();
-
-            if (binId <= mode) {
-                binsToKeep.add(bin.getId());
-            }
-            else if ((lastBin != null && ((Number)lastBin.getId()).doubleValue() != binId - 1) || bin.getValue() < minimumBinSize) {
-                break;
-            }
-            else {
-                binsToKeep.add(bin.getId());
-            }
-            lastBin = bin;
-        }
-
-        final Object keys[] = keySet().toArray();
-        for (Object binId : keys) {
-            if (!binsToKeep.contains((K)binId)) {
-                remove(binId);
-            }
-        }
-    }
-
-    /**
-     * Trims the histogram so that only bins <= width are kept.
-     */
-    public void trimByWidth(final int width) {
-        final Iterator<K> it = descendingKeySet().iterator();
-        while (it.hasNext()) {
-
-            if (((Number)it.next()).doubleValue() > width) {
-                it.remove();
-            } else break;
-        }
-    }
-
-    /**
-     * Immutable method that divides the current Histogram by an input Histogram and generates a new one.
-     * Throws an exception if the bins don't match up exactly.
-     * @param divisorHistogram the Histogram to divide by; must contain exactly the same bins as this one
-     * @return a new Histogram in which each bin holds this bin's value divided by the corresponding divisor bin's value
-     * @throws IllegalArgumentException if the two Histograms do not have identical bins
-     */
-    public Histogram<K> divideByHistogram(final Histogram<K> divisorHistogram) throws IllegalArgumentException{
-        Histogram<K> output = new Histogram<K>();
-        if (!this.keySet().equals(divisorHistogram.keySet()))  throw new IllegalArgumentException("Attempting to divide Histograms with non-identical bins");
-        for (final K key : this.keySet()){
-            Bin dividend = this.get(key);
-            Bin divisor = divisorHistogram.get(key);
-            output.increment(key, dividend.getValue()/divisor.getValue());
-        }
-        return output;
-    }
-
-    /**
-     * Mutable method that adds the bin values of another Histogram into this one.
-     * @param addHistogram the Histogram whose bin values will be added to this one
-     */
-    public void addHistogram(final Histogram<K> addHistogram) {
-        for (final K key : addHistogram.keySet()){
-            this.increment(key, addHistogram.get(key).getValue());
-        }
-    }
-}
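
A minimal sketch of how the Histogram class above is typically used; the data values are
invented, and only methods shown in the source are called.

    import htsjdk.samtools.util.Histogram;

    public class HistogramExample {
        public static void main(final String[] args) {
            final Histogram<Integer> lengths = new Histogram<Integer>("LENGTH", "COUNT");
            final int[] observed = {100, 100, 101, 150, 150, 150};     // made-up read lengths
            for (final int len : observed) {
                lengths.increment(len);                                 // adds 1 to the bin for len
            }
            System.out.println("mean   = " + lengths.getMean());       // sum(id * count) / sum(count)
            System.out.println("median = " + lengths.getMedian());
            System.out.println("mode   = " + lengths.getMode());       // id of the largest bin
            System.out.println("sd     = " + lengths.getStandardDeviation());
        }
    }
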
diff --git a/src/java/htsjdk/samtools/util/Objects.java b/src/java/htsjdk/samtools/util/Objects.java
deleted file mode 100644
index 45b0066..0000000
--- a/src/java/htsjdk/samtools/util/Objects.java
+++ /dev/null
@@ -1,12 +0,0 @@
-package htsjdk.samtools.util;
-
-/**
- * Subset of JDK7's {@link java.util.Objects} for non-JDK7 support. 
- * 
- * @author mccowan
- */
-public class Objects {
-    public static boolean equals(final Object a, final Object b) {
-        return (a == b) || (a != null && a.equals(b));
-    } 
-}
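
The shim above exists only to provide a null-safe equals() on pre-JDK7 runtimes; a quick
sketch of its behaviour:

    import htsjdk.samtools.util.Objects;

    public class ObjectsExample {
        public static void main(final String[] args) {
            System.out.println(Objects.equals(null, null));   // true  (both null)
            System.out.println(Objects.equals("a", null));    // false (one side is null)
            System.out.println(Objects.equals("a", "a"));     // true  (delegates to String.equals)
        }
    }
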
diff --git a/src/java/htsjdk/samtools/util/OverlapDetector.java b/src/java/htsjdk/samtools/util/OverlapDetector.java
deleted file mode 100644
index c73fbe1..0000000
--- a/src/java/htsjdk/samtools/util/OverlapDetector.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2010 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools.util;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-/**
- * Utility class to efficiently do in memory overlap detection between a large
- * set of mapping like objects, and one or more candidate mappings.
- */
-public class OverlapDetector<T> {
-    private Map<Object, IntervalTree<Set<T>>> cache = new HashMap<Object, IntervalTree<Set<T>>>();
-    private final int lhsBuffer;
-    private final int rhsBuffer;
-
-    /**
-     * Constructs an overlap detector.
-     * @param lhsBuffer the amount by which to "trim" coordinates of mappings on the left
-     *                  hand side when calculating overlaps
-     * @param rhsBuffer the amount by which to "trim" coordinates of mappings on the right
-     *                  hand side when calculating overlaps
-     */
-    public OverlapDetector(int lhsBuffer, int rhsBuffer) {
-        this.lhsBuffer = lhsBuffer;
-        this.rhsBuffer = rhsBuffer;
-    }
-
-    /** Adds a mapping to the set of mappings against which to match candidates. */
-    public void addLhs(T object, Interval interval) {
-        Object seqId = interval.getContig();
-
-        IntervalTree<Set<T>> tree = this.cache.get(seqId);
-        if (tree == null) {
-            tree = new IntervalTree<Set<T>>();
-            this.cache.put(seqId, tree);
-        }
-
-        int start = interval.getStart() + this.lhsBuffer;
-        int end   = interval.getEnd()   - this.lhsBuffer;
-
-        Set<T> objects = new HashSet<T>();
-        objects.add(object);
-        if (start <= end)  // Don't put in sequences that have no overlappable bases
-        {
-            Set<T> alreadyThere = tree.put(start, end, objects);
-            if (alreadyThere != null)
-            {
-                alreadyThere.add(object);
-                tree.put(start, end, alreadyThere);
-            }
-        }
-    }
-
-    /** Adds all items to the overlap detector. */
-    public void addAll(List<T> objects, List<Interval> intervals) {
-        if (objects.size() != intervals.size()) {
-            throw new IllegalArgumentException("Objects and intervals must be the same size.");
-        }
-
-        for (int i=0; i<objects.size(); ++i) {
-            addLhs(objects.get(i), intervals.get(i));
-        }
-    }
-
-    /** Gets all the objects that could be returned by the overlap detector. */
-    public Collection<T> getAll() {
-        Collection<T> all = new HashSet<T>();
-        for (IntervalTree<Set<T>> tree : this.cache.values()) {
-            for (IntervalTree.Node<Set<T>> node : tree) {
-                all.addAll(node.getValue());
-            }
-        }
-
-        return all;
-    }
-
-    /** Gets the collection of objects that overlap the provided mapping. */
-    public Collection<T> getOverlaps(Interval rhs)  {
-        Collection<T> matches = new ArrayList<T>();
-
-        Object seqId = rhs.getContig();
-        IntervalTree<Set<T>> tree = this.cache.get(seqId);
-        int start = rhs.getStart() + this.rhsBuffer;
-        int end = rhs.getEnd() - this.rhsBuffer;
-
-        if (tree != null && start <= end)
-        {
-            Iterator<IntervalTree.Node<Set<T>>> it = tree.overlappers(start, end);
-            while (it.hasNext())
-            {
-                IntervalTree.Node<Set<T>> node = it.next();
-                matches.addAll(node.getValue());
-            }
-        }
-
-        return matches;
-    }
-}
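
A minimal sketch of the OverlapDetector API above; the interval coordinates and labels are
invented, and Interval is the htsjdk.samtools.util.Interval class referenced by addLhs() and
getOverlaps().

    import htsjdk.samtools.util.Interval;
    import htsjdk.samtools.util.OverlapDetector;
    import java.util.Collection;

    public class OverlapDetectorExample {
        public static void main(final String[] args) {
            // lhsBuffer = rhsBuffer = 0: do not trim either side before testing overlap.
            final OverlapDetector<String> detector = new OverlapDetector<String>(0, 0);
            detector.addLhs("geneA", new Interval("chr1", 100, 200));
            detector.addLhs("geneB", new Interval("chr1", 150, 300));
            detector.addLhs("geneC", new Interval("chr2", 100, 200));

            final Collection<String> hits = detector.getOverlaps(new Interval("chr1", 180, 190));
            System.out.println(hits);   // expected to contain geneA and geneB
        }
    }
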
diff --git a/src/java/htsjdk/samtools/util/SamLocusIterator.java b/src/java/htsjdk/samtools/util/SamLocusIterator.java
deleted file mode 100644
index f0dd952..0000000
--- a/src/java/htsjdk/samtools/util/SamLocusIterator.java
+++ /dev/null
@@ -1,552 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2010 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools.util;
-
-import htsjdk.samtools.AlignmentBlock;
-import htsjdk.samtools.SAMException;
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.SAMSequenceRecord;
-import htsjdk.samtools.SamReader;
-import htsjdk.samtools.filter.AggregateFilter;
-import htsjdk.samtools.filter.DuplicateReadFilter;
-import htsjdk.samtools.filter.FilteringSamIterator;
-import htsjdk.samtools.filter.SamRecordFilter;
-import htsjdk.samtools.filter.SecondaryOrSupplementaryFilter;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Iterator that traverses a SAM File, accumulating information on a per-locus basis.
- * Optionally takes a target interval list, in which case the loci returned are the ones covered by
- * the interval list.  If no target interval list is given, whatever loci are covered by the input reads are returned.
- * By default duplicate reads and non-primary alignments are filtered out.  Filtering may be changed
- * via setSamFilters().
- *
- * @author alecw at broadinstitute.org
- */
-public class SamLocusIterator implements Iterable<SamLocusIterator.LocusInfo>, CloseableIterator<SamLocusIterator.LocusInfo> {
-    private static final Log LOG = Log.getInstance(SamLocusIterator.class);
-
-    /**
-     * Holds a SAMRecord plus the zero-based offset into that SAMRecord's bases and quality scores that corresponds
-     * to the base and quality at the genomic position described by the containing LocusInfo.
-     */
-    public static class RecordAndOffset {
-        private final SAMRecord record;
-        private final int offset;
-
-        public RecordAndOffset(final SAMRecord record, final int offset) {
-            this.offset = offset;
-            this.record = record;
-        }
-
-        /** Zero-based offset into the read corresponding to the current position in LocusInfo */
-        public int getOffset() { return offset; }
-        public SAMRecord getRecord() { return record; }
-        public byte getReadBase() { return record.getReadBases()[offset]; }
-        public byte getBaseQuality() { return record.getBaseQualities()[offset]; }
-    }
-
-    /**
-     * The unit of iteration.  Holds information about the locus (the SAMSequenceRecord and 1-based position
-     * on the reference), plus a List of RecordAndOffset objects, one for each read that overlaps the locus.
-     */
-    public static final class LocusInfo implements Locus {
-        private final SAMSequenceRecord referenceSequence;
-        private final int position;
-        private final List<RecordAndOffset> recordAndOffsets = new ArrayList<RecordAndOffset>(100);
-
-        LocusInfo(final SAMSequenceRecord referenceSequence, final int position) {
-            this.referenceSequence = referenceSequence;
-            this.position = position;
-        }
-
-        /** Accumulate info for one read at the locus. */
-        public void add(final SAMRecord read, final int position) {
-            recordAndOffsets.add(new RecordAndOffset(read, position));
-        }
-
-        public int getSequenceIndex() { return referenceSequence.getSequenceIndex(); }
-
-        /** @return 1-based reference position */
-        public int getPosition() { return position; }
-        public List<RecordAndOffset> getRecordAndPositions() { return Collections.unmodifiableList(recordAndOffsets); }
-        public String getSequenceName() { return referenceSequence.getSequenceName(); }
-        @Override public String toString() { return referenceSequence.getSequenceName() + ":" + position; }
-        public int getSequenceLength() {return referenceSequence.getSequenceLength();}
-    }
-
-
-    private final SamReader samReader;
-    private final ReferenceSequenceMask referenceSequenceMask;
-    private PeekableIterator<SAMRecord> samIterator;
-    private List<SamRecordFilter> samFilters = Arrays.asList(new SecondaryOrSupplementaryFilter(),
-            new DuplicateReadFilter());
-    private final List<Interval> intervals;
-    private final boolean useIndex;
-
-    /**
-     * LocusInfos on this list are ready to be returned by iterator.  All reads that overlap
-     * the locus have been accumulated before the LocusInfo is moved into this list.
-     */
-    private final ArrayList<LocusInfo> complete = new ArrayList<LocusInfo>(100);
-
-    /**
-     * LocusInfos for which accumulation is in progress.  When {@link #accumulateSamRecord(SAMRecord)} is called
-     * the state of this list is guaranteed to be either:
-     *   a) Empty, or
-     *   b) That the element at index 0 corresponds to the same genomic locus as the first aligned base
-     *      in the read being accumulated
-     *
-     * Before each new read is accumulated the accumulator is examined and:
-     *   i) any LocusInfos at positions earlier than the read start are moved to {@link #complete}
-     *   ii) any uncovered positions between the last LocusInfo and the first aligned base of the new read
-     *       have LocusInfos created and added to {@link #complete} if we are emitting uncovered loci
-     */
-    private final ArrayList<LocusInfo> accumulator = new ArrayList<LocusInfo>(100);
-
-    private int qualityScoreCutoff = Integer.MIN_VALUE;
-    private int mappingQualityScoreCutoff = Integer.MIN_VALUE;
-    private boolean includeNonPfReads = true;
-
-    /**
-     * If true, emit a LocusInfo for every locus in the target map, or if no target map,
-     * emit a LocusInfo for every locus in the reference sequence.
-     * If false, emit a LocusInfo only if a locus has coverage.
-     */
-    private boolean emitUncoveredLoci = true;
-
-    /**
-     * If set, this will cap the number of reads we accumulate for any given position.
-     * Note that if we hit the maximum threshold at the first position in the accumulation queue,
-     * then further reads overlapping that position are thrown away completely (including for subsequent positions).
-     * This is a useful feature if one wants to minimize the memory footprint in files with a few massively large pileups,
-     * but it must be pointed out that it could cause major bias because of the non-random nature with which the cap is
-     * applied (the first maxReadsToAccumulatePerLocus reads are kept and all subsequent ones are dropped).
-     */
-    private int maxReadsToAccumulatePerLocus = Integer.MAX_VALUE;
-
-    // Set to true when we have enforced the accumulation limit for the first time
-    private boolean enforcedAccumulationLimit = false;
-
-    // When there is a target mask, these members remember the last locus for which a LocusInfo has been
-    // returned, so that any uncovered locus in the target mask can be covered by a 0-coverage LocusInfo
-    private int lastReferenceSequence = 0;
-    private int lastPosition = 0;
-
-    // Set to true when past all aligned reads in input SAM file
-    private boolean finishedAlignedReads = false;
-
-    private final LocusComparator<Locus> locusComparator = new LocusComparator<Locus>();
-
-
-    /**
-     * Prepare to iterate through the given SAM records, skipping non-primary alignments.  Do not use
-     * BAM index even if available.
-     */
-    public SamLocusIterator(final SamReader samReader) {
-        this(samReader, null);
-    }
-
-    /**
-     * Prepare to iterate through the given SAM records, skipping non-primary alignments.  Do not use
-     * BAM index even if available.
-     *
-     * @param intervalList Either the list of desired intervals, or null.  Note that if an intervalList is
-     *                     passed in that is not coordinate sorted, it will eventually be coordinate sorted by this class.
-     */
-    public SamLocusIterator(final SamReader samReader, final IntervalList intervalList) {
-        this(samReader, intervalList, samReader.hasIndex());
-    }
-
-    /**
-     * Prepare to iterate through the given SAM records, skipping non-primary alignments
-     *
-     * @param samReader    must be coordinate sorted
-     * @param intervalList Either the list of desired intervals, or null.  Note that if an intervalList is
-     *                     passed in that is not coordinate sorted, it will eventually be coordinate sorted by this class.
-     * @param useIndex     If true, do indexed lookup to improve performance.  Not relevant if intervalList == null.
-     *                     It is no longer the case that useIndex==true can make performance worse.  It should always perform at least
-     *                     as well as useIndex==false, and generally will be much faster.
-     */
-    public SamLocusIterator(final SamReader samReader, final IntervalList intervalList, final boolean useIndex) {
-        if (samReader.getFileHeader().getSortOrder() == null || samReader.getFileHeader().getSortOrder() == SAMFileHeader.SortOrder.unsorted) {
-            LOG.warn("SamLocusIterator constructed with samReader that has SortOrder == unsorted.  ", "" +
-                    "Assuming SAM is coordinate sorted, but exceptions may occur if it is not.");
-        } else if (samReader.getFileHeader().getSortOrder() != SAMFileHeader.SortOrder.coordinate) {
-            throw new SAMException("SamLocusIterator cannot operate on a SAM file that is not coordinate sorted.");
-        }
-        this.samReader = samReader;
-        this.useIndex = useIndex;
-        if (intervalList != null) {
-            intervals = intervalList.uniqued().getIntervals();
-            this.referenceSequenceMask = new IntervalListReferenceSequenceMask(intervalList);
-        } else {
-            intervals = null;
-            this.referenceSequenceMask = new WholeGenomeReferenceSequenceMask(samReader.getFileHeader());
-        }
-    }
-
-    public Iterator<LocusInfo> iterator() {
-        if (samIterator != null) {
-            throw new IllegalStateException("Cannot call iterator() more than once on SamLocusIterator");
-        }
-        CloseableIterator<SAMRecord> tempIterator;
-        if (intervals != null) {
-            tempIterator = new SamRecordIntervalIteratorFactory().makeSamRecordIntervalIterator(samReader, intervals, useIndex);
-        } else {
-            tempIterator = samReader.iterator();
-        }
-        if (samFilters != null) {
-            tempIterator = new FilteringSamIterator(tempIterator, new AggregateFilter(samFilters));
-        }
-        samIterator = new PeekableIterator<SAMRecord>(tempIterator);
-        return this;
-    }
-
-    public void close() {
-        this.samIterator.close();
-    }
-
-    private boolean samHasMore() {
-        return !finishedAlignedReads && (samIterator.peek() != null);
-    }
-
-    /**
-     * Returns true if there are more LocusInfo objects that can be returned, due to any of the following reasons:
-     *   1) there are more aligned reads in the SAM file
-     *   2) there are LocusInfos in some stage of accumulation
-     *   3) there are loci in the target mask that have yet to be accumulated (even if there are no reads covering them)
-     */
-    public boolean hasNext() {
-        if (this.samIterator == null) {
-            iterator();
-        }
-
-        while (complete.isEmpty() && ((!accumulator.isEmpty()) || samHasMore() || hasRemainingMaskBases())) {
-            final LocusInfo locusInfo = next();
-            if (locusInfo != null) {
-                complete.add(0, locusInfo);
-            }
-        }
-        return !complete.isEmpty();
-    }
-
-    /**
-     * Returns true if there are more bases at which the locus iterator must emit LocusInfos because
-     * there are loci beyond the last emitted locus which are in the set of loci to be emitted and
-     * the iterator is set up to emit uncovered loci, so we can guarantee those loci will be emitted.
-     */
-    private boolean hasRemainingMaskBases() {
-        // if there are more sequences in the mask, by definition some of them must have
-        // marked bases otherwise if we're in the last sequence, but we're not at the last marked position,
-        // there is also more in the mask
-        if (!emitUncoveredLoci) {
-            // If not emitting uncovered loci, this check is irrelevant
-            return false;
-        }
-        return (lastReferenceSequence < referenceSequenceMask.getMaxSequenceIndex() ||
-                (lastReferenceSequence == referenceSequenceMask.getMaxSequenceIndex() &&
-                        lastPosition < referenceSequenceMask.nextPosition(lastReferenceSequence, lastPosition)));
-    }
-
-    /**
-     * If hasNext() returns true, next() is guaranteed not to return null.
-     */
-    public LocusInfo next() {
-
-        // if we don't have any completed entries to return, try and make some!
-        while (complete.isEmpty() && samHasMore()) {
-            final SAMRecord rec = samIterator.peek();
-
-            // There might be unmapped reads mixed in with the mapped ones, but when a read
-            // is encountered with no reference index it means that all the mapped reads have been seen.
-            if (rec.getReferenceIndex() == -1) {
-                this.finishedAlignedReads = true;
-                continue;
-
-            }
-            // Skip over an unaligned read that has been forced to be sorted with the aligned reads
-            if (rec.getReadUnmappedFlag()
-                    || rec.getMappingQuality() < this.mappingQualityScoreCutoff
-                    || (!this.includeNonPfReads && rec.getReadFailsVendorQualityCheckFlag())) {
-                samIterator.next();
-                continue;
-            }
-
-            final Locus alignmentStart = new LocusImpl(rec.getReferenceIndex(), rec.getAlignmentStart());
-
-            // emit everything that is before the start of the current read, because we know no more
-            // coverage will be accumulated for those loci.
-            while (!accumulator.isEmpty() && locusComparator.compare(accumulator.get(0), alignmentStart) < 0) {
-                final LocusInfo first = accumulator.get(0);
-                populateCompleteQueue(alignmentStart);
-                if (!complete.isEmpty()) {
-                    return complete.remove(0);
-                }
-                if (!accumulator.isEmpty() && first == accumulator.get(0)) {
-                    throw new SAMException("Stuck in infinite loop");
-                }
-            }
-
-            // at this point, either the accumulator list is empty or the head should
-            // be the same position as the first base of the read
-            if (!accumulator.isEmpty()) {
-                if (accumulator.get(0).getSequenceIndex() != rec.getReferenceIndex() ||
-                        accumulator.get(0).position != rec.getAlignmentStart()) {
-                    throw new IllegalStateException("accumulator should be empty or aligned with current SAMRecord");
-                }
-            }
-
-            // Store the loci for the read in the accumulator
-            if (!surpassedAccumulationThreshold()) accumulateSamRecord(rec);
-
-            samIterator.next();
-        }
-
-        final Locus endLocus = new LocusImpl(Integer.MAX_VALUE, Integer.MAX_VALUE);
-        // if we have nothing to return to the user, and we're at the end of the SAM iterator,
-        // push everything into the complete queue
-        if (complete.isEmpty() && !samHasMore()) {
-            while (!accumulator.isEmpty()) {
-                populateCompleteQueue(endLocus);
-                if (!complete.isEmpty()) {
-                    return complete.remove(0);
-                }
-            }
-        }
-
-        // if there are completed entries, return those
-        if (!complete.isEmpty()) {
-            return complete.remove(0);
-        } else if (emitUncoveredLoci) {
-            final Locus afterLastMaskPositionLocus = new LocusImpl(referenceSequenceMask.getMaxSequenceIndex(),
-                    referenceSequenceMask.getMaxPosition() + 1);
-            // In this case... we're past the last read from SAM so see if we can
-            // fill out any more (zero coverage) entries from the mask
-            return createNextUncoveredLocusInfo(afterLastMaskPositionLocus);
-        } else {
-            return null;
-        }
-    }
-
-    /**
-     * @return true if we have surpassed the maximum accumulation threshold for the first locus in the accumulator, false otherwise
-     */
-    private boolean surpassedAccumulationThreshold() {
-        final boolean surpassesThreshold = !accumulator.isEmpty() && accumulator.get(0).recordAndOffsets.size() >= maxReadsToAccumulatePerLocus;
-        if (surpassesThreshold && !enforcedAccumulationLimit) {
-            LOG.warn("We have encountered greater than " + maxReadsToAccumulatePerLocus + " reads at position " + accumulator.get(0).toString() + " and will ignore the remaining reads at this position.  Note that further warnings will be suppressed.");
-            enforcedAccumulationLimit = true;
-        }
-        return surpassesThreshold;
-    }
-
-    /**
-     * Capture the loci covered by the given SAMRecord in the LocusInfos in the accumulator,
-     * creating new LocusInfos as needed.
-     */
-    private void accumulateSamRecord(final SAMRecord rec) {
-        final SAMSequenceRecord ref = getReferenceSequence(rec.getReferenceIndex());
-        final int alignmentStart  = rec.getAlignmentStart();
-        final int alignmentEnd    = rec.getAlignmentEnd();
-        final int alignmentLength = alignmentEnd - alignmentStart;
-
-        // Ensure there are LocusInfos up to and including this position
-        for (int i=accumulator.size(); i<=alignmentLength; ++i) {
-            accumulator.add(new LocusInfo(ref, alignmentStart + i));
-        }
-
-        final int minQuality = getQualityScoreCutoff();
-        final boolean dontCheckQualities = minQuality == 0;
-        final byte[] baseQualities = dontCheckQualities ? null : rec.getBaseQualities();
-
-        // interpret the CIGAR string and add the base info
-        for (final AlignmentBlock alignmentBlock : rec.getAlignmentBlocks()) {
-            final int readStart   = alignmentBlock.getReadStart();
-            final int refStart    = alignmentBlock.getReferenceStart();
-            final int blockLength = alignmentBlock.getLength();
-
-            for (int i=0; i<blockLength; ++i) {
-                // 0-based offset into the read of the current base
-                final int readOffset = readStart + i - 1;
-
-                // 0-based offset from the aligned position of the first base in the read to the aligned position of the current base.
-                final int refOffset =  refStart + i - alignmentStart;
-
-                // if the quality score cutoff is met, accumulate the base info
-                if (dontCheckQualities || baseQualities[readOffset] >= minQuality) {
-                    accumulator.get(refOffset).add(rec, readOffset);
-                }
-            }
-        }
-    }
-
-    /**
-     * Create the next relevant zero-coverage LocusInfo
-     *
-     * @param stopBeforeLocus don't go up to this sequence and position
-     * @return a zero-coverage LocusInfo, or null if there is none before the stopBefore locus
-     */
-    private LocusInfo createNextUncoveredLocusInfo(final Locus stopBeforeLocus) {
-        while (lastReferenceSequence <= stopBeforeLocus.getSequenceIndex() &&
-                lastReferenceSequence <= referenceSequenceMask.getMaxSequenceIndex()) {
-
-            if (lastReferenceSequence == stopBeforeLocus.getSequenceIndex() &&
-                    lastPosition + 1 >= stopBeforeLocus.getPosition()) {
-                return null;
-            }
-
-            final int nextbit = referenceSequenceMask.nextPosition(lastReferenceSequence, lastPosition);
-
-            // try the next reference sequence
-            if (nextbit == -1) {
-                // No more in this reference sequence
-                if (lastReferenceSequence == stopBeforeLocus.getSequenceIndex()) {
-                    lastPosition = stopBeforeLocus.getPosition();
-                    return null;
-                }
-                lastReferenceSequence++;
-                lastPosition = 0;
-            }
-            else if (lastReferenceSequence < stopBeforeLocus.getSequenceIndex() || nextbit < stopBeforeLocus.getPosition()) {
-                lastPosition = nextbit;
-                return new LocusInfo(getReferenceSequence(lastReferenceSequence), lastPosition);
-            }
-            else if (nextbit >= stopBeforeLocus.getPosition()) {
-                return null;
-            }
-        }
-
-        return null;
-    }
-
-    /**
-     * Pop the first entry from the LocusInfo accumulator into the complete queue.  In addition,
-     * check the ReferenceSequenceMask and if there are intervening mask positions between the last popped base and the one
-     * about to be popped, put those on the complete queue as well.
-     * Note that a single call to this method may not empty the accumulator completely, or even
-     * empty it at all, because it may just put a zero-coverage LocusInfo into the complete queue.
-     */
-    private void populateCompleteQueue(final Locus stopBeforeLocus) {
-        // Because of gapped alignments, it is possible to create LocusInfos with no reads associated with them.
-        // Skip over these.
-        while (!accumulator.isEmpty() && accumulator.get(0).getRecordAndPositions().isEmpty() &&
-               locusComparator.compare(accumulator.get(0), stopBeforeLocus) < 0) {
-            accumulator.remove(0);
-        }
-        if (accumulator.isEmpty()) {
-            return;
-        }
-        final LocusInfo locusInfo = accumulator.get(0);
-        if (locusComparator.compare(stopBeforeLocus, locusInfo) <= 0) {
-            return;
-        }
-
-        // If necessary, emit a zero-coverage LocusInfo
-        if (emitUncoveredLoci) {
-            final LocusInfo zeroCoverage = createNextUncoveredLocusInfo(locusInfo);
-            if (zeroCoverage != null) {
-                complete.add(zeroCoverage);
-                return;
-            }
-        }
-
-        // At this point we know we're going to process the LocusInfo, so remove it from the accumulator.
-        accumulator.remove(0);
-
-        // fill in any gaps based on our genome mask
-        final int sequenceIndex = locusInfo.getSequenceIndex();
-
-
-        // only add to the complete queue if it's in the mask (or we have no mask!)
-        if (referenceSequenceMask.get(locusInfo.getSequenceIndex(), locusInfo.getPosition())) {
-            complete.add(locusInfo);
-        }
-
-        lastReferenceSequence = sequenceIndex;
-        lastPosition = locusInfo.getPosition();
-    }
-
-    private SAMSequenceRecord getReferenceSequence(final int referenceSequenceIndex) {
-        return samReader.getFileHeader().getSequence(referenceSequenceIndex);
-    }
-
-    public void remove() {
-        throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!");
-    }
-
-    // --------------------------------------------------------------------------------------------
-    // Helper methods below this point...
-    // --------------------------------------------------------------------------------------------
-
-    /**
-     * Controls which, if any, SAMRecords are filtered.  By default duplicate reads and non-primary alignments
-     * are filtered out.  The list of filters passed here replaces any existing filters.
-     *
-     * @param samFilters list of filters, or null if no filtering is desired.
-     */
-    public void setSamFilters(final List<SamRecordFilter> samFilters) {
-        this.samFilters = samFilters;
-    }
-
-    public int getQualityScoreCutoff() { return qualityScoreCutoff; }
-
-    public void setQualityScoreCutoff(final int qualityScoreCutoff) { this.qualityScoreCutoff = qualityScoreCutoff; }
-
-    public int getMappingQualityScoreCutoff() {
-        return mappingQualityScoreCutoff;
-    }
-
-    public void setMappingQualityScoreCutoff(final int mappingQualityScoreCutoff) { this.mappingQualityScoreCutoff = mappingQualityScoreCutoff; }
-
-    public boolean isIncludeNonPfReads() { return includeNonPfReads; }
-
-    public void setIncludeNonPfReads(final boolean includeNonPfReads) { this.includeNonPfReads = includeNonPfReads; }
-
-    public boolean isEmitUncoveredLoci() {
-        return emitUncoveredLoci;
-    }
-
-    public void setEmitUncoveredLoci(final boolean emitUncoveredLoci) {
-        this.emitUncoveredLoci = emitUncoveredLoci;
-    }
-
-    public int getMaxReadsToAccumulatePerLocus() {
-        return maxReadsToAccumulatePerLocus;
-    }
-
-    /**
-     * If set, this will cap the number of reads we accumulate for any given position.
-     * As is pointed out above, setting this could cause major bias because of the non-random nature with which the
-     * cap is applied (the first maxReadsToAccumulatePerLocus reads are kept and all subsequent ones are dropped).
-     */
-    public void setMaxReadsToAccumulatePerLocus(final int maxReadsToAccumulatePerLocus) { this.maxReadsToAccumulatePerLocus = maxReadsToAccumulatePerLocus; }
-}
-
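
For orientation, a minimal usage sketch of the locus iterator whose accessors appear in the hunk above (assumed here to be htsjdk.samtools.util.SamLocusIterator; the single-argument constructor and the "example.bam" path are illustrative, not taken from this patch):

    import htsjdk.samtools.SamReader;
    import htsjdk.samtools.SamReaderFactory;
    import htsjdk.samtools.util.SamLocusIterator;
    import java.io.File;

    public class LocusWalk {
        public static void main(final String[] args) {
            final SamReader reader = SamReaderFactory.makeDefault().open(new File("example.bam")); // placeholder input
            final SamLocusIterator locusIterator = new SamLocusIterator(reader);
            locusIterator.setEmitUncoveredLoci(false);        // only report covered loci
            locusIterator.setQualityScoreCutoff(20);          // ignore base calls below Q20
            locusIterator.setMappingQualityScoreCutoff(10);   // ignore reads with mapping quality below 10
            for (final SamLocusIterator.LocusInfo locus : locusIterator) {
                System.out.println(locus.getSequenceName() + ":" + locus.getPosition()
                        + " depth=" + locus.getRecordAndPositions().size());
            }
            locusIterator.close();
        }
    }
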
diff --git a/src/java/htsjdk/samtools/util/SortingCollection.java b/src/java/htsjdk/samtools/util/SortingCollection.java
deleted file mode 100644
index 681f458..0000000
--- a/src/java/htsjdk/samtools/util/SortingCollection.java
+++ /dev/null
@@ -1,532 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools.util;
-
-import htsjdk.samtools.Defaults;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.Serializable;
-import java.lang.reflect.Array;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Comparator;
-import java.util.Iterator;
-import java.util.List;
-import java.util.NoSuchElementException;
-import java.util.TreeSet;
-
-/**
- * Collection to which many records can be added.  After all records are added, the collection can be
- * iterated, and the records will be returned in order defined by the comparator.  Records may be spilled
- * to a temporary directory if there are more records added than will fit in memory.  As a result of this,
- * the objects returned may not be identical to the objects added to the collection, but they should be
- * equal as determined by the codec used to write them to disk and read them back.
- *
- * When iterating over the collection, the number of file handles required is numRecordsInCollection/maxRecordsInRam.
- * If this becomes a limiting factor, a file handle cache could be added.
- *
- * If the Snappy native library is available and the snappy.disable system property is not set to true,
- * Snappy is used to compress temporary files.
- */
-public class SortingCollection<T> implements Iterable<T> {
-
-    /**
-     * Clients must implement this interface, which defines how records are written to and
-     * read from a file.
-     */
-    public interface Codec<T> extends Cloneable {
-        /**
-         * Where to write encoded output
-         * @param os the output stream to which encoded records will be written
-         */
-        void setOutputStream(OutputStream os);
-
-        /**
-         * Where to read encoded input from
-         * @param is the input stream from which encoded records will be read
-         */
-        void setInputStream(InputStream is);
-        /**
-         * Write object to output stream
-         * @param val what to write
-         */
-        void encode(T val);
-
-        /**
-         * Read the next record from the input stream and convert into a java object.
-         * @return null if no more records.  Should throw exception if EOF is encountered in the middle of
-         * a record.
-         */
-        T decode();
-
-        /**
-         * Must return a cloned copy of the codec that can be used independently of
-         * the original instance.  This is required so that multiple codecs can exist simultaneously,
-         * each reading a separate file.
-         */
-        Codec<T> clone();
-    }
-
-    /** Directories where files of sorted records go. */
-    private final File[] tmpDirs;
-
-    /** The minimum amount of space free on a temp filesystem to write a file there. */
-    private final long TMP_SPACE_FREE = IOUtil.FIVE_GBS;
-
-    /**
-     * Used to write records to file, and used as a prototype to create codecs for reading.
-     */
-    private final SortingCollection.Codec<T> codec;
-
-    /**
-     * For sorting, both when spilling records to file, and merge sorting.
-     */
-    private final Comparator<T> comparator;
-    private final int maxRecordsInRam;
-    private int numRecordsInRam = 0;
-    private T[] ramRecords;
-    private boolean iterationStarted = false;
-    private boolean doneAdding = false;
-
-    /**
-     * Set to true when all temp files have been cleaned up
-     */
-    private boolean cleanedUp = false;
-
-    /**
-     * List of files in tmpDir containing sorted records
-     */
-    private final List<File> files = new ArrayList<File>();
-
-    private boolean destructiveIteration = true;
-
-    private TempStreamFactory tempStreamFactory = new TempStreamFactory();
-
-    /**
-     * Prepare to accumulate records to be sorted
-     * @param componentType Class of the record to be sorted.  Necessary because of Java generic lameness.
-     * @param codec For writing records to file and reading them back into RAM
-     * @param comparator Defines output sort order
-     * @param maxRecordsInRam how many records to accumulate before spilling to disk
-     * @param tmpDir Where to write files of records that will not fit in RAM
-     */
-    private SortingCollection(final Class<T> componentType, final SortingCollection.Codec<T> codec,
-                             final Comparator<T> comparator, final int maxRecordsInRam, final File... tmpDir) {
-        if (maxRecordsInRam <= 0) {
-            throw new IllegalArgumentException("maxRecordsInRam must be > 0");
-        }
-
-        if (tmpDir == null || tmpDir.length == 0) {
-            throw new IllegalArgumentException("At least one temp directory must be provided.");
-        }
-
-        this.tmpDirs = tmpDir;
-        this.codec = codec;
-        this.comparator = comparator;
-        this.maxRecordsInRam = maxRecordsInRam;
-        this.ramRecords = (T[])Array.newInstance(componentType, maxRecordsInRam);
-    }
-
-    public void add(final T rec) {
-        if (doneAdding) {
-            throw new IllegalStateException("Cannot add after calling doneAdding()");
-        }
-        if (iterationStarted) {
-            throw new IllegalStateException("Cannot add after calling iterator()");
-        }
-        if (numRecordsInRam == maxRecordsInRam) {
-            spillToDisk();
-        }
-        ramRecords[numRecordsInRam++] = rec;
-    }
-
-    /**
-     * This method can be called after the caller is done adding to the collection, in order to possibly
-     * free up memory.  If iterator() is called immediately after the caller is done adding, this is not
-     * necessary, because iterator() triggers the same freeing.
-     */
-    public void doneAdding() {
-        if (this.cleanedUp) {
-            throw new IllegalStateException("Cannot call doneAdding() after cleanup() was called.");
-        }
-        if (doneAdding) {
-            return;
-        }
-
-        doneAdding = true;
-
-        if (this.files.isEmpty()) {
-            return;
-        }
-
-        if (this.numRecordsInRam > 0) {
-            spillToDisk();
-        }
-
-        // Facilitate GC
-        this.ramRecords = null;
-    }
-
-    /**
-     * @return True if this collection is allowed to discard data during iteration in order to reduce memory
-     * footprint, precluding a second iteration over the collection.
-     */
-    public boolean isDestructiveIteration() {
-        return destructiveIteration;
-    }
-
-    /**
-     * Tell this collection that it is allowed to discard data during iteration in order to reduce memory footprint,
-     * precluding a second iteration.  This is true by default.
-     */
-    public void setDestructiveIteration(boolean destructiveIteration) {
-        this.destructiveIteration = destructiveIteration;
-    }
-
-    /**
-     * Sort the records in memory, write them to a file, and clear the buffer of records in memory.
-     */
-    private void spillToDisk() {
-        try {
-            Arrays.sort(this.ramRecords, 0, this.numRecordsInRam, this.comparator);
-            final File f = newTempFile();
-            OutputStream os = null;
-            try {
-                os = tempStreamFactory.wrapTempOutputStream(new FileOutputStream(f), Defaults.BUFFER_SIZE);
-                this.codec.setOutputStream(os);
-                for (int i = 0; i < this.numRecordsInRam; ++i) {
-                    this.codec.encode(ramRecords[i]);
-                    // Facilitate GC
-                    this.ramRecords[i] = null;
-                }
-
-                os.flush();
-            } catch (RuntimeIOException ex) {
-                throw new RuntimeIOException("Problem writing temporary file " + f.getAbsolutePath() +
-                        ".  Try setting TMP_DIR to a file system with lots of space.", ex);
-            } finally {
-                if (os != null) {
-                    os.close();
-                }
-            }
-
-            this.numRecordsInRam = 0;
-            this.files.add(f);
-
-        }
-        catch (IOException e) {
-            throw new RuntimeIOException(e);
-        }
-    }
-
-    /**
-     * Creates a new tmp file on one of the available temp filesystems, registers it for deletion
-     * on JVM exit and then returns it.
-     */
-    private File newTempFile() throws IOException {
-        return IOUtil.newTempFile("sortingcollection.", ".tmp", this.tmpDirs, TMP_SPACE_FREE);
-    }
-
-    /**
-     * Prepare to iterate through the records in order.  This method may be called more than once,
-     * but add() may not be called after this method has been called.
-     */
-    public CloseableIterator<T> iterator() {
-        if (this.cleanedUp) {
-            throw new IllegalStateException("Cannot call iterator() after cleanup() was called.");
-        }
-        doneAdding();
-
-        this.iterationStarted = true;
-        if (this.files.isEmpty()) {
-            return new InMemoryIterator();
-        } else {
-            return new MergingIterator();
-        }
-    }
-
-    /**
-     * Delete any temporary files.  After this method is called, iterator() may not be called.
-     */
-    public void cleanup() {
-        this.iterationStarted = true;
-        this.cleanedUp = true;
-
-        IOUtil.deleteFiles(this.files);
-    }
-
-    /**
-     * Syntactic sugar around the ctor, to save some typing of type parameters
-     *
-     * @param componentType Class of the record to be sorted.  Necessary because of Java generic lameness.
-     * @param codec For writing records to file and reading them back into RAM
-     * @param comparator Defines output sort order
-     * @param maxRecordsInRAM how many records to accumulate in memory before spilling to disk
-     * @param tmpDir Where to write files of records that will not fit in RAM
-     */
-    public static <T> SortingCollection<T> newInstance(final Class<T> componentType,
-                                                       final SortingCollection.Codec<T> codec,
-                                                       final Comparator<T> comparator,
-                                                       final int maxRecordsInRAM,
-                                                       final File... tmpDir) {
-        return new SortingCollection<T>(componentType, codec, comparator, maxRecordsInRAM, tmpDir);
-
-    }
-
-    /**
-     * Syntactic sugar around the ctor, to save some typing of type parameters
-     *
-     * @param componentType Class of the record to be sorted.  Necessary because of Java generic lameness.
-     * @param codec For writing records to file and reading them back into RAM
-     * @param comparator Defines output sort order
-     * @param maxRecordsInRAM how many records to accumulate in memory before spilling to disk
-     * @param tmpDirs Where to write files of records that will not fit in RAM
-     */
-    public static <T> SortingCollection<T> newInstance(final Class<T> componentType,
-                                                       final SortingCollection.Codec<T> codec,
-                                                       final Comparator<T> comparator,
-                                                       final int maxRecordsInRAM,
-                                                       final Collection<File> tmpDirs) {
-        return new SortingCollection<T>(componentType,
-                                        codec,
-                                        comparator,
-                                        maxRecordsInRAM,
-                                        tmpDirs.toArray(new File[tmpDirs.size()]));
-
-    }
-
-
-    /**
-     * Syntactic sugar around the ctor, to save some typing of type parameters.  Writes files to java.io.tmpdir
-     *
-     * @param componentType Class of the record to be sorted.  Necessary because of Java generic lameness.
-     * @param codec For writing records to file and reading them back into RAM
-     * @param comparator Defines output sort order
-     * @param maxRecordsInRAM how many records to accumulate in memory before spilling to disk
-     */
-    public static <T> SortingCollection<T> newInstance(final Class<T> componentType,
-                                                       final SortingCollection.Codec<T> codec,
-                                                       final Comparator<T> comparator,
-                                                       final int maxRecordsInRAM) {
-
-        final File tmpDir = new File(System.getProperty("java.io.tmpdir"));
-        return new SortingCollection<T>(componentType, codec, comparator, maxRecordsInRAM, tmpDir);
-    }
-
-    /**
-     * For iteration when number of records added is less than the threshold for spilling to disk.
-     */
-    class InMemoryIterator implements CloseableIterator<T> {
-        private int iterationIndex = 0;
-
-        InMemoryIterator() {
-            Arrays.sort(SortingCollection.this.ramRecords,
-                        0,
-                        SortingCollection.this.numRecordsInRam,
-                        SortingCollection.this.comparator);
-        }
-
-        public void close() {
-            // nothing to do
-        }
-
-        public boolean hasNext() {
-            return this.iterationIndex < SortingCollection.this.numRecordsInRam;
-        }
-
-        public T next() {
-            if (!hasNext()) {
-                throw new NoSuchElementException();
-            }
-            T ret = SortingCollection.this.ramRecords[iterationIndex];
-            if (destructiveIteration) SortingCollection.this.ramRecords[iterationIndex] = null;
-            ++iterationIndex;
-            return ret;
-        }
-
-        public void remove() {
-            throw new UnsupportedOperationException();
-        }
-    }
-
-    /**
-     * For iteration when spilling to disk has occurred.
- * Each file has its records in sort order within the file.
- * This iterator closes itself automatically once it has iterated to the end, but if iteration
- * stops before the end it is a good idea to call close().
-     *
-     * Algorithm: MergingIterator maintains a PriorityQueue of PeekFileRecordIterators.
-     * Each PeekFileRecordIterator iterates through a file in which the records are sorted.
-     * The comparator for PeekFileRecordIterator used by the PriorityQueue peeks at the next record from
-     * the file, so the first element in the PriorityQueue is the file that has the next record to be emitted.
-     * In order to get the next record, the first PeekFileRecordIterator in the PriorityQueue is popped,
-     * the record is obtained from that iterator, and then if that iterator is not empty, it is pushed back into
-     * the PriorityQueue.  Because it now has a different record as its next element, it may go into another
- * location in the PriorityQueue.
-     */
-    class MergingIterator implements CloseableIterator<T> {
-        private final PollableTreeSet<PeekFileRecordIterator> queue;
-
-        MergingIterator() {
-            this.queue = new PollableTreeSet<PeekFileRecordIterator>(new PeekFileRecordIteratorComparator());
-            int n = 0;
-            for (final File f : SortingCollection.this.files) {
-                final FileRecordIterator it = new FileRecordIterator(f);
-                if (it.hasNext()) {
-                    this.queue.add(new PeekFileRecordIterator(it, n++));
-                }
-                else {
-                    it.close();
-                }
-            }
-        }
-
-        public boolean hasNext() {
-            return !this.queue.isEmpty();
-        }
-
-        public T next() {
-            if (!hasNext()) {
-                throw new NoSuchElementException();
-            }
-
-            final PeekFileRecordIterator fileIterator = queue.poll();
-            final T ret = fileIterator.next();
-            if (fileIterator.hasNext()) {
-                this.queue.add(fileIterator);
-            }
-            else {
-                ((CloseableIterator<T>)fileIterator.getUnderlyingIterator()).close();
-            }
-
-            return ret;
-        }
-
-        public void remove() {
-            throw new UnsupportedOperationException();
-        }
-
-        public void close() {
-            while (!this.queue.isEmpty()) {
-                final PeekFileRecordIterator it = this.queue.poll();
-                ((CloseableIterator<T>)it.getUnderlyingIterator()).close();
-            }
-        }
-    }
-
-    /**
-     * Read a file of records in format defined by the codec
-     */
-    class FileRecordIterator implements CloseableIterator<T> {
-        private final File file;
-        private final FileInputStream is;
-        private final Codec<T> codec;
-        private T currentRecord = null;
-
-        FileRecordIterator(final File file) {
-            this.file = file;
-            try {
-                this.is = new FileInputStream(file);
-                this.codec = SortingCollection.this.codec.clone();
-                this.codec.setInputStream(tempStreamFactory.wrapTempInputStream(this.is, Defaults.BUFFER_SIZE));
-                advance();
-            }
-            catch (FileNotFoundException e) {
-                throw new RuntimeIOException(e);
-            }
-        }
-
-        public boolean hasNext() {
-            return this.currentRecord != null;
-        }
-
-        public T next() {
-            if (!hasNext()) {
-                throw new NoSuchElementException();
-            }
-            final T ret = this.currentRecord;
-            advance();
-            return ret;
-        }
-
-        public void remove() {
-            throw new UnsupportedOperationException();
-        }
-
-        private void advance() {
-            this.currentRecord = this.codec.decode();
-        }
-
-        public void close() {
-            CloserUtil.close(this.is);
-        }
-    }
-
-
-    /**
-     * A PeekIterator over a file's records that also carries a serial number used for tie-breaking in the sort.
-     */
-    class PeekFileRecordIterator extends PeekIterator<T> {
-        final int n; // A serial number used for tie-breaking in the sort
-        PeekFileRecordIterator(final Iterator<T> underlyingIterator, final int n) {
-            super(underlyingIterator);
-            this.n = n;
-        }
-    }
-
-    class PeekFileRecordIteratorComparator implements Comparator<PeekFileRecordIterator>, Serializable {
-        private static final long serialVersionUID = 1L;
-
-        public int compare(final PeekFileRecordIterator lhs, final PeekFileRecordIterator rhs) {
-            final int result = comparator.compare(lhs.peek(), rhs.peek());
-            if (result == 0) return lhs.n - rhs.n;
-            else return result;
-        }
-    }
-
-    /** Little class that provides the Java 1.5 TreeSet with a poll() method */
-    static class PollableTreeSet<T> extends TreeSet<T> {
-        PollableTreeSet(final Comparator<? super T> comparator) {
-            super(comparator);
-        }
-
-        public T poll() {
-            if (isEmpty()) {
-                return null;
-            }
-            else {
-                final T t = first();
-                remove(t);
-                return t;
-            }
-        }
-    }
-}
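
A minimal sketch of how SortingCollection is typically driven, using a hypothetical line-oriented String codec (the codec and the tiny maxRecordsInRam value are invented for illustration and are not part of this patch):

    import htsjdk.samtools.util.CloseableIterator;
    import htsjdk.samtools.util.SortingCollection;

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.InputStreamReader;
    import java.io.OutputStream;
    import java.util.Comparator;

    public class SortingCollectionDemo {

        /** Hypothetical codec: one String record per line. */
        static class StringCodec implements SortingCollection.Codec<String> {
            private OutputStream os;
            private BufferedReader in;

            public void setOutputStream(final OutputStream os) { this.os = os; }
            public void setInputStream(final InputStream is) { this.in = new BufferedReader(new InputStreamReader(is)); }

            public void encode(final String val) {
                try { os.write((val + "\n").getBytes()); }
                catch (final IOException e) { throw new RuntimeException(e); }
            }

            public String decode() {
                try { return in.readLine(); }   // returns null at EOF, as the interface requires
                catch (final IOException e) { throw new RuntimeException(e); }
            }

            public StringCodec clone() { return new StringCodec(); }
        }

        public static void main(final String[] args) {
            // maxRecordsInRam of 2 forces a spill to a temp file, so the merge path is exercised.
            final SortingCollection<String> sorter = SortingCollection.newInstance(
                    String.class, new StringCodec(), Comparator.<String>naturalOrder(), 2);
            sorter.add("cherry");
            sorter.add("apple");
            sorter.add("banana");
            final CloseableIterator<String> it = sorter.iterator();
            while (it.hasNext()) {
                System.out.println(it.next());  // prints apple, banana, cherry
            }
            it.close();
            sorter.cleanup();
        }
    }
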
diff --git a/src/java/htsjdk/samtools/util/zip/DeflaterFactory.java b/src/java/htsjdk/samtools/util/zip/DeflaterFactory.java
deleted file mode 100644
index 64bb8aa..0000000
--- a/src/java/htsjdk/samtools/util/zip/DeflaterFactory.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2013 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools.util.zip;
-
-import htsjdk.samtools.Defaults;
-import htsjdk.samtools.SAMException;
-
-import java.lang.reflect.Constructor;
-import java.util.zip.Deflater;
-
-/**
- * Creates a zlib-based Deflater if the JNI library and other required libraries are available; otherwise creates the
- * standard JDK Deflater.
- * Java 7 has its own Deflater implementation (libzip.so).  This is almost as fast as a zlib-based Deflater, so in general
- * there isn't a compelling reason to use zlib.  However, Intel has created a hardware-assisted zlib implementation
- * as part of their IPP (Integrated Performance Primitives) package that can run significantly faster on some Intel
- * hardware.  We have seen compression times reduced by 13% to 33% depending on the particular hardware, and hope that
- * newer Intel processors will be even better.
- *
- * Note that this class will no longer be necessary once Java 8 is required, because JDK 8 uses zlib instead
- * of the libzip implementation.
- */
-public class DeflaterFactory {
-
-    private static Constructor<IntelDeflater> intelDeflaterConstructor;
-
-    static {
-        try {
-            if (Defaults.TRY_USE_INTEL_DEFLATER) {
-                final Class<IntelDeflater> clazz = (Class<IntelDeflater>) Class.forName("htsjdk.samtools.util.zip.IntelDeflater");
-                intelDeflaterConstructor = clazz.getConstructor(Integer.TYPE, Boolean.TYPE);
-            }
-        } catch (ClassNotFoundException e) {
-            intelDeflaterConstructor = null;
-        } catch (NoSuchMethodException e) {
-            intelDeflaterConstructor = null;
-        } catch (UnsatisfiedLinkError e) {
-            intelDeflaterConstructor = null;
-        }
-    }
-
-    public static Deflater makeDeflater(final int compressionLevel, final boolean nowrap) {
-        if (intelDeflaterConstructor != null) {
-            try {
-                return intelDeflaterConstructor.newInstance(compressionLevel, nowrap);
-            } catch (Exception e) {
-                throw new SAMException("Exception constructing IntelDeflater", e);
-            }
-        } else {
-            return new Deflater(compressionLevel, nowrap);
-        }
-    }
-
-    public static boolean usingIntelDeflater() {
-        return intelDeflaterConstructor != null;
-    }
-}
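
A usage sketch for the factory (the compression level and nowrap flag are example values only):

    import htsjdk.samtools.util.zip.DeflaterFactory;
    import java.util.zip.Deflater;

    public class DeflaterFactoryDemo {
        public static void main(final String[] args) {
            // nowrap=true omits the zlib header and checksum, matching the gzip/BGZF-style framing;
            // the factory silently falls back to the JDK Deflater when IntelDeflater cannot be loaded.
            final Deflater deflater = DeflaterFactory.makeDeflater(5, true);
            System.out.println("Using IntelDeflater: " + DeflaterFactory.usingIntelDeflater());
            deflater.end();
        }
    }
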
diff --git a/src/java/htsjdk/samtools/util/zip/IntelDeflater.java b/src/java/htsjdk/samtools/util/zip/IntelDeflater.java
deleted file mode 100644
index 30cb698..0000000
--- a/src/java/htsjdk/samtools/util/zip/IntelDeflater.java
+++ /dev/null
@@ -1,567 +0,0 @@
-/*
- * Copyright (c) 1996, 2010, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.  Oracle designates this
- * particular file as subject to the "Classpath" exception as provided
- * by Oracle in the LICENSE file that accompanied this code.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-package htsjdk.samtools.util.zip;
-
-import htsjdk.samtools.Defaults;
-
-import java.io.File;
-import java.net.URL;
-import java.util.zip.Deflater;
-
-/**
- * This is a copy of java.util.zip.Deflater from OpenJDK 7, with the following changes:
- * - package and class name changed
- * - static block to load libIntelDeflater library
- * - extends java.util.zip.Deflater so that IntelDeflater object can be used as regular Deflater object.
- *   Note however that all methods of Deflater are overridden.
- *
- * The shared library is found via one of the following mechanisms:
- * 1. If the samjdk.intel_deflater_so_path system property is set, it is assumed to be the path of libIntelDeflater.so.
- * 2. If the system property is not set, the directory containing the jar file from which this class was loaded is tried
- *    as the location of libIntelDeflater.so.
- * 3. If either of the above fails to find the library, the regular LD_LIBRARY_PATH is used to find the library.
- * 4. If that doesn't work, this class fails to load and the code falls back to the regular Java Deflater class.
- *
- *
- * The rest of this document is copied verbatim from the original OpenJDK file.
- *
- * This class provides support for general purpose compression using the
- * popular ZLIB compression library. The ZLIB compression library was
- * initially developed as part of the PNG graphics standard and is not
- * protected by patents. It is fully described in the specifications at
- * the <a href="package-summary.html#package_description">java.util.zip
- * package description</a>.
- *
- * <p>The following code fragment demonstrates a trivial compression
- * and decompression of a string using <tt>IntelDeflater</tt> and
- * <tt>Inflater</tt>.
- *
- * <blockquote><pre>
- * try {
- *     // Encode a String into bytes
- *     String inputString = "blahblahblah";
- *     byte[] input = inputString.getBytes("UTF-8");
- *
- *     // Compress the bytes
- *     byte[] output = new byte[100];
- *     IntelDeflater compresser = new IntelDeflater();
- *     compresser.setInput(input);
- *     compresser.finish();
- *     int compressedDataLength = compresser.deflate(output);
- *     compresser.end();
- *
- *     // Decompress the bytes
- *     Inflater decompresser = new Inflater();
- *     decompresser.setInput(output, 0, compressedDataLength);
- *     byte[] result = new byte[100];
- *     int resultLength = decompresser.inflate(result);
- *     decompresser.end();
- *
- *     // Decode the bytes into a String
- *     String outputString = new String(result, 0, resultLength, "UTF-8");
- * } catch(java.io.UnsupportedEncodingException ex) {
- *     // handle
- * } catch (java.util.zip.DataFormatException ex) {
- *     // handle
- * }
- * </pre></blockquote>
- *
- * @see         java.util.zip.Inflater
- * @author      David Connelly
- */
-public
-class IntelDeflater extends Deflater {
-
-    private final ZStreamRef zsRef;
-    private byte[] buf = new byte[0];
-    private int off, len;
-    private int level, strategy;
-    private boolean setParams;
-    private boolean finish, finished;
-
-    /**
-     * Compression flush mode used to achieve best compression result.
-     *
-     * @see IntelDeflater#deflate(byte[], int, int, int)
-     * @since 1.7
-     */
-    public static final int NO_FLUSH = 0;
-
-    /**
-     * Compression flush mode used to flush out all pending output; may
-     * degrade compression for some compression algorithms.
-     *
-     * @see IntelDeflater#deflate(byte[], int, int, int)
-     * @since 1.7
-     */
-    public static final int SYNC_FLUSH = 2;
-
-    /**
-     * Compression flush mode used to flush out all pending output and
-     * reset the deflater. Using this mode too often can seriously degrade
-     * compression.
-     *
-     * @see IntelDeflater#deflate(byte[], int, int, int)
-     * @since 1.7
-     */
-    public static final int FULL_FLUSH = 3;
-
-    static {
-        try {
-            final File sharedLibrary;
-            if (Defaults.INTEL_DEFLATER_SHARED_LIBRARY_PATH != null) {
-                // Load via path set by -Dsamjdk.intel_deflater_so_path=<path>
-                sharedLibrary = new File(Defaults.INTEL_DEFLATER_SHARED_LIBRARY_PATH);
-            } else {
-                // Look in directory containing this class for the library
-                URL jarUrl = IntelDeflater.class.getProtectionDomain().getCodeSource().getLocation();
-                sharedLibrary = new File(new File(jarUrl.getPath()).getParentFile(), "libIntelDeflater.so");
-            }
-            System.load(sharedLibrary.getAbsolutePath());
-        } catch (Throwable e) {
-            // Possible exceptions:
-            // System.load: UnsatisfiedLinkError
-            // getProtectionDomain: SecurityException
-            // NullPointerException due to getCodeSource returning null
-
-            // Try to find via LD_LIBRARY_PATH
-            System.loadLibrary("IntelDeflater");
-        }
-        initIDs();
-    }
-
-    /**
-     * Creates a new compressor using the specified compression level.
-     * If 'nowrap' is true then the ZLIB header and checksum fields will
-     * not be used in order to support the compression format used in
-     * both GZIP and PKZIP.
-     * @param level the compression level (0-9)
-     * @param nowrap if true then use GZIP compatible compression
-     */
-    public IntelDeflater(int level, boolean nowrap) {
-        this.level = level;
-        this.strategy = DEFAULT_STRATEGY;
-        this.zsRef = new ZStreamRef(init(level, DEFAULT_STRATEGY, nowrap));
-    }
-
-    /**
-     * Creates a new compressor using the specified compression level.
-     * Compressed data will be generated in ZLIB format.
-     * @param level the compression level (0-9)
-     */
-    public IntelDeflater(int level) {
-        this(level, false);
-    }
-
-    /**
-     * Creates a new compressor with the default compression level.
-     * Compressed data will be generated in ZLIB format.
-     */
-    public IntelDeflater() {
-        this(DEFAULT_COMPRESSION, false);
-    }
-
-    /**
-     * Sets input data for compression. This should be called whenever
-     * needsInput() returns true indicating that more input data is required.
-     * @param b the input data bytes
-     * @param off the start offset of the data
-     * @param len the length of the data
-     * @see IntelDeflater#needsInput
-     */
-    @Override
-    public void setInput(byte[] b, int off, int len) {
-        if (b== null) {
-            throw new NullPointerException();
-        }
-        if (off < 0 || len < 0 || off > b.length - len) {
-            throw new ArrayIndexOutOfBoundsException();
-        }
-        synchronized (zsRef) {
-            this.buf = b;
-            this.off = off;
-            this.len = len;
-        }
-    }
-
-    /**
-     * Sets input data for compression. This should be called whenever
-     * needsInput() returns true indicating that more input data is required.
-     * @param b the input data bytes
-     * @see IntelDeflater#needsInput
-     */
-    @Override
-    public void setInput(byte[] b) {
-        setInput(b, 0, b.length);
-    }
-
-    /**
-     * Sets preset dictionary for compression. A preset dictionary is used
-     * when the history buffer can be predetermined. When the data is later
-     * uncompressed with Inflater.inflate(), Inflater.getAdler() can be called
-     * in order to get the Adler-32 value of the dictionary required for
-     * decompression.
-     * @param b the dictionary data bytes
-     * @param off the start offset of the data
-     * @param len the length of the data
-     * @see java.util.zip.Inflater#inflate
-     * @see java.util.zip.Inflater#getAdler
-     */
-    @Override
-    public void setDictionary(byte[] b, int off, int len) {
-        if (b == null) {
-            throw new NullPointerException();
-        }
-        if (off < 0 || len < 0 || off > b.length - len) {
-            throw new ArrayIndexOutOfBoundsException();
-        }
-        synchronized (zsRef) {
-            ensureOpen();
-            setDictionary(zsRef.address(), b, off, len);
-        }
-    }
-
-    /**
-     * Sets preset dictionary for compression. A preset dictionary is used
-     * when the history buffer can be predetermined. When the data is later
-     * uncompressed with Inflater.inflate(), Inflater.getAdler() can be called
-     * in order to get the Adler-32 value of the dictionary required for
-     * decompression.
-     * @param b the dictionary data bytes
-     * @see java.util.zip.Inflater#inflate
-     * @see java.util.zip.Inflater#getAdler
-     */
-    @Override
-    public void setDictionary(byte[] b) {
-        setDictionary(b, 0, b.length);
-    }
-
-    /**
-     * Sets the compression strategy to the specified value.
-     * @param strategy the new compression strategy
-     * @exception IllegalArgumentException if the compression strategy is
-     *                                     invalid
-     */
-    @Override
-    public void setStrategy(int strategy) {
-        switch (strategy) {
-            case DEFAULT_STRATEGY:
-            case FILTERED:
-            case HUFFMAN_ONLY:
-                break;
-            default:
-                throw new IllegalArgumentException();
-        }
-        synchronized (zsRef) {
-            if (this.strategy != strategy) {
-                this.strategy = strategy;
-                setParams = true;
-            }
-        }
-    }
-
-    /**
-     * Sets the current compression level to the specified value.
-     * @param level the new compression level (0-9)
-     * @exception IllegalArgumentException if the compression level is invalid
-     */
-    @Override
-    public void setLevel(int level) {
-        if ((level < 0 || level > 9) && level != DEFAULT_COMPRESSION) {
-            throw new IllegalArgumentException("invalid compression level");
-        }
-        synchronized (zsRef) {
-            if (this.level != level) {
-                this.level = level;
-                setParams = true;
-            }
-        }
-    }
-
-    /**
-     * Returns true if the input data buffer is empty and setInput()
-     * should be called in order to provide more input.
-     * @return true if the input data buffer is empty and setInput()
-     * should be called in order to provide more input
-     */
-    @Override
-    public boolean needsInput() {
-        return len <= 0;
-    }
-
-    /**
-     * When called, indicates that compression should end with the current
-     * contents of the input buffer.
-     */
-    @Override
-    public void finish() {
-        synchronized (zsRef) {
-            finish = true;
-        }
-    }
-
-    /**
-     * Returns true if the end of the compressed data output stream has
-     * been reached.
-     * @return true if the end of the compressed data output stream has
-     * been reached
-     */
-    @Override
-    public boolean finished() {
-        synchronized (zsRef) {
-            return finished;
-        }
-    }
-
-    /**
-     * Compresses the input data and fills specified buffer with compressed
-     * data. Returns actual number of bytes of compressed data. A return value
-     * of 0 indicates that {@link #needsInput() needsInput} should be called
-     * in order to determine if more input data is required.
-     *
-     * <p>This method uses {@link #NO_FLUSH} as its compression flush mode.
-     * An invocation of this method of the form {@code deflater.deflate(b, off, len)}
-     * yields the same result as the invocation of
-     * {@code deflater.deflate(b, off, len, IntelDeflater.NO_FLUSH)}.
-     *
-     * @param b the buffer for the compressed data
-     * @param off the start offset of the data
-     * @param len the maximum number of bytes of compressed data
-     * @return the actual number of bytes of compressed data written to the
-     *         output buffer
-     */
-    @Override
-    public int deflate(byte[] b, int off, int len) {
-        return deflate(b, off, len, NO_FLUSH);
-    }
-
-    /**
-     * Compresses the input data and fills specified buffer with compressed
-     * data. Returns actual number of bytes of compressed data. A return value
-     * of 0 indicates that {@link #needsInput() needsInput} should be called
-     * in order to determine if more input data is required.
-     *
-     * <p>This method uses {@link #NO_FLUSH} as its compression flush mode.
-     * An invocation of this method of the form {@code deflater.deflate(b)}
-     * yields the same result as the invocation of
-     * {@code deflater.deflate(b, 0, b.length, IntelDeflater.NO_FLUSH)}.
-     *
-     * @param b the buffer for the compressed data
-     * @return the actual number of bytes of compressed data written to the
-     *         output buffer
-     */
-    @Override
-    public int deflate(byte[] b) {
-        return deflate(b, 0, b.length, NO_FLUSH);
-    }
-
-    /**
-     * Compresses the input data and fills the specified buffer with compressed
-     * data. Returns actual number of bytes of data compressed.
-     *
-     * <p>Compression flush mode is one of the following three modes:
-     *
-     * <ul>
-     * <li>{@link #NO_FLUSH}: allows the deflater to decide how much data
-     * to accumulate, before producing output, in order to achieve the best
-     * compression (should be used in normal use scenario). A return value
-     * of 0 in this flush mode indicates that {@link #needsInput()} should
-     * be called in order to determine if more input data is required.
-     *
-     * <li>{@link #SYNC_FLUSH}: all pending output in the deflater is flushed,
-     * to the specified output buffer, so that an inflater that works on
-     * compressed data can get all input data available so far (In particular
-     * the {@link #needsInput()} returns {@code true} after this invocation
-     * if enough output space is provided). Flushing with {@link #SYNC_FLUSH}
-     * may degrade compression for some compression algorithms and so it
-     * should be used only when necessary.
-     *
-     * <li>{@link #FULL_FLUSH}: all pending output is flushed out as with
-     * {@link #SYNC_FLUSH}. The compression state is reset so that the inflater
-     * that works on the compressed output data can restart from this point
-     * if previous compressed data has been damaged or if random access is
-     * desired. Using {@link #FULL_FLUSH} too often can seriously degrade
-     * compression.
-     * </ul>
-     *
-     * <p>In the case of {@link #FULL_FLUSH} or {@link #SYNC_FLUSH}, if
-     * the return value is {@code len}, the space available in output
-     * buffer {@code b}, this method should be invoked again with the same
-     * {@code flush} parameter and more output space.
-     *
-     * @param b the buffer for the compressed data
-     * @param off the start offset of the data
-     * @param len the maximum number of bytes of compressed data
-     * @param flush the compression flush mode
-     * @return the actual number of bytes of compressed data written to
-     *         the output buffer
-     *
-     * @throws IllegalArgumentException if the flush mode is invalid
-     * @since 1.7
-     */
-    public int deflate(byte[] b, int off, int len, int flush) {
-        //System.out.println("Inside IntelDeflater\n");
-        if (b == null) {
-            throw new NullPointerException();
-        }
-        if (off < 0 || len < 0 || off > b.length - len) {
-            throw new ArrayIndexOutOfBoundsException();
-        }
-        synchronized (zsRef) {
-            ensureOpen();
-            if (flush == NO_FLUSH || flush == SYNC_FLUSH ||
-                    flush == FULL_FLUSH)
-                return deflateBytes(zsRef.address(), b, off, len, flush);
-            throw new IllegalArgumentException();
-        }
-    }
-
-    /**
-     * Returns the ADLER-32 value of the uncompressed data.
-     * @return the ADLER-32 value of the uncompressed data
-     */
-    @Override
-    public int getAdler() {
-        synchronized (zsRef) {
-            ensureOpen();
-            return getAdler(zsRef.address());
-        }
-    }
-
-    /**
-     * Returns the total number of uncompressed bytes input so far.
-     *
-     * <p>Since the number of bytes may be greater than
-     * Integer.MAX_VALUE, the {@link #getBytesRead()} method is now
-     * the preferred means of obtaining this information.</p>
-     *
-     * @return the total number of uncompressed bytes input so far
-     */
-    @Override
-    public int getTotalIn() {
-        return (int) getBytesRead();
-    }
-
-    /**
-     * Returns the total number of uncompressed bytes input so far.</p>
-     *
-     * @return the total (non-negative) number of uncompressed bytes input so far
-     * @since 1.5
-     */
-    @Override
-    public long getBytesRead() {
-        synchronized (zsRef) {
-            ensureOpen();
-            return getBytesRead(zsRef.address());
-        }
-    }
-
-    /**
-     * Returns the total number of compressed bytes output so far.
-     *
-     * <p>Since the number of bytes may be greater than
-     * Integer.MAX_VALUE, the {@link #getBytesWritten()} method is now
-     * the preferred means of obtaining this information.</p>
-     *
-     * @return the total number of compressed bytes output so far
-     */
-    @Override
-    public int getTotalOut() {
-        return (int) getBytesWritten();
-    }
-
-    /**
-     * Returns the total number of compressed bytes output so far.</p>
-     *
-     * @return the total (non-negative) number of compressed bytes output so far
-     * @since 1.5
-     */
-    @Override
-    public long getBytesWritten() {
-        synchronized (zsRef) {
-            ensureOpen();
-            return getBytesWritten(zsRef.address());
-        }
-    }
-
-    /**
-     * Resets deflater so that a new set of input data can be processed.
-     * Keeps current compression level and strategy settings.
-     */
-    @Override
-    public void reset() {
-        synchronized (zsRef) {
-            ensureOpen();
-            reset(zsRef.address());
-            finish = false;
-            finished = false;
-            off = len = 0;
-        }
-    }
-
-    /**
-     * Closes the compressor and discards any unprocessed input.
-     * This method should be called when the compressor is no longer
-     * being used, but will also be called automatically by the
-     * finalize() method. Once this method is called, the behavior
-     * of the IntelDeflater object is undefined.
-     */
-    @Override
-    public void end() {
-        synchronized (zsRef) {
-            long addr = zsRef.address();
-            zsRef.clear();
-            if (addr != 0) {
-                end(addr);
-                buf = null;
-            }
-        }
-    }
-
-    /**
-     * Closes the compressor when garbage is collected.
-     */
-    protected void finalize() {
-        end();
-    }
-
-    private void ensureOpen() {
-        assert Thread.holdsLock(zsRef);
-        if (zsRef.address() == 0)
-            throw new NullPointerException("IntelDeflater has been closed");
-    }
-
-    private static native void initIDs();
-    private native static long init(int level, int strategy, boolean nowrap);
-    private native static void setDictionary(long addr, byte[] b, int off, int len);
-    private native int deflateBytes(long addr, byte[] b, int off, int len,
-                                    int flush);
-    private native static int getAdler(long addr);
-    private native static long getBytesRead(long addr);
-    private native static long getBytesWritten(long addr);
-    private native static void reset(long addr);
-    private native static void end(long addr);
-}
-
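
The flush-mode documentation above describes the standard deflate loop; here it is against the plain java.util.zip.Deflater API, which IntelDeflater overrides method-for-method:

    import java.io.ByteArrayOutputStream;
    import java.util.zip.Deflater;

    public class DeflateLoop {
        public static void main(final String[] args) {
            final byte[] input = "blahblahblah".getBytes();
            final Deflater deflater = new Deflater(Deflater.BEST_SPEED, false);  // an IntelDeflater can be dropped in here
            deflater.setInput(input);
            deflater.finish();                               // no more input will be provided
            final ByteArrayOutputStream compressed = new ByteArrayOutputStream();
            final byte[] buf = new byte[64];
            while (!deflater.finished()) {
                final int n = deflater.deflate(buf);         // same as deflate(buf, 0, buf.length, NO_FLUSH)
                compressed.write(buf, 0, n);
            }
            deflater.end();
            System.out.println("compressed " + input.length + " -> " + compressed.size() + " bytes");
        }
    }
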
diff --git a/src/java/htsjdk/samtools/util/zip/ZStreamRef.java b/src/java/htsjdk/samtools/util/zip/ZStreamRef.java
deleted file mode 100644
index 8fa5f78..0000000
--- a/src/java/htsjdk/samtools/util/zip/ZStreamRef.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright (c) 2009, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.  Oracle designates this
- * particular file as subject to the "Classpath" exception as provided
- * by Oracle in the LICENSE file that accompanied this code.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-package htsjdk.samtools.util.zip;
-
-/**
- * Copied from OpenJDK 7.  The only change is the package name, because IntelDeflater needs this class and
- * the original is package-private to java.util.zip.
- *
- * A reference to the native zlib's z_stream structure.
- */
-
-class ZStreamRef {
-
-    private long address;
-    ZStreamRef (long address) {
-        this.address = address;
-    }
-
-    long address() {
-        return address;
-    }
-
-    void clear() {
-        address = 0;
-    }
-}
diff --git a/src/java/htsjdk/tribble/AsciiFeatureCodec.java b/src/java/htsjdk/tribble/AsciiFeatureCodec.java
deleted file mode 100644
index 4f123bc..0000000
--- a/src/java/htsjdk/tribble/AsciiFeatureCodec.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Copyright (c) 2007-2010 by The Broad Institute, Inc. and the Massachusetts Institute of Technology.
- * All Rights Reserved.
- *
- * This software is licensed under the terms of the GNU Lesser General Public License (LGPL), Version 2.1 which
- * is available at http://www.opensource.org/licenses/lgpl-2.1.php.
- *
- * THE SOFTWARE IS PROVIDED "AS IS." THE BROAD AND MIT MAKE NO REPRESENTATIONS OR WARRANTIES OF
- * ANY KIND CONCERNING THE SOFTWARE, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES
- * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT
- * OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE.  IN NO EVENT SHALL THE BROAD OR MIT, OR THEIR
- * RESPECTIVE TRUSTEES, DIRECTORS, OFFICERS, EMPLOYEES, AND AFFILIATES BE LIABLE FOR ANY DAMAGES OF
- * ANY KIND, INCLUDING, WITHOUT LIMITATION, INCIDENTAL OR CONSEQUENTIAL DAMAGES, ECONOMIC
- * DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER THE BROAD OR MIT SHALL
- * BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE
- * FOREGOING.
- */
-
-package htsjdk.tribble;
-
-import htsjdk.samtools.util.CloserUtil;
-import htsjdk.samtools.util.LocationAware;
-import htsjdk.tribble.readers.AsciiLineReader;
-import htsjdk.tribble.readers.AsciiLineReaderIterator;
-import htsjdk.tribble.readers.LineIterator;
-import htsjdk.tribble.readers.LineIteratorImpl;
-import htsjdk.tribble.readers.LineReaderUtil;
-import htsjdk.tribble.readers.PositionalBufferedStream;
-
-import java.io.IOException;
-import java.io.InputStream;
-
-/**
- * A convenience base class for codecs that want to read in features from ASCII lines.
- * <p/>
- * This class overrides the general stream-based decode methods and instead presents
- * Strings to the decode(String) and readHeader(LineReader) functions.
- *
- * @param <T> The feature type this codec reads
- */
-public abstract class AsciiFeatureCodec<T extends Feature> extends AbstractFeatureCodec<T, LineIterator> {
-    protected AsciiFeatureCodec(final Class<T> myClass) {
-        super(myClass);
-    }
-    
-    @Override
-    public void close(final LineIterator lineIterator) {
-        CloserUtil.close(lineIterator);
-    }
-
-    @Override
-    public boolean isDone(final LineIterator lineIterator) {
-        return !lineIterator.hasNext();
-    }
-
-    @Override
-    public LocationAware makeIndexableSourceFromStream(final InputStream bufferedInputStream) {
-        final PositionalBufferedStream pbs;
-        if (bufferedInputStream instanceof PositionalBufferedStream) {
-            pbs = (PositionalBufferedStream) bufferedInputStream;
-        } else {
-            pbs = new PositionalBufferedStream(bufferedInputStream);
-        }
-        return new AsciiLineReaderIterator(new AsciiLineReader(pbs));
-    }
-
-    @Override
-    public LineIterator makeSourceFromStream(final InputStream bufferedInputStream) {
-        return new LineIteratorImpl(LineReaderUtil.fromBufferedStream(bufferedInputStream));
-    }
-
-    /** 
-     * Convenience method.  Decoding ASCII files operates line-by-line, so this obviates the need to call
-     * {@link htsjdk.tribble.readers.LineIterator#next()} in implementing classes; instead, they implement
-     * {@link AsciiFeatureCodec#decode(String)}.
-     */
-    @Override
-    public T decode(final LineIterator lineIterator) {
-        return decode(lineIterator.next());
-    }
-
-    /** @see AsciiFeatureCodec#decode(htsjdk.tribble.readers.LineIterator) */
-    public abstract T decode(String s);
-
-    @Override
-    public FeatureCodecHeader readHeader(final LineIterator lineIterator) throws IOException {
-        // TODO: Track header end here, rather than assuming there isn't one.
-        return new FeatureCodecHeader(readActualHeader(lineIterator), FeatureCodecHeader.NO_HEADER_END);
-    }
-
-    /**
-     * Read and return the header, or null if there is no header.
-     *
-     * @return the actual header data in the file, or null if none is available
-     */
-    abstract public Object readActualHeader(final LineIterator reader);
-}
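
A hypothetical concrete subclass, sketched only to illustrate the decode(String)/readActualHeader contract described above; the three-column "chrom<TAB>start<TAB>end" format, the class names, and the ".intervals" extension are invented, and canDecode comes from the FeatureCodec interface rather than from this hunk:

    import htsjdk.tribble.AsciiFeatureCodec;
    import htsjdk.tribble.Feature;
    import htsjdk.tribble.readers.LineIterator;

    public class IntervalCodec extends AsciiFeatureCodec<IntervalCodec.IntervalFeature> {

        public IntervalCodec() {
            super(IntervalFeature.class);
        }

        @Override
        public IntervalFeature decode(final String line) {
            if (line.isEmpty() || line.startsWith("#")) {
                return null;                                  // no feature on blank or comment lines
            }
            final String[] fields = line.split("\t");
            return new IntervalFeature(fields[0], Integer.parseInt(fields[1]), Integer.parseInt(fields[2]));
        }

        @Override
        public Object readActualHeader(final LineIterator reader) {
            return null;                                      // this toy format has no header
        }

        @Override
        public boolean canDecode(final String path) {
            return path.endsWith(".intervals");               // codec selection by file extension
        }

        /** Minimal Feature implementation for the toy format. */
        public static class IntervalFeature implements Feature {
            private final String contig;
            private final int start;
            private final int end;

            IntervalFeature(final String contig, final int start, final int end) {
                this.contig = contig;
                this.start = start;
                this.end = end;
            }

            public String getContig() { return contig; }
            public String getChr()    { return contig; }      // deprecated accessor still present on the 2.x Feature interface
            public int getStart()     { return start; }
            public int getEnd()       { return end; }
        }
    }
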
diff --git a/src/java/htsjdk/tribble/TabixFeatureReader.java b/src/java/htsjdk/tribble/TabixFeatureReader.java
deleted file mode 100644
index af76a6a..0000000
--- a/src/java/htsjdk/tribble/TabixFeatureReader.java
+++ /dev/null
@@ -1,221 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2013 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.tribble;
-
-import htsjdk.samtools.util.BlockCompressedInputStream;
-import htsjdk.samtools.util.RuntimeIOException;
-import htsjdk.tribble.readers.LineReader;
-import htsjdk.tribble.readers.LineReaderUtil;
-import htsjdk.tribble.readers.PositionalBufferedStream;
-import htsjdk.tribble.readers.TabixIteratorLineReader;
-import htsjdk.tribble.readers.TabixReader;
-import htsjdk.tribble.util.ParsingUtils;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * @author Jim Robinson
- * @since 2/11/12
- */
-public class TabixFeatureReader<T extends Feature, SOURCE> extends AbstractFeatureReader<T, SOURCE> {
-
-    TabixReader tabixReader;
-    List<String> sequenceNames;
-
-    /**
-     *
-     * @param featureFile path to a feature file; can be a local file, an http URL, or an ftp URL
-     * @param codec the codec used to decode features from each line
-     * @throws IOException if the feature file or its index cannot be read
-     */
-    public TabixFeatureReader(final String featureFile, final AsciiFeatureCodec codec) throws IOException {
-        super(featureFile, codec);
-        tabixReader = new TabixReader(featureFile);
-        sequenceNames = new ArrayList<String>(tabixReader.getChromosomes());
-        readHeader();
-    }
-
-    /**
-     * @param featureFile path to a feature file; can be a local file, http url, or ftp url
-     * @param indexFile   path to the tabix index file
-     * @param codec       the codec used to decode features from the file
-     * @throws IOException if the feature file or its index cannot be read
-     */
-    public TabixFeatureReader(final String featureFile, final String indexFile, final AsciiFeatureCodec codec) throws IOException {
-        super(featureFile, codec);
-        tabixReader = new TabixReader(featureFile, indexFile);
-        sequenceNames = new ArrayList<String>(tabixReader.getChromosomes());
-        readHeader();
-    }
-
-
-    /**
-     * Read the header and store it in the {@code header} field.
-     *
-     * @throws IOException if the file cannot be opened
-     */
-    private void readHeader() throws IOException {
-        SOURCE source = null;
-        try {
-            source = codec.makeSourceFromStream(new PositionalBufferedStream(new BlockCompressedInputStream(ParsingUtils.openInputStream(path))));
-            header = codec.readHeader(source);
-        } catch (Exception e) {
-            throw new TribbleException.MalformedFeatureFile("Unable to parse header with error: " + e.getMessage(), path, e);
-        } finally {
-            if (source != null) {
-                codec.close(source);
-            }
-        }
-    }
-
-    @Override
-    public boolean hasIndex(){
-        return true;
-    }
-
-
-    public List<String> getSequenceNames() {
-        return sequenceNames;
-    }
-
-    /**
-     * Return an iterator over all features overlapping the given interval.
-     *
-     * @param chr   the sequence name to query
-     * @param start the 1-based start of the interval
-     * @param end   the 1-based, inclusive end of the interval
-     * @return an iterator over the overlapping features
-     * @throws IOException if the underlying data cannot be read
-     */
-    public CloseableTribbleIterator<T> query(final String chr, final int start, final int end) throws IOException {
-        final List<String> mp = getSequenceNames();
-        if (mp == null) throw new TribbleException.TabixReaderFailure("Unable to read sequence names from the tabix index.", path);
-        if (!mp.contains(chr)) {
-            return new EmptyIterator<T>();
-        }
-        final TabixIteratorLineReader lineReader = new TabixIteratorLineReader(tabixReader.query(tabixReader.chr2tid(chr), start - 1, end));
-        return new FeatureIterator<T>(lineReader, start - 1, end);
-    }
-
-    public CloseableTribbleIterator<T> iterator() throws IOException {
-        final InputStream is = new BlockCompressedInputStream(ParsingUtils.openInputStream(path));
-        final PositionalBufferedStream stream = new PositionalBufferedStream(is);
-        final LineReader reader = LineReaderUtil.fromBufferedStream(stream, LineReaderUtil.LineReaderOption.SYNCHRONOUS);
-        return new FeatureIterator<T>(reader, 0, Integer.MAX_VALUE);
-    }
-
-    public void close() throws IOException {
-        tabixReader.close();
-    }
-
-
-    class FeatureIterator<T extends Feature> implements CloseableTribbleIterator<T> {
-        private T currentRecord;
-        private LineReader lineReader;
-        private int start;
-        private int end;
-
-        public FeatureIterator(final LineReader lineReader, final int start, final int end) throws IOException {
-            this.lineReader = lineReader;
-            this.start = start;
-            this.end = end;
-            readNextRecord();
-        }
-
-
-        /**
-         * Advance to the next record in the query interval.
-         *
-         * @throws IOException if the next line cannot be read
-         */
-        protected void readNextRecord() throws IOException {
-            currentRecord = null;
-            String nextLine;
-            while (currentRecord == null && (nextLine = lineReader.readLine()) != null) {
-                final Feature f;
-                try {
-                    f = ((AsciiFeatureCodec)codec).decode(nextLine);
-                    if (f == null) {
-                        continue;   // Skip
-                    }
-                    if (f.getStart() > end) {
-                        return;    // Done
-                    }
-                    if (f.getEnd() <= start) {
-                        continue;   // Skip
-                    }
-
-                    currentRecord = (T) f;
-
-                } catch (TribbleException e) {
-                    e.setSource(path);
-                    throw e;
-                } catch (NumberFormatException e) {
-                    String error = "Error parsing line: " + nextLine;
-                    throw new TribbleException.MalformedFeatureFile(error, path, e);
-                }
-
-
-            }
-        }
-
-
-        public boolean hasNext() {
-            return currentRecord != null;
-        }
-
-        public T next() {
-            T ret = currentRecord;
-            try {
-                readNextRecord();
-            } catch (IOException e) {
-                throw new RuntimeIOException("Unable to read the next record, the last record was at " +
-                        ret.getContig() + ":" + ret.getStart() + "-" + ret.getEnd(), e);
-            }
-            return ret;
-
-        }
-
-        public void remove() {
-            throw new UnsupportedOperationException("Remove is not supported in Iterators");
-        }
-
-        public void close() {
-            lineReader.close();
-        }
-
-        public Iterator<T> iterator() {
-            return this;
-        }
-    }
-
-
-}
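As a usage sketch of the reader above (the file name and interval are illustrative, not from the library), querying a bgzipped, tabix-indexed VCF looks roughly like this:

    import htsjdk.tribble.CloseableTribbleIterator;
    import htsjdk.tribble.TabixFeatureReader;
    import htsjdk.tribble.readers.LineIterator;
    import htsjdk.variant.variantcontext.VariantContext;
    import htsjdk.variant.vcf.VCFCodec;

    public class TabixQueryExample {
        public static void main(final String[] args) throws Exception {
            // "sample.vcf.gz" must be bgzipped and have a tabix index (sample.vcf.gz.tbi) next to it.
            final TabixFeatureReader<VariantContext, LineIterator> reader =
                    new TabixFeatureReader<VariantContext, LineIterator>("sample.vcf.gz", new VCFCodec());
            try {
                // query() takes a 1-based, inclusive interval and returns only overlapping features
                final CloseableTribbleIterator<VariantContext> it = reader.query("chr1", 100000, 200000);
                while (it.hasNext()) {
                    final VariantContext vc = it.next();
                    System.out.println(vc.getContig() + ":" + vc.getStart());
                }
                it.close();
            } finally {
                reader.close();
            }
        }
    }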
diff --git a/src/java/htsjdk/tribble/readers/AsynchronousLineReader.java b/src/java/htsjdk/tribble/readers/AsynchronousLineReader.java
deleted file mode 100644
index 147214f..0000000
--- a/src/java/htsjdk/tribble/readers/AsynchronousLineReader.java
+++ /dev/null
@@ -1,98 +0,0 @@
-package htsjdk.tribble.readers;
-
-import htsjdk.samtools.util.CloserUtil;
-import htsjdk.tribble.TribbleException;
-
-import java.io.Reader;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.TimeUnit;
-
-/**
- * A LineReader implementation that delegates the work of reading and fetching lines to another thread.  The thread terminates when it
- * encounters EOF in the underlying reader, or when this LineReader is closed.
- *
- * @author mccowan
- */
-public class AsynchronousLineReader implements LineReader {
-    public static final int DEFAULT_NUMBER_LINES_BUFFER = 100;
-    
-    private final LongLineBufferedReader bufferedReader;
-    private final BlockingQueue<String> lineQueue;
-    private final Thread worker;
-    private volatile Throwable workerException = null;
-    private volatile boolean eofReached = false;
-
-    public AsynchronousLineReader(final Reader reader, final int lineReadAheadSize) {
-        bufferedReader = new LongLineBufferedReader(reader);
-        lineQueue = new LinkedBlockingQueue<String>(lineReadAheadSize);
-        worker = new Thread(new Worker());
-        worker.setDaemon(true);
-        worker.start();
-    }
-
-    public AsynchronousLineReader(final Reader reader) {
-        this(reader, DEFAULT_NUMBER_LINES_BUFFER);
-    }
-
-    @Override
-    public String readLine() {
-        try {
-            // Continually poll until we get a result, unless the underlying reader is finished.
-            for (; ; ) {
-                checkAndThrowIfWorkerException();
-                final String pollResult = this.lineQueue.poll(100, TimeUnit.MILLISECONDS); // Not ideal for small files.
-                if (pollResult == null) {
-                    if (eofReached) {
-                        checkAndThrowIfWorkerException();
-                        return lineQueue.poll(); // If there is nothing left, returns null as expected.  Otherwise, grabs next element.
-                    }
-                } else {
-                    return pollResult;
-                }
-            }
-        } catch (final InterruptedException e) {
-            throw new TribbleException("Line polling interrupted.", e);
-        }
-    }
-
-    private void checkAndThrowIfWorkerException() {
-        if (workerException != null) {
-            throw new TribbleException("Exception encountered in worker thread.", workerException);
-        }
-    }
-
-    @Override
-    public void close() {
-        this.worker.interrupt(); // Allow the worker to close gracefully.
-    } 
-
-    private class Worker implements Runnable {
-        @Override
-        public void run() {
-            try {
-                for (; ; ) {
-                    final String line = bufferedReader.readLine();
-                    if (line == null) {
-                        eofReached = true;
-                        break;
-                    } else {
-                        try {
-                            lineQueue.put(line);
-                        } catch (final InterruptedException e) {
-                            /**
-                             * A thread interruption is not an exceptional state: it means a {@link AsynchronousLineReader#close();} has 
-                             * been called, so shut down gracefully.
-                             */
-                            break;
-                        }
-                    }
-                }
-            } catch (final Throwable e) {
-                AsynchronousLineReader.this.workerException = e;
-            } finally {
-                CloserUtil.close(AsynchronousLineReader.this.bufferedReader);
-            }
-        }
-    }
-}
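A brief, hedged usage sketch (the file name is illustrative): the worker thread fills the queue in the background, and the caller drains it through readLine() until null signals EOF.

    import htsjdk.tribble.readers.AsynchronousLineReader;

    import java.io.FileReader;

    public class AsyncLineReaderExample {
        public static void main(final String[] args) throws Exception {
            // The worker thread reads ahead up to DEFAULT_NUMBER_LINES_BUFFER lines from this Reader.
            final AsynchronousLineReader reader = new AsynchronousLineReader(new FileReader("features.txt"));
            try {
                String line;
                while ((line = reader.readLine()) != null) {
                    System.out.println(line);
                }
            } finally {
                reader.close(); // interrupts the worker, which then closes the underlying reader
            }
        }
    }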
diff --git a/src/java/htsjdk/tribble/readers/LineReaderUtil.java b/src/java/htsjdk/tribble/readers/LineReaderUtil.java
deleted file mode 100644
index 3671ed0..0000000
--- a/src/java/htsjdk/tribble/readers/LineReaderUtil.java
+++ /dev/null
@@ -1,94 +0,0 @@
-package htsjdk.tribble.readers;
-
-import htsjdk.samtools.Defaults;
-import htsjdk.samtools.util.CloserUtil;
-import htsjdk.samtools.util.RuntimeIOException;
-import htsjdk.tribble.TribbleException;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.StringReader;
-
-/**
- * A collection of factories for generating {@link LineReader}s.
- *
- * @author mccowan
- */
-public class LineReaderUtil {
-    public enum LineReaderOption {
-        ASYNCHRONOUS, SYNCHRONOUS
-    }
-
-    /**
-     * Like {@link #fromBufferedStream(java.io.InputStream, LineReaderUtil.LineReaderOption)}, but the synchronicity
-     * option is determined by {@link htsjdk.samtools.Defaults}: if asynchronous I/O is enabled, an asynchronous line reader will be
-     * returned.
-     */
-    public static LineReader fromBufferedStream(final InputStream stream) {
-        return fromBufferedStream(stream, Defaults.USE_ASYNC_IO_FOR_TRIBBLE ? LineReaderOption.ASYNCHRONOUS : LineReaderOption.SYNCHRONOUS);
-    }
-
-    public static LineReader fromStringReader(final StringReader reader) {
-        return fromStringReader(reader, Defaults.USE_ASYNC_IO_FOR_TRIBBLE ? LineReaderOption.ASYNCHRONOUS : LineReaderOption.SYNCHRONOUS);
-    }
-
-    public static LineReader fromStringReader(final StringReader stringReader, final LineReaderOption lineReaderOption) {
-        switch (lineReaderOption) {
-            case ASYNCHRONOUS:
-                return new AsynchronousLineReader(stringReader);
-            case SYNCHRONOUS:
-                return new LineReader() {
-                    final LongLineBufferedReader reader = new LongLineBufferedReader(stringReader);
-
-                    @Override
-                    public String readLine() {
-                        try {
-                            return reader.readLine();
-                        } catch (IOException e) {
-                            throw new RuntimeIOException(e);
-                        }
-                    }
-
-                    @Override
-                    public void close() {
-                        CloserUtil.close(reader);
-                    }
-                };
-            default:
-                throw new TribbleException(String.format("Unrecognized LineReaderUtil option: %s.", lineReaderOption));
-        }
-    }
-
-    /**
-     * Convenience factory for composing a LineReader from an InputStream.
-     */
-    public static LineReader fromBufferedStream(final InputStream bufferedStream, final LineReaderOption option) {
-        final InputStreamReader bufferedInputStreamReader = new InputStreamReader(bufferedStream);
-        switch (option) {
-            case ASYNCHRONOUS:
-                return new AsynchronousLineReader(bufferedInputStreamReader);
-            case SYNCHRONOUS:
-                return new LineReader() {
-                    final LongLineBufferedReader reader = new LongLineBufferedReader(bufferedInputStreamReader);
-
-                    @Override
-                    public String readLine() {
-                        try {
-                            return reader.readLine();
-                        } catch (IOException e) {
-                            throw new RuntimeIOException(e);
-                        }
-                    }
-
-                    @Override
-                    public void close() {
-                        CloserUtil.close(reader);
-                    }
-                };
-            default:
-                throw new TribbleException(String.format("Unrecognized LineReaderUtil option: %s.", option));
-        }
-    }
-
-}
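A small sketch of the factory in use, assuming an illustrative input file; the single-argument overloads instead pick the implementation from htsjdk.samtools.Defaults.

    import htsjdk.tribble.readers.LineReader;
    import htsjdk.tribble.readers.LineReaderUtil;

    import java.io.BufferedInputStream;
    import java.io.FileInputStream;

    public class LineReaderUtilExample {
        public static void main(final String[] args) throws Exception {
            // Explicitly request the synchronous implementation regardless of Defaults.
            final LineReader reader = LineReaderUtil.fromBufferedStream(
                    new BufferedInputStream(new FileInputStream("records.txt")),
                    LineReaderUtil.LineReaderOption.SYNCHRONOUS);
            try {
                String line;
                while ((line = reader.readLine()) != null) {
                    System.out.println(line);
                }
            } finally {
                reader.close();
            }
        }
    }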
diff --git a/src/java/htsjdk/variant/bcf2/BCF2Codec.java b/src/java/htsjdk/variant/bcf2/BCF2Codec.java
deleted file mode 100644
index 9cbf1f5..0000000
--- a/src/java/htsjdk/variant/bcf2/BCF2Codec.java
+++ /dev/null
@@ -1,494 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package htsjdk.variant.bcf2;
-
-import htsjdk.tribble.BinaryFeatureCodec;
-import htsjdk.tribble.Feature;
-import htsjdk.tribble.FeatureCodecHeader;
-import htsjdk.tribble.TribbleException;
-import htsjdk.tribble.readers.LineIterator;
-import htsjdk.tribble.readers.LineIteratorImpl;
-import htsjdk.tribble.readers.LineReaderUtil;
-import htsjdk.tribble.readers.PositionalBufferedStream;
-import htsjdk.variant.utils.GeneralUtils;
-import htsjdk.variant.variantcontext.Allele;
-import htsjdk.variant.variantcontext.GenotypeBuilder;
-import htsjdk.variant.variantcontext.LazyGenotypesContext;
-import htsjdk.variant.variantcontext.VariantContext;
-import htsjdk.variant.variantcontext.VariantContextBuilder;
-import htsjdk.variant.variantcontext.VariantContextUtils;
-import htsjdk.variant.vcf.VCFCodec;
-import htsjdk.variant.vcf.VCFCompoundHeaderLine;
-import htsjdk.variant.vcf.VCFConstants;
-import htsjdk.variant.vcf.VCFContigHeaderLine;
-import htsjdk.variant.vcf.VCFHeader;
-import htsjdk.variant.vcf.VCFHeaderLineType;
-
-import java.io.ByteArrayInputStream;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Decode BCF2 files
- */
-public final class BCF2Codec extends BinaryFeatureCodec<VariantContext> {
-    private final static int ALLOWED_MAJOR_VERSION = 2;
-    private final static int MIN_MINOR_VERSION = 1;
-
-    private BCFVersion bcfVersion = null;
-
-    private VCFHeader header = null;
-
-    /**
-     * Maps offsets (encoded in BCF) into contig names (from header) for the CHROM field
-     */
-    private final ArrayList<String> contigNames = new ArrayList<String>();
-
-    /**
-     * Maps string offsets (encoded in BCF records) into the strings declared in the VCF header dictionary
-     *
-     * Initialized when processing the header
-     */
-    private ArrayList<String> dictionary;
-
-    /**
-     * Our decoder that reads low-level objects from the BCF2 records
-     */
-    private final BCF2Decoder decoder = new BCF2Decoder();
-
-    /**
-     * Provides some sanity checking on the header
-     */
-    private final static int MAX_HEADER_SIZE = 0x08000000;
-
-    /**
-     * Genotype field decoders that are initialized when the header is read
-     */
-    private BCF2GenotypeFieldDecoders gtFieldDecoders = null;
-
-    /**
-     * A cached array of GenotypeBuilders for efficient genotype decoding.
-     *
-     * Caching it allows us to avoid recreating this intermediate data
-     * structure each time we decode genotypes
-     */
-    private GenotypeBuilder[] builders = null;
-
-    // for error handling
-    private int recordNo = 0;
-    private int pos = 0;
-
-
-    // ----------------------------------------------------------------------
-    //
-    // Feature codec interface functions
-    //
-    // ----------------------------------------------------------------------
-
-    @Override
-    public Feature decodeLoc( final PositionalBufferedStream inputStream ) {
-        return decode(inputStream);
-    }
-
-    @Override
-    public VariantContext decode( final PositionalBufferedStream inputStream ) {
-        try {
-            recordNo++;
-            final VariantContextBuilder builder = new VariantContextBuilder();
-
-            final int sitesBlockSize = decoder.readBlockSize(inputStream);
-            final int genotypeBlockSize = decoder.readBlockSize(inputStream);
-
-            decoder.readNextBlock(sitesBlockSize, inputStream);
-            decodeSiteLoc(builder);
-            final SitesInfoForDecoding info = decodeSitesExtendedInfo(builder);
-
-            decoder.readNextBlock(genotypeBlockSize, inputStream);
-            createLazyGenotypesDecoder(info, builder);
-            return builder.fullyDecoded(true).make();
-        } catch ( IOException e ) {
-            throw new TribbleException("Failed to read BCF file", e);
-        }
-    }
-
-    @Override
-    public Class<VariantContext> getFeatureType() {
-        return VariantContext.class;
-    }
-
-    @Override
-    public FeatureCodecHeader readHeader( final PositionalBufferedStream inputStream ) {
-        try {
-            // note that this reads the magic as well, and so does double duty
-            bcfVersion = BCFVersion.readBCFVersion(inputStream);
-            if ( bcfVersion == null )
-                error("Input stream does not contain a BCF encoded file; BCF magic header info not found");
-
-            if ( bcfVersion.getMajorVersion() != ALLOWED_MAJOR_VERSION )
-                error("BCF2Codec can only process BCF2 files, this file has major version " + bcfVersion.getMajorVersion());
-            if ( bcfVersion.getMinorVersion() < MIN_MINOR_VERSION )
-                error("BCF2Codec can only process BCF2 files with minor version >= " + MIN_MINOR_VERSION + " but this file has minor version " + bcfVersion.getMinorVersion());
-
-            if ( GeneralUtils.DEBUG_MODE_ENABLED ) {
-                System.err.println("Parsing data stream with BCF version " + bcfVersion);
-            }
-
-            final int headerSizeInBytes = BCF2Type.INT32.read(inputStream);
-
-            if ( headerSizeInBytes <= 0 || headerSizeInBytes > MAX_HEADER_SIZE) // no bigger than 128 MB
-                error("BCF2 header has invalid length: " + headerSizeInBytes + " must be > 0 and <= "+ MAX_HEADER_SIZE);
-
-            final byte[] headerBytes = new byte[headerSizeInBytes];
-            if ( inputStream.read(headerBytes) != headerSizeInBytes )
-                error("Couldn't read all of the bytes specified in the header length = " + headerSizeInBytes);
-
-            final PositionalBufferedStream bps = new PositionalBufferedStream(new ByteArrayInputStream(headerBytes));
-            final LineIterator lineIterator = new LineIteratorImpl(LineReaderUtil.fromBufferedStream(bps, LineReaderUtil.LineReaderOption.SYNCHRONOUS));
-            final VCFCodec headerParser = new VCFCodec();
-            this.header = (VCFHeader) headerParser.readActualHeader(lineIterator);
-            bps.close();
-        } catch ( IOException e ) {
-            throw new TribbleException("I/O error while reading BCF2 header", e);
-        }
-
-        // create the contig offsets
-        if ( ! header.getContigLines().isEmpty() ) {
-            contigNames.clear();
-            for ( final VCFContigHeaderLine contig : header.getContigLines()) {
-                if ( contig.getID() == null || contig.getID().equals("") )
-                    error("found a contig with an invalid ID " + contig);
-                contigNames.add(contig.getID());
-            }
-        } else {
-            error("Didn't find any contig lines in BCF2 file header");
-        }
-
-        // create the string dictionary
-        dictionary = parseDictionary(header);
-
-        // prepare the genotype field decoders
-        gtFieldDecoders = new BCF2GenotypeFieldDecoders(header);
-
-        // create and initialize the genotype builder array
-        final int nSamples = header.getNGenotypeSamples();
-        builders = new GenotypeBuilder[nSamples];
-        for ( int i = 0; i < nSamples; i++ ) {
-            builders[i] = new GenotypeBuilder(header.getGenotypeSamples().get(i));
-        }
-
-        // position right before next line (would be right before first real record byte at end of header)
-        return new FeatureCodecHeader(header, inputStream.getPosition());
-    }
-
-    @Override
-    public boolean canDecode( final String path ) {
-        FileInputStream fis = null;
-        try {
-            fis = new FileInputStream(path);
-            final BCFVersion version = BCFVersion.readBCFVersion(fis);
-            return version != null && version.getMajorVersion() == ALLOWED_MAJOR_VERSION;
-        } catch ( FileNotFoundException e ) {
-            return false;
-        } catch ( IOException e ) {
-            return false;
-        } finally {
-            try {
-                if ( fis != null ) fis.close();
-            } catch ( IOException e ) {
-                // do nothing
-            }
-        }
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // implicit block
-    //
-    // The first four fields of each BCF record are inline, untyped encoded data:
-    //
-    // 4 byte integer chrom offset
-    // 4 byte integer start
-    // 4 byte integer ref length
-    // 4 byte float qual
-    //
-    // --------------------------------------------------------------------------------
-
-    /**
-     * Decode the site location (contig, start, stop) from this class's decoder and store it in the builder
-     *
-     * @param builder the builder to populate
-     */
-    private final void decodeSiteLoc(final VariantContextBuilder builder) throws IOException {
-        final int contigOffset = decoder.decodeInt(BCF2Type.INT32);
-        final String contig = lookupContigName(contigOffset);
-        builder.chr(contig);
-
-        this.pos = decoder.decodeInt(BCF2Type.INT32) + 1; // GATK is one based, BCF2 is zero-based
-        final int refLength = decoder.decodeInt(BCF2Type.INT32);
-        builder.start((long)pos);
-        builder.stop((long)(pos + refLength - 1)); // minus one because GATK has closed intervals but BCF2 is open
-    }
-
-    /**
-     * Decode the site-level extended data (qual, alleles, filters, info fields) from this class's decoder
-     *
-     * @param builder the builder to populate
-     * @return the site info needed to decode the genotypes block
-     */
-    private final SitesInfoForDecoding decodeSitesExtendedInfo(final VariantContextBuilder builder) throws IOException {
-        final Object qual = decoder.decodeSingleValue(BCF2Type.FLOAT);
-        if ( qual != null ) {
-            builder.log10PError(((Double)qual) / -10.0);
-        }
-
-        final int nAlleleInfo = decoder.decodeInt(BCF2Type.INT32);
-        final int nFormatSamples = decoder.decodeInt(BCF2Type.INT32);
-        final int nAlleles = nAlleleInfo >> 16;
-        final int nInfo = nAlleleInfo & 0x0000FFFF;
-        final int nFormatFields = nFormatSamples >> 24;
-        final int nSamples = nFormatSamples & 0x00FFFFF;
-
-        if ( header.getNGenotypeSamples() != nSamples )
-            error("Reading BCF2 files with different numbers of samples per record " +
-                    "is not currently supported.  Saw " + header.getNGenotypeSamples() +
-                    " samples in header but have a record with " + nSamples + " samples");
-
-        decodeID(builder);
-        final List<Allele> alleles = decodeAlleles(builder, pos, nAlleles);
-        decodeFilter(builder);
-        decodeInfo(builder, nInfo);
-
-        final SitesInfoForDecoding info = new SitesInfoForDecoding(nFormatFields, nSamples, alleles);
-        if ( ! info.isValid() )
-            error("Sites info is malformed: " + info);
-        return info;
-    }
-
-    protected final static class SitesInfoForDecoding {
-        final int nFormatFields;
-        final int nSamples;
-        final List<Allele> alleles;
-
-        private SitesInfoForDecoding(final int nFormatFields, final int nSamples, final List<Allele> alleles) {
-            this.nFormatFields = nFormatFields;
-            this.nSamples = nSamples;
-            this.alleles = alleles;
-        }
-
-        public boolean isValid() {
-            return nFormatFields >= 0 &&
-                    nSamples >= 0 &&
-                    alleles != null && ! alleles.isEmpty() && alleles.get(0).isReference();
-        }
-
-        @Override
-        public String toString() {
-            return String.format("nFormatFields = %d, nSamples = %d, alleles = %s", nFormatFields, nSamples, alleles);
-        }
-    }
-
-    /**
-     * Decode the id field in this BCF2 file and store it in the builder
-     * @param builder
-     */
-    private void decodeID( final VariantContextBuilder builder ) throws IOException {
-        final String id = (String)decoder.decodeTypedValue();
-
-        if ( id == null )
-            builder.noID();
-        else
-            builder.id(id);
-    }
-
-    /**
-     * Decode the alleles from this BCF2 file and put the results in builder
-     * @param builder
-     * @param pos
-     * @param nAlleles
-     * @return the alleles
-     */
-    private List<Allele> decodeAlleles( final VariantContextBuilder builder, final int pos, final int nAlleles ) throws IOException {
-        // TODO -- probably need inline decoder for efficiency here (no sense in going bytes -> string -> vector -> bytes)
-        List<Allele> alleles = new ArrayList<Allele>(nAlleles);
-        String ref = null;
-
-        for ( int i = 0; i < nAlleles; i++ ) {
-            final String alleleBases = (String)decoder.decodeTypedValue();
-
-            final boolean isRef = i == 0;
-            final Allele allele = Allele.create(alleleBases, isRef);
-            if ( isRef ) ref = alleleBases;
-
-            alleles.add(allele);
-        }
-        assert ref != null;
-
-        builder.alleles(alleles);
-
-        assert !ref.isEmpty();
-
-        return alleles;
-    }
-
-    /**
-     * Decode the filter field of this BCF2 file and store the result in the builder
-     * @param builder
-     */
-    private void decodeFilter( final VariantContextBuilder builder ) throws IOException {
-        final Object value = decoder.decodeTypedValue();
-
-        if ( value == null )
-            builder.unfiltered();
-        else {
-            if ( value instanceof Integer ) {
-                // fast path for single integer result
-                final String filterString = getDictionaryString((Integer)value);
-                if ( VCFConstants.PASSES_FILTERS_v4.equals(filterString))
-                    builder.passFilters();
-                else
-                    builder.filter(filterString);
-            } else {
-                for ( final int offset : (List<Integer>)value )
-                    builder.filter(getDictionaryString(offset));
-            }
-        }
-    }
-
-    /**
-     * Loop over the info field key / value pairs in this BCF2 file and decode them into the builder
-     *
-     * @param builder
-     * @param numInfoFields
-     */
-    private void decodeInfo( final VariantContextBuilder builder, final int numInfoFields ) throws IOException {
-        if ( numInfoFields == 0 )
-            // fast path, don't bother doing any work if there are no fields
-            return;
-
-        final Map<String, Object> infoFieldEntries = new HashMap<String, Object>(numInfoFields);
-        for ( int i = 0; i < numInfoFields; i++ ) {
-            final String key = getDictionaryString();
-            Object value = decoder.decodeTypedValue();
-            final VCFCompoundHeaderLine metaData = VariantContextUtils.getMetaDataForField(header, key);
-            if ( metaData.getType() == VCFHeaderLineType.Flag ) value = true; // special case for flags
-            infoFieldEntries.put(key, value);
-        }
-
-        builder.attributes(infoFieldEntries);
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Decoding Genotypes
-    //
-    // --------------------------------------------------------------------------------
-
-    /**
-     * Create the lazy loader for the genotypes data, and store it in the builder
-     * so that the VC will be able to decode on demand the genotypes data
-     *
-     * @param siteInfo
-     * @param builder
-     */
-    private void createLazyGenotypesDecoder( final SitesInfoForDecoding siteInfo,
-                                             final VariantContextBuilder builder ) {
-        if (siteInfo.nSamples > 0) {
-            final LazyGenotypesContext.LazyParser lazyParser =
-                    new BCF2LazyGenotypesDecoder(this, siteInfo.alleles, siteInfo.nSamples, siteInfo.nFormatFields, builders);
-
-            final LazyData lazyData = new LazyData(header, siteInfo.nFormatFields, decoder.getRecordBytes());
-            final LazyGenotypesContext lazy = new LazyGenotypesContext(lazyParser, lazyData, header.getNGenotypeSamples());
-
-            // did we resort the sample names?  If so, we need to load the genotype data
-            if ( !header.samplesWereAlreadySorted() )
-                lazy.decode();
-
-            builder.genotypesNoValidation(lazy);
-        }
-    }
-
-    public static class LazyData {
-        final public VCFHeader header;
-        final public int nGenotypeFields;
-        final public byte[] bytes;
-
-        public LazyData(final VCFHeader header, final int nGenotypeFields, final byte[] bytes) {
-            this.header = header;
-            this.nGenotypeFields = nGenotypeFields;
-            this.bytes = bytes;
-        }
-    }
-
-    private final String getDictionaryString() throws IOException {
-        return getDictionaryString((Integer) decoder.decodeTypedValue());
-    }
-
-    protected final String getDictionaryString(final int offset) {
-        return dictionary.get(offset);
-    }
-
-    /**
-     * Translate the contig offset, as encoded in the BCF file, into the actual string
-     * name of the contig from the dictionary
-     *
-     * @param contigOffset the offset into the contig name list read from the header
-     * @return the contig name
-     */
-    private final String lookupContigName( final int contigOffset ) {
-        return contigNames.get(contigOffset);
-    }
-
-    private final ArrayList<String> parseDictionary(final VCFHeader header) {
-        final ArrayList<String> dict = BCF2Utils.makeDictionary(header);
-
-        // if we got here we never found a dictionary, or there are no elements in the dictionary
-        if ( dict.isEmpty() )
-            error("Dictionary header element was absent or empty");
-
-        return dict;
-    }
-
-    /**
-     * @return the VCFHeader we found in this BCF2 file
-     */
-    protected VCFHeader getHeader() {
-        return header;
-    }
-
-    protected BCF2GenotypeFieldDecoders.Decoder getGenotypeFieldDecoder(final String field) {
-        return gtFieldDecoders.getDecoder(field);
-    }
-
-    private void error(final String message) throws RuntimeException {
-        throw new TribbleException(String.format("%s, at record %d with position %d:", message, recordNo, pos));
-    }
-}
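BCF2Codec is normally driven through AbstractFeatureReader rather than by hand; a hedged sketch of that pattern follows (the file name is illustrative, and the codec targets the BCF 2.x layout checked above).

    import htsjdk.tribble.AbstractFeatureReader;
    import htsjdk.tribble.readers.PositionalBufferedStream;
    import htsjdk.variant.bcf2.BCF2Codec;
    import htsjdk.variant.variantcontext.VariantContext;
    import htsjdk.variant.vcf.VCFHeader;

    public class BCF2ReadExample {
        public static void main(final String[] args) throws Exception {
            // requireIndex = false: iterate the whole file rather than querying intervals
            final AbstractFeatureReader<VariantContext, PositionalBufferedStream> reader =
                    AbstractFeatureReader.getFeatureReader("example.bcf", new BCF2Codec(), false);
            try {
                final VCFHeader header = (VCFHeader) reader.getHeader();
                System.out.println("Samples: " + header.getGenotypeSamples());
                for (final VariantContext vc : reader.iterator()) {
                    System.out.println(vc.getContig() + ":" + vc.getStart() + " " + vc.getAlleles());
                }
            } finally {
                reader.close();
            }
        }
    }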
diff --git a/src/java/htsjdk/variant/example/PrintVariantsExample.java b/src/java/htsjdk/variant/example/PrintVariantsExample.java
deleted file mode 100755
index 997f0ee..0000000
--- a/src/java/htsjdk/variant/example/PrintVariantsExample.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2016 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- */
-package htsjdk.variant.example;
-
-import htsjdk.samtools.Defaults;
-import htsjdk.samtools.util.Log;
-import htsjdk.samtools.util.ProgressLogger;
-import htsjdk.samtools.util.zip.DeflaterFactory;
-import htsjdk.tribble.AbstractFeatureReader;
-import htsjdk.tribble.readers.LineIterator;
-import htsjdk.variant.variantcontext.VariantContext;
-import htsjdk.variant.variantcontext.writer.Options;
-import htsjdk.variant.variantcontext.writer.VariantContextWriter;
-import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder;
-import htsjdk.variant.vcf.VCFCodec;
-import htsjdk.variant.vcf.VCFHeader;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.InetAddress;
-import java.util.Arrays;
-import java.util.stream.Collectors;
-
-/**
- * This is an example program showing how to use Feature readers and (optionally) writers.
- * It is also useful for measuring read/write time.
- * An example invocation is:
- * java -cp dist/htsjdk-2.1.1.jar htsjdk.variant.example.PrintVariantsExample in.vcf out.vcf
- * <p>
- * Arguments:
- * - the first argument is the input file (VCF)
- * - the second argument is optional and is the name of the output file (nothing gets written if this argument is missing)
- */
-public final class PrintVariantsExample {
-    private PrintVariantsExample() {
-    }
-
-    private static final Log log = Log.getInstance(PrintVariantsExample.class);
-
-    public static void main(final String[] args) throws IOException {
-        if (args.length < 1) {
-            System.out.println("Usage: " + PrintVariantsExample.class.getCanonicalName() + " inFile [outFile]");
-            System.exit(1);
-        }
-        final File inputFile = new File(args[0]);
-        final File outputFile = args.length >= 2 ? new File(args[1]) : null;
-
-        final long start = System.currentTimeMillis();
-
-        log.info("Start with args:" + Arrays.toString(args));
-        printConfigurationInfo();
-
-        try(final VariantContextWriter writer = outputFile == null ? null : new VariantContextWriterBuilder().setOutputFile(outputFile).setOutputFileType(VariantContextWriterBuilder.OutputType.VCF).unsetOption(Options.INDEX_ON_THE_FLY).build();
-            final AbstractFeatureReader<VariantContext, LineIterator> reader = AbstractFeatureReader.getFeatureReader(inputFile.getAbsolutePath(), new VCFCodec(), false)){
-
-            log.info(reader.getClass().getSimpleName() + " hasIndex " + reader.hasIndex());
-            if (writer != null){
-                log.info(writer.getClass().getSimpleName());
-                writer.writeHeader((VCFHeader) reader.getHeader());
-            }
-
-            final ProgressLogger pl = new ProgressLogger(log, 1000000);
-            for (final VariantContext vc : reader.iterator()) {
-                if (writer != null){
-                    writer.add(vc);
-                }
-                pl.record(vc.getContig(), vc.getStart());
-            }
-        }
-
-        final long end = System.currentTimeMillis();
-        log.info(String.format("Done. Elapsed time %.3f seconds", (end - start) / 1000.0));
-    }
-
-    private static void printConfigurationInfo() throws IOException {
-        log.info("Executing as " +
-                System.getProperty("user.name") + '@' + InetAddress.getLocalHost().getHostName() +
-                " on " + System.getProperty("os.name") + ' ' + System.getProperty("os.version") +
-                ' ' + System.getProperty("os.arch") + "; " + System.getProperty("java.vm.name") +
-                ' ' + System.getProperty("java.runtime.version") +
-                ' ' + (DeflaterFactory.usingIntelDeflater() ? "IntelDeflater" : "JdkDeflater"));
-
-        log.info(Defaults.allDefaults().entrySet().stream().map(e -> e.getKey() + ':' + e.getValue()).collect(Collectors.<String>joining(" ")));
-    }
-}
diff --git a/src/java/htsjdk/variant/variantcontext/VariantContextBuilder.java b/src/java/htsjdk/variant/variantcontext/VariantContextBuilder.java
deleted file mode 100644
index 94eebca..0000000
--- a/src/java/htsjdk/variant/variantcontext/VariantContextBuilder.java
+++ /dev/null
@@ -1,500 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package htsjdk.variant.variantcontext;
-
-import htsjdk.variant.vcf.VCFConstants;
-
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-/**
- * <p>Builder class for <code>VariantContext</code>.</p>
- *
- * <p>Some basic assumptions here:</p>
- * <ol>
- * <li> data isn't protectively copied.  If you provide an attribute map to
- * the builder, and modify it later, the builder will see this and so will any
- * resulting variant contexts.  It's best not to modify collections provided
- * to a builder.</li>
- *
- * <li> the system uses the standard builder model, allowing the simple construction idiom:
- *<blockquote>
- *   <code>builder.source("a").genotypes(gc).id("x").make()</code> => <code>VariantContext</code>
- *</blockquote></li>
- *<li>The best way to copy a VariantContext is:
- *<blockquote>
- *   <code>new VariantContextBuilder(vc).make()</code> => a copy of VC
- *</blockquote>
- * <li> validation of arguments is done during the final <code>make()</code> call, so a
- * <code>VariantContextBuilder</code> can exist in an inconsistent state as long as those issues
- * are resolved before the call to <code>make()</code> is issued.
- *</ol>
- * @author depristo
- */
-public class VariantContextBuilder {
-    // required fields
-    private boolean fullyDecoded = false;
-    private String source = null;
-    private String contig = null;
-    private long start = -1;
-    private long stop = -1;
-    private Collection<Allele> alleles = null;
-
-    // optional -> these are set to the appropriate default value
-    private String ID = VCFConstants.EMPTY_ID_FIELD;
-    private GenotypesContext genotypes = GenotypesContext.NO_GENOTYPES;
-    private double log10PError = VariantContext.NO_LOG10_PERROR;
-    private Set<String> filters = null;
-    private Map<String, Object> attributes = null;
-    private boolean attributesCanBeModified = false;
-
-    /** enum of what must be validated */
-    final private EnumSet<VariantContext.Validation> toValidate = EnumSet.noneOf(VariantContext.Validation.class);
-
-    /**
-     * Create an empty VariantContextBuilder where all values adopt their default values.  Note that
-     * source, chr, start, stop, and alleles must eventually be filled in, or the resulting VariantContext
-     * will throw an error.
-     */
-    public VariantContextBuilder() {}
-
-    /**
-     * Create an empty VariantContextBuilder where all values adopt their default values, but the bare minimum
-     * of information (source, chr, start, stop, and alleles) has been provided to start.
-     */
-    public VariantContextBuilder(final String source, final String contig, final long start, final long stop, final Collection<Allele> alleles) {
-        this.source = source;
-        this.contig = contig;
-        this.start = start;
-        this.stop = stop;
-        this.alleles = alleles;
-        this.attributes = Collections.emptyMap(); // immutable
-        toValidate.add(VariantContext.Validation.ALLELES);
-    }
-
-    /**
-     * Returns a new builder based on parent -- the new VC will have all fields initialized
-     * to their corresponding values in parent.  This is the best way to create a derived VariantContext
-     *
-     * @param parent  Cannot be null
-     */
-    public VariantContextBuilder(final VariantContext parent) {
-        if ( parent == null ) throw new IllegalArgumentException("BUG: VariantContextBuilder parent argument cannot be null in VariantContextBuilder");
-        this.alleles = parent.getAlleles();
-        this.attributes = parent.getAttributes();
-        this.attributesCanBeModified = false;
-        this.contig = parent.getContig();
-        this.filters = parent.getFiltersMaybeNull();
-        this.genotypes = parent.getGenotypes();
-        this.ID = parent.getID();
-        this.log10PError = parent.getLog10PError();
-        this.source = parent.getSource();
-        this.start = parent.getStart();
-        this.stop = parent.getEnd();
-        this.fullyDecoded = parent.isFullyDecoded();
-    }
-
-    public VariantContextBuilder(final VariantContextBuilder parent) {
-        if ( parent == null ) throw new IllegalArgumentException("BUG: VariantContext parent argument cannot be null in VariantContextBuilder");
-        this.alleles = parent.alleles;
-        this.attributesCanBeModified = false;
-        this.contig = parent.contig;
-        this.genotypes = parent.genotypes;
-        this.ID = parent.ID;
-        this.log10PError = parent.log10PError;
-        this.source = parent.source;
-        this.start = parent.start;
-        this.stop = parent.stop;
-        this.fullyDecoded = parent.fullyDecoded;
-
-        this.attributes(parent.attributes);
-        this.filters(parent.filters);
-    }
-
-    public VariantContextBuilder copy() {
-        return new VariantContextBuilder(this);
-    }
-
-    /**
-     * Tells this builder to use this collection of alleles for the resulting VariantContext
-     *
-     * @param alleles
-     * @return this builder
-     */
-    public VariantContextBuilder alleles(final Collection<Allele> alleles) {
-        this.alleles = alleles;
-        toValidate.add(VariantContext.Validation.ALLELES);
-        return this;
-    }
-
-    public VariantContextBuilder alleles(final List<String> alleleStrings) {
-        final List<Allele> alleles = new ArrayList<Allele>(alleleStrings.size());
-
-        for ( int i = 0; i < alleleStrings.size(); i++ ) {
-            alleles.add(Allele.create(alleleStrings.get(i), i == 0));
-        }
-
-        return alleles(alleles);
-    }
-
-    public VariantContextBuilder alleles(final String ... alleleStrings) {
-        return alleles(Arrays.asList(alleleStrings));
-    }
-
-    public List<Allele> getAlleles() {
-        return new ArrayList<Allele>(alleles);
-    }
-
-    /**
-     * Tells this builder to use this map of attributes for the resulting <code>VariantContext</code>
-     *
-     * Attributes can be <code>null</code> -> meaning there are no attributes.  After
-     * calling this routine the builder assumes it can modify the attributes
-     * object here, if subsequent calls are made to set attribute values
-     *
-     * Value for each attribute must be of a type that implements {@link Serializable} or else
-     * serialization will fail.
-     *
-     * @param attributes
-     */
-    public VariantContextBuilder attributes(final Map<String, Object> attributes) {
-        if (attributes != null) {
-            this.attributes = attributes;
-        }
-        else {
-            this.attributes = new HashMap<String, Object>();
-        }
-
-        this.attributesCanBeModified = true;
-        return this;
-    }
-
-    /**
-     * Puts the key -> value mapping into this builder's attributes
-     *
-     * @param key key for the attribute
-     * @param value value for the attribute (must be of a type that implements {@link Serializable} or else serialization will fail)
-     */
-    public VariantContextBuilder attribute(final String key, final Object value) {
-        makeAttributesModifiable();
-        attributes.put(key, value);
-        return this;
-    }
-
-    /**
-     * Removes key if present in the attributes
-     *
-     * @param key  key to remove
-     * @return
-     */
-    public VariantContextBuilder rmAttribute(final String key) {
-        makeAttributesModifiable();
-        attributes.remove(key);
-        return this;
-    }
-
-    /**
-     * Removes list of keys if present in the attributes
-     *
-     * @param keys  list of keys to remove
-     * @return
-     */
-    public VariantContextBuilder rmAttributes(final List<String> keys) {
-        makeAttributesModifiable();
-        for ( final String key : keys )
-            attributes.remove(key);
-        return this;
-    }
-
-    /**
-     * Makes the attributes field modifiable.  In many cases attributes is just a pointer to an immutable
-     * collection, so methods that want to add / remove records require the attributes to be copied to a
-     * modifiable map before changes can be made.
-     */
-    private void makeAttributesModifiable() {
-        if ( ! attributesCanBeModified ) {
-            this.attributesCanBeModified = true;
-            if (attributes == null) {
-            	this.attributes = new HashMap<String, Object>();
-            } else {
-            	this.attributes = new HashMap<String, Object>(attributes);
-            }
-        }
-    }
-
-    /**
-     * This builder's filters are set to this value
-     *
-     * filters can be <code>null</code> -> meaning there are no filters
-     * @param filters
-     */
-    public VariantContextBuilder filters(final Set<String> filters) {
-        this.filters = filters;
-        return this;
-    }
-
-    /**
-     * {@link #filters}
-     *
-     * @param filters
-     * @return
-     */
-    public VariantContextBuilder filters(final String ... filters) {
-        filters(new LinkedHashSet<String>(Arrays.asList(filters)));
-        return this;
-    }
-
-    public VariantContextBuilder filter(final String filter) {
-        if ( this.filters == null ) this.filters = new LinkedHashSet<String>(1);
-        this.filters.add(filter);
-        return this;
-    }
-
-    /**
-     * Tells this builder that the resulting VariantContext should have PASS filters
-     *
-     * @return
-     */
-    public VariantContextBuilder passFilters() {
-        return filters(VariantContext.PASSES_FILTERS);
-    }
-
-    /**
-     * Tells this builder that the resulting VariantContext be unfiltered
-     *
-     * @return
-     */
-    public VariantContextBuilder unfiltered() {
-        this.filters = null;
-        return this;
-    }
-
-    /**
-     * Tells this builder that the resulting <code>VariantContext</code> should use this <code>GenotypesContext</code>.
-     *
-     * Note that genotypes can be <code>null</code> -> meaning there are no genotypes
-     *
-     * @param genotypes
-     */
-    public VariantContextBuilder genotypes(final GenotypesContext genotypes) {
-        this.genotypes = genotypes;
-        if ( genotypes != null )
-            toValidate.add(VariantContext.Validation.GENOTYPES);
-        return this;
-    }
-
-    public VariantContextBuilder genotypesNoValidation(final GenotypesContext genotypes) {
-        this.genotypes = genotypes;
-        return this;
-    }
-
-    /**
-     * Tells this builder that the resulting <code>VariantContext</code> should use a <code>GenotypesContext</code> containing these genotypes
-     *
-     * Note that genotypes can be <code>null</code>, meaning there are no genotypes
-     *
-     * @param genotypes
-     */
-    public VariantContextBuilder genotypes(final Collection<Genotype> genotypes) {
-        return genotypes(GenotypesContext.copy(genotypes));
-    }
-
-    /**
-     * Tells this builder that the resulting <code>VariantContext</code> should use a <code>GenotypesContext</code> containing these genotypes
-     * @param genotypes
-     */
-    public VariantContextBuilder genotypes(final Genotype ... genotypes) {
-        return genotypes(GenotypesContext.copy(Arrays.asList(genotypes)));
-    }
-
-    /**
-     * Tells this builder that the resulting VariantContext should not contain any GenotypesContext
-     */
-    public VariantContextBuilder noGenotypes() {
-        this.genotypes = null;
-        return this;
-    }
-
-    /**
-     * Tells us that the resulting VariantContext should have ID
-     * @param ID
-     * @return
-     */
-    public VariantContextBuilder id(final String ID) {
-        this.ID = ID;
-        return this;
-    }
-
-    /**
-     * Tells us that the resulting VariantContext should not have an ID
-     * @return
-     */
-    public VariantContextBuilder noID() {
-        return id(VCFConstants.EMPTY_ID_FIELD);
-    }
-
-    /**
-     * Tells us that the resulting VariantContext should have log10PError
-     * @param log10PError
-     * @return
-     */
-    public VariantContextBuilder log10PError(final double log10PError) {
-        this.log10PError = log10PError;
-        return this;
-    }
-
-    /**
-     * Tells us that the resulting VariantContext should have source field set to source
-     * @param source
-     * @return
-     */
-    public VariantContextBuilder source(final String source) {
-        this.source = source;
-        return this;
-    }
-
-    /**
-     * Tells us that the resulting VariantContext should have the specified location
-     * @param contig
-     * @param start
-     * @param stop
-     * @return
-     */
-    public VariantContextBuilder loc(final String contig, final long start, final long stop) {
-        this.contig = contig;
-        this.start = start;
-        this.stop = stop;
-        toValidate.add(VariantContext.Validation.ALLELES);
-        return this;
-    }
-
-    /**
-     * Tells us that the resulting VariantContext should have the specified contig chr
-     * @param contig
-     * @return
-     */
-    public VariantContextBuilder chr(final String contig) {
-        this.contig = contig;
-        return this;
-    }
-
-    /**
-     * Tells us that the resulting VariantContext should have the specified contig start
-     * @param start
-     * @return
-     */
-    public VariantContextBuilder start(final long start) {
-        this.start = start;
-        toValidate.add(VariantContext.Validation.ALLELES);
-        return this;
-    }
-
-    /**
-     * Tells us that the resulting VariantContext should have the specified contig stop
-     * @param stop
-     * @return
-     */
-    public VariantContextBuilder stop(final long stop) {
-        this.stop = stop;
-        return this;
-    }
-
-    /**
-     * @see #computeEndFromAlleles(java.util.List, int, int) with endForSymbolicAlleles == -1
-     */
-    public VariantContextBuilder computeEndFromAlleles(final List<Allele> alleles, final int start) {
-        return computeEndFromAlleles(alleles, start, -1);
-    }
-
-    /**
-     * Compute the end position for this VariantContext from the alleles themselves,
-     * and assign the computed stop position to this builder.
-     *
-     * @param alleles the list of alleles to consider.  The reference allele must be the first one
-     * @param start the known start position of this event
-     * @param endForSymbolicAlleles the end position to use if any of the alleles is symbolic.  Can be -1
-     *                              if none is expected, but an error will be thrown if one is found
-     * @return this builder
-     */
-    public VariantContextBuilder computeEndFromAlleles(final List<Allele> alleles, final int start, final int endForSymbolicAlleles) {
-        stop(VariantContextUtils.computeEndFromAlleles(alleles, start, endForSymbolicAlleles));
-        return this;
-    }
-
-    /**
-     * @return true if this builder contains fully decoded data
-     *
-     * See VariantContext for more information
-     */
-    public boolean isFullyDecoded() {
-        return fullyDecoded;
-    }
-
-    /**
-     * Sets this builder's fully decoded state to true.
-     *
-     * A fully decoded builder indicates that all fields are represented by their
-     * proper java objects (e.g., Integer(10) not "10").
-     *
-     * See VariantContext for more information
-     *
-     * @param isFullyDecoded
-     */
-    public VariantContextBuilder fullyDecoded(boolean isFullyDecoded) {
-        this.fullyDecoded = isFullyDecoded;
-        return this;
-    }
-
-    /**
-     * Takes all of the builder data provided up to this point and instantiates
-     * a freshly allocated VariantContext from it.  The VariantContext is validated
-     * as appropriate; if validation fails an exception is thrown, otherwise the
-     * new VariantContext is returned.
-     *
-     * Note that this function can be called multiple times to create multiple
-     * VariantContexts from the same builder.
-     */
-    public VariantContext make() {
-        return make(false);
-    }
-
-    public VariantContext make(final boolean leaveModifyableAsIs) {
-        if(!leaveModifyableAsIs) attributesCanBeModified = false;
-
-        return new VariantContext(source, ID, contig, start, stop, alleles,
-                genotypes, log10PError, filters, attributes,
-                fullyDecoded, toValidate);
-    }
-}
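A short sketch, with illustrative values, of the builder idiom described in the class javadoc above:

    import htsjdk.variant.variantcontext.Allele;
    import htsjdk.variant.variantcontext.VariantContext;
    import htsjdk.variant.variantcontext.VariantContextBuilder;

    import java.util.Arrays;
    import java.util.List;

    public class VariantContextBuilderExample {
        public static void main(final String[] args) {
            // Reference allele first, as required by validation and computeEndFromAlleles.
            final List<Allele> alleles = Arrays.asList(Allele.create("A", true), Allele.create("T", false));

            final VariantContext vc = new VariantContextBuilder("demo", "chr1", 1000, 1000, alleles)
                    .id("rs0000001")          // illustrative ID
                    .log10PError(-3.0)        // equivalent to QUAL 30
                    .passFilters()
                    .attribute("DP", 42)      // illustrative INFO attribute
                    .make();

            // Copy-and-modify round trip, as recommended in the javadoc above.
            final VariantContext modified = new VariantContextBuilder(vc).noID().make();
            System.out.println(vc + " -> " + modified);
        }
    }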
diff --git a/src/java/htsjdk/variant/variantcontext/writer/VariantContextWriterBuilder.java b/src/java/htsjdk/variant/variantcontext/writer/VariantContextWriterBuilder.java
deleted file mode 100644
index 165c272..0000000
--- a/src/java/htsjdk/variant/variantcontext/writer/VariantContextWriterBuilder.java
+++ /dev/null
@@ -1,525 +0,0 @@
-/*
-* Copyright (c) 2014 The Broad Institute
-*
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-*
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-*
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package htsjdk.variant.variantcontext.writer;
-
-import htsjdk.samtools.Defaults;
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.util.BlockCompressedOutputStream;
-import htsjdk.samtools.util.IOUtil;
-import htsjdk.samtools.util.Md5CalculatingOutputStream;
-import htsjdk.samtools.util.RuntimeIOException;
-import htsjdk.tribble.AbstractFeatureReader;
-import htsjdk.tribble.index.IndexCreator;
-import htsjdk.tribble.index.tabix.TabixFormat;
-import htsjdk.tribble.index.tabix.TabixIndexCreator;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.OutputStream;
-import java.util.EnumSet;
-
-/*
- * Created with IntelliJ IDEA.
- * User: thibault
- * Date: 3/7/14
- * Time: 2:07 PM
- */
-/**
- * @author thibault
- * 
- * <p>
- * Provides methods for creating <code>VariantContextWriter</code>s using the Builder pattern.
- * Replaces <code>VariantContextWriterFactory</code>.
- * </p>
- * <p>
- * The caller must choose an output file or an output stream for the <code>VariantContextWriter</code> to write to.
- * When a file is chosen, the output stream is created implicitly based on Defaults and options passed to the builder.
- * When a stream is chosen, it is passed unchanged to the <code>VariantContextWriter</code>.
- * </p>
- * <p>
- * Example: Create a series of files with buffering and indexing on the fly.
- * Determine the appropriate file type based on filename.
- * </p>
-
-   <pre>
-   VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
-       .setReferenceDictionary(refDict)
-       .setOption(Options.INDEX_ON_THE_FLY)
-       .setBuffer(8192);
- 
-   VariantContextWriter sample1_writer = builder
-       .setOutputFile("sample1.vcf")
-       .build();
-   VariantContextWriter sample2_writer = builder
-       .setOutputFile("sample2.bcf")
-       .build();
-   VariantContextWriter sample3_writer = builder
-       .setOutputFile("sample3.vcf.bgzf")
-       .build();
-   </pre>
-   
-   <p>
- * Example: Explicitly turn off buffering and explicitly set the file type
- * </p>
- * 
- * <pre>
-   VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
-       .setReferenceDictionary(refDict)
-       .setOption(Options.INDEX_ON_THE_FLY)
-       .unsetBuffering();
- 
-   VariantContextWriter sample1_writer = builder
-       .setOutputFile("sample1.custom_extension")
-       .setOutputFileType(OutputType.VCF)
-       .build();
-   VariantContextWriter sample2_writer = builder
-       .setOutputFile("sample2.custom_extension")
-       .setOutputFileType(OutputType.BLOCK_COMPRESSED_VCF)
-       .build();
-   </pre>
- */
-public class VariantContextWriterBuilder {
-    public static final EnumSet<Options> DEFAULT_OPTIONS = EnumSet.of(Options.INDEX_ON_THE_FLY);
-    public static final EnumSet<Options> NO_OPTIONS = EnumSet.noneOf(Options.class);
-
-    public enum OutputType {
-        UNSPECIFIED,
-        VCF,
-        BCF,
-        BLOCK_COMPRESSED_VCF,
-        VCF_STREAM,
-        BCF_STREAM
-    }
-
-    public static final EnumSet<OutputType> FILE_TYPES = EnumSet.of(OutputType.VCF, OutputType.BCF, OutputType.BLOCK_COMPRESSED_VCF);
-    public static final EnumSet<OutputType> STREAM_TYPES = EnumSet.of(OutputType.VCF_STREAM, OutputType.BCF_STREAM);
-
-    private SAMSequenceDictionary refDict = null;
-    private OutputType outType = OutputType.UNSPECIFIED;
-    private File outFile = null;
-    private OutputStream outStream = null;
-    private IndexCreator idxCreator = null;
-    private int bufferSize = Defaults.BUFFER_SIZE;
-    private boolean createMD5 = Defaults.CREATE_MD5;
-    protected EnumSet<Options> options = DEFAULT_OPTIONS.clone();
-
-    /**
-     * Default constructor.  Adds <code>USE_ASYNC_IO</code> to the Options if it is present in Defaults.
-     */
-    public VariantContextWriterBuilder() {
-        if (Defaults.USE_ASYNC_IO_FOR_TRIBBLE) {
-            options.add(Options.USE_ASYNC_IO);
-        }
-    }
-
-    /**
-     * Set the reference dictionary to be used by <code>VariantContextWriter</code>s created by this builder.
-     *
-     * @param refDict the reference dictionary
-     * @return this <code>VariantContextWriterBuilder</code>
-     */
-    public VariantContextWriterBuilder setReferenceDictionary(final SAMSequenceDictionary refDict) {
-        this.refDict = refDict;
-        return this;
-    }
-
-    /**
-     * Set the output file for the next <code>VariantContextWriter</code> created by this builder.
-     * Determines file type implicitly from the filename.
-     *
-     * @param outFile the file the <code>VariantContextWriter</code> will write to
-     * @return this <code>VariantContextWriterBuilder</code>
-     */
-    public VariantContextWriterBuilder setOutputFile(final File outFile) {
-        this.outFile = outFile;
-        this.outStream = null;
-        determineOutputTypeFromFilename();
-        return this;
-    }
-
-    /**
-     * Set the output file for the next <code>VariantContextWriter</code> created by this builder.
-     * Determines file type implicitly from the filename.
-     *
-     * @param outFile the file the <code>VariantContextWriter</code> will write to
-     * @return this <code>VariantContextWriterBuilder</code>
-     */
-    public VariantContextWriterBuilder setOutputFile(final String outFile) {
-        this.outFile = new File(outFile);
-        this.outStream = null;
-        determineOutputTypeFromFilename();
-        return this;
-    }
-
-    /**
-     * Set the output file type for the next <code>VariantContextWriter</code> created by this builder.
-     *
-     * @param outType the type of file the <code>VariantContextWriter</code> will write to
-     * @return this <code>VariantContextWriterBuilder</code>
-     */
-    public VariantContextWriterBuilder setOutputFileType(final OutputType outType) {
-        if (!FILE_TYPES.contains(outType))
-            throw new IllegalArgumentException("Must choose a file type, not other output types.");
-
-        if (this.outFile == null || this.outStream != null)
-            throw new IllegalArgumentException("Cannot set a file type if the output is not to a file.");
-
-        this.outType = outType;
-        return this;
-    }
-
-    /**
-     * Set the output VCF stream for the next <code>VariantContextWriter</code> created by this builder.
-     * If buffered writing is desired, caller must provide some kind of buffered <code>OutputStream</code>.
-     *
-     * @param outStream the output stream to write to
-     * @return this <code>VariantContextWriterBuilder</code>
-     */
-    public VariantContextWriterBuilder setOutputVCFStream(final OutputStream outStream) {
-        this.outStream = outStream;
-        this.outFile = null;
-        this.outType = OutputType.VCF_STREAM;
-        return this;
-    }
-
-    /**
-     * Set the output BCF stream for the next <code>VariantContextWriter</code> created by this builder.
-     * If buffered writing is desired, caller must provide some kind of buffered <code>OutputStream</code>.
-     *
-     * @param outStream the output stream to write to
-     * @return this <code>VariantContextWriterBuilder</code>
-     */
-    public VariantContextWriterBuilder setOutputBCFStream(final OutputStream outStream) {
-        this.outStream = outStream;
-        this.outFile = null;
-        this.outType = OutputType.BCF_STREAM;
-        return this;
-    }
-
-    /**
-     * Set the output stream (VCF, by default) for the next <code>VariantContextWriter</code> created by this builder.
-     * If buffered writing is desired, caller must provide some kind of buffered <code>OutputStream</code>.
-     *
-     * @param outStream the output stream to write to
-     * @return this VariantContextWriterBuilder
-     */
-    public VariantContextWriterBuilder setOutputStream(final OutputStream outStream) {
-        return setOutputVCFStream(outStream);
-    }
-
-    /**
-     * Set an IndexCreator for the next <code>VariantContextWriter</code> created by this builder.
-     *
-     * @param idxCreator the <code>IndexCreator</code> to use
-     * @return this <code>VariantContextWriterBuilder</code>
-     */
-    public VariantContextWriterBuilder setIndexCreator(final IndexCreator idxCreator) {
-        this.idxCreator = idxCreator;
-        return this;
-    }
-
-    /**
-     * Do not pass an <code>IndexCreator</code> to the next <code>VariantContextWriter</code> created by this builder.
-     *
-     * @return this <code>VariantContextWriterBuilder</code>
-     */
-    public VariantContextWriterBuilder clearIndexCreator() {
-        this.idxCreator = null;
-        return this;
-    }
-
-    /**
-     * Set a buffer size for the file output stream passed to the next <code>VariantContextWriter</code> created by this builder.
-     * Set to 0 for no buffering.
-     * Does not affect OutputStreams passed directly to <code>VariantContextWriterBuilder</code>.
-     *
-     * @param bufferSize the buffer size to use
-     * @return this <code>VariantContextWriterBuilder</code>
-     */
-    public VariantContextWriterBuilder setBuffer(final int bufferSize) {
-        this.bufferSize = bufferSize;
-        return this;
-    }
-
-    /**
-     * Do not use buffering in the next <code>VariantContextWriter</code> created by this builder.
-     * Does not affect <code>OutputStream</code>s passed directly to <code>VariantContextWriterBuilder</code>.
-     *
-     * @return this <code>VariantContextWriterBuilder</code>
-     */
-    public VariantContextWriterBuilder unsetBuffering() {
-        this.bufferSize = 0;
-        return this;
-    }
-
-    /**
-     * Choose whether to also create an MD5 digest file for the next <code>VariantContextWriter</code> created by this builder.
-     *
-     * @param createMD5 boolean, <code>true</code> to create an MD5 digest
-     * @return this <code>VariantContextWriterBuilder</code>
-     */
-    public VariantContextWriterBuilder setCreateMD5(final boolean createMD5) {
-        this.createMD5 = createMD5;
-        return this;
-    }
-
-    /**
-     * Create an MD5 digest file for the next <code>VariantContextWriter</code> created by this builder.
-     *
-     * @return this <code>VariantContextWriterBuilder</code>
-     */
-    public VariantContextWriterBuilder setCreateMD5() {
-        return setCreateMD5(true);
-    }
-
-    /**
-     * Don't create an MD5 digest file for the next <code>VariantContextWriter</code> created by this builder.
-     *
-     * @return this <code>VariantContextWriterBuilder</code>
-     */
-    public VariantContextWriterBuilder unsetCreateMD5() {
-        return setCreateMD5(false);
-    }
-
-    /**
-     * Replace the set of <code>Options</code> for the <code>VariantContextWriterBuilder</code> with a new set.
-     *
-     * @param options the complete set of options to use
-     * @return this <code>VariantContextWriterBuilder</code>
-     */
-    public VariantContextWriterBuilder setOptions(final EnumSet<Options> options) {
-        this.options = options;
-        return this;
-    }
-
-    /**
-     * Add one option to the set of <code>Options</code> for the <code>VariantContextWriterBuilder</code>, if it's not already present.
-     *
-     * @param option the option to set
-     * @return this <code>VariantContextWriterBuilder</code>
-     */
-    public VariantContextWriterBuilder setOption(final Options option) {
-        this.options.add(option);
-        return this;
-    }
-
-    /**
-     * Remove one option from the set of <code>Options</code> for the <code>VariantContextWriterBuilder</code>, if it's present.
-     *
-     * @param option the option to unset
-     * @return this <code>VariantContextWriterBuilder</code>
-     */
-    public VariantContextWriterBuilder unsetOption(final Options option) {
-        this.options.remove(option);
-        return this;
-    }
-
-    /**
-     * Set or unset an option depending on the given boolean
-     * @param option the option to modify
-     * @param setIt true to set the option, false to unset it.
-     * @return this <code>VariantContextWriterBuilder</code>
-     */
-    public VariantContextWriterBuilder modifyOption(final Options option, final boolean setIt) {
-        return (setIt) ? this.setOption(option) : this.unsetOption(option);
-    }
-
-    /**
-     * Add one option to the set of default <code>Options</code> that will be used as the initial set of options
-     * for all VariantContextWriterBuilders created after this call.
-     *
-     * @param option the option to set
-     */
-    public static void setDefaultOption(final Options option) {
-        VariantContextWriterBuilder.DEFAULT_OPTIONS.add(option);
-    }
-
-    /**
-     * Remove an option from the set of default <code>Options</code> that will be used as the initial set of options
-     * for all VariantContextWriterBuilders created after this call.
-     *
-     * @param option the option to unset
-     */
-    public static void unsetDefaultOption(final Options option) {
-        VariantContextWriterBuilder.DEFAULT_OPTIONS.remove(option);
-    }
-
-    /**
-     * Remove all options from the set of <code>Options</code> for the <code>VariantContextWriterBuilder</code>.
-     *
-     * @return this VariantContextWriterBuilder
-     */
-    public VariantContextWriterBuilder clearOptions() {
-        this.options = NO_OPTIONS.clone();
-        return this;
-    }
-
-    /**
-     * Used for testing; tests if the option is set
-     * @param option the option to test
-     * @return true if the option is set, false otherwise.
-     */
-    boolean isOptionSet(final Options option) {
-        return this.options.contains(option);
-    }
-
-    /**
-     * Validate and build the <code>VariantContextWriter</code>.
-     *
-     * @return the <code>VariantContextWriter</code> as specified by previous method calls
-     * @throws RuntimeIOException if the writer is configured to write to a file, and the corresponding path does not exist.
-     * @throws IllegalArgumentException if no output file or stream is specified.
-     * @throws IllegalArgumentException if <code>Options.INDEX_ON_THE_FLY</code> is specified and no reference dictionary is provided.
-     * @throws IllegalArgumentException if <code>Options.INDEX_ON_THE_FLY</code> is specified and a stream output is specified.
-     */
-    public VariantContextWriter build() {
-        VariantContextWriter writer = null;
-
-        // don't allow FORCE_BCF to modify the outType state
-        OutputType typeToBuild = this.outType;
-
-        if (this.options.contains(Options.FORCE_BCF)) {
-            if (FILE_TYPES.contains(this.outType))
-                typeToBuild = OutputType.BCF;
-            else if (STREAM_TYPES.contains(this.outType))
-                typeToBuild = OutputType.BCF_STREAM;
-        }
-
-        OutputStream outStreamFromFile = this.outStream;
-        if (FILE_TYPES.contains(this.outType)) {
-            try {
-                outStreamFromFile = IOUtil.maybeBufferOutputStream(new FileOutputStream(outFile), bufferSize);
-            } catch (final FileNotFoundException e) {
-                throw new RuntimeIOException("File not found: " + outFile, e);
-            }
-
-            if (createMD5)
-                outStreamFromFile = new Md5CalculatingOutputStream(outStreamFromFile, new File(outFile.getAbsolutePath() + ".md5"));
-        }
-
-        switch (typeToBuild) {
-            case UNSPECIFIED:
-                throw new IllegalArgumentException("Must specify file or stream output type.");
-            case VCF:
-                if ((refDict == null) && (options.contains(Options.INDEX_ON_THE_FLY)))
-                    throw new IllegalArgumentException("A reference dictionary is required for creating Tribble indices on the fly");
-
-                writer = createVCFWriter(outFile, outStreamFromFile);
-                break;
-            case BLOCK_COMPRESSED_VCF:
-                if (refDict == null)
-                    idxCreator = new TabixIndexCreator(TabixFormat.VCF);
-                else
-                    idxCreator = new TabixIndexCreator(refDict, TabixFormat.VCF);
-
-                writer = createVCFWriter(outFile, new BlockCompressedOutputStream(outStreamFromFile, outFile));
-                break;
-            case BCF:
-                if ((refDict == null) && (options.contains(Options.INDEX_ON_THE_FLY)))
-                    throw new IllegalArgumentException("A reference dictionary is required for creating Tribble indices on the fly");
-
-                writer = createBCFWriter(outFile, outStreamFromFile);
-                break;
-            case VCF_STREAM:
-                if (options.contains(Options.INDEX_ON_THE_FLY))
-                    throw new IllegalArgumentException("VCF index creation not supported for stream output.");
-
-                writer = createVCFWriter(null, outStream);
-                break;
-            case BCF_STREAM:
-                if (options.contains(Options.INDEX_ON_THE_FLY))
-                    throw new IllegalArgumentException("BCF index creation not supported for stream output.");
-
-                writer = createBCFWriter(null, outStream);
-                break;
-        }
-
-        if (this.options.contains(Options.USE_ASYNC_IO))
-            writer = new AsyncVariantContextWriter(writer, AsyncVariantContextWriter.DEFAULT_QUEUE_SIZE);
-
-        return writer;
-     }
-
-    private void determineOutputTypeFromFilename() {
-        if (isBCF(this.outFile)) {
-            this.outType = OutputType.BCF;
-        } else if (isCompressedVCF(this.outFile)) {
-            this.outType = OutputType.BLOCK_COMPRESSED_VCF;
-        } else if (isVCF(this.outFile)) {
-            this.outType = OutputType.VCF;
-        }
-        else {
-            this.outType = OutputType.UNSPECIFIED;
-        }
-    }
-
-    private boolean isVCF(final File outFile) {
-        return outFile != null && outFile.getName().endsWith(".vcf");
-    }
-
-    private boolean isBCF(final File outFile) {
-        return outFile != null && outFile.getName().endsWith(".bcf");
-    }
-
-    private boolean isCompressedVCF(final File outFile) {
-        if (outFile == null)
-            return false;
-
-        return AbstractFeatureReader.hasBlockCompressedExtension(outFile);
-    }
-
-    private VariantContextWriter createVCFWriter(final File writerFile, final OutputStream writerStream) {
-        if (idxCreator == null) {
-            return new VCFWriter(writerFile, writerStream, refDict,
-                    options.contains(Options.INDEX_ON_THE_FLY),
-                    options.contains(Options.DO_NOT_WRITE_GENOTYPES),
-                    options.contains(Options.ALLOW_MISSING_FIELDS_IN_HEADER),
-                    options.contains(Options.WRITE_FULL_FORMAT_FIELD));
-        }
-        else {
-            return new VCFWriter(writerFile, writerStream, refDict, idxCreator,
-                    options.contains(Options.INDEX_ON_THE_FLY),
-                    options.contains(Options.DO_NOT_WRITE_GENOTYPES),
-                    options.contains(Options.ALLOW_MISSING_FIELDS_IN_HEADER),
-                    options.contains(Options.WRITE_FULL_FORMAT_FIELD));
-        }
-    }
-
-    private VariantContextWriter createBCFWriter(final File writerFile, final OutputStream writerStream) {
-        if (idxCreator == null) {
-            return new BCF2Writer(writerFile, writerStream, refDict,
-                    options.contains(Options.INDEX_ON_THE_FLY),
-                    options.contains(Options.DO_NOT_WRITE_GENOTYPES));
-        }
-        else {
-            return new BCF2Writer(writerFile, writerStream, refDict, idxCreator,
-                    options.contains(Options.INDEX_ON_THE_FLY),
-                    options.contains(Options.DO_NOT_WRITE_GENOTYPES));
-        }
-    }
-}
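
A minimal, illustrative sketch of using the builder above to create a writer and write a header; the output file name is an example, and clearOptions() is used here only so that no reference dictionary is required:

    import htsjdk.variant.variantcontext.writer.VariantContextWriter;
    import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder;
    import htsjdk.variant.vcf.VCFHeader;
    import java.io.File;

    public class WriterBuilderExample {
        public static void main(final String[] args) {
            final VariantContextWriter writer = new VariantContextWriterBuilder()
                    .clearOptions()                          // drop INDEX_ON_THE_FLY so no dictionary is needed
                    .setOutputFile(new File("example.vcf"))  // ".vcf" selects OutputType.VCF implicitly
                    .build();
            writer.writeHeader(new VCFHeader());             // a minimal, sites-only header
            writer.close();
        }
    }
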
diff --git a/src/java/htsjdk/variant/variantcontext/writer/VariantContextWriterFactory.java b/src/java/htsjdk/variant/variantcontext/writer/VariantContextWriterFactory.java
deleted file mode 100644
index 6f3511d..0000000
--- a/src/java/htsjdk/variant/variantcontext/writer/VariantContextWriterFactory.java
+++ /dev/null
@@ -1,282 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package htsjdk.variant.variantcontext.writer;
-
-import htsjdk.samtools.Defaults;
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.util.BlockCompressedOutputStream;
-import htsjdk.samtools.util.IOUtil;
-import htsjdk.samtools.util.RuntimeIOException;
-import htsjdk.tribble.AbstractFeatureReader;
-import htsjdk.tribble.index.IndexCreator;
-import htsjdk.tribble.index.tabix.TabixFormat;
-import htsjdk.tribble.index.tabix.TabixIndexCreator;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.OutputStream;
-import java.util.EnumSet;
-
-/**
- * Factory methods to create VariantContext writers
- *
- * @author depristo
- * @since 5/12
- *
- * @deprecated Replaced by {@link VariantContextWriterBuilder}
- */
-@Deprecated
-public class VariantContextWriterFactory {
-
-    public static final EnumSet<Options> DEFAULT_OPTIONS = EnumSet.of(Options.INDEX_ON_THE_FLY);
-    public static final EnumSet<Options> NO_OPTIONS = EnumSet.noneOf(Options.class);
-
-    static {
-        if (Defaults.USE_ASYNC_IO_FOR_TRIBBLE) {
-            DEFAULT_OPTIONS.add(Options.USE_ASYNC_IO);
-        }
-    }
-
-    private VariantContextWriterFactory() {}
-
-    public static VariantContextWriter create(final File location, final SAMSequenceDictionary refDict) {
-        return create(location, openOutputStream(location), refDict, DEFAULT_OPTIONS);
-    }
-
-    public static VariantContextWriter create(final File location, final SAMSequenceDictionary refDict, final EnumSet<Options> options) {
-        return create(location, openOutputStream(location), refDict, options);
-    }
-
-    /**
-     * @param output If buffered writing is desired, caller must provide some kind of buffered OutputStream.
-     */
-    public static VariantContextWriter create(final File location,
-                                              final OutputStream output,
-                                              final SAMSequenceDictionary refDict) {
-        return create(location, output, refDict, DEFAULT_OPTIONS);
-    }
-
-    /**
-     * @param output If buffered writing is desired, caller must provide some kind of buffered OutputStream.
-     */
-    public static VariantContextWriter create(final OutputStream output,
-                                              final SAMSequenceDictionary refDict,
-                                              final EnumSet<Options> options) {
-        return create(null, output, refDict, options);
-    }
-
-    /**
-     * @param location Note that this parameter is used to produce intelligent log messages, and for naming the index,
-     *                 but does not control where the file is written
-     * @param output This is where the BCF is actually written. If buffered writing is desired, caller must provide
-     *               some kind of buffered OutputStream.
-     */
-    public static VariantContextWriter createBcf2(final File location,
-                                                  final OutputStream output,
-                                                  final SAMSequenceDictionary refDict,
-                                                  final EnumSet<Options> options) {
-        return maybeWrapWithAsyncWriter(new BCF2Writer(location, output, refDict,
-                options.contains(Options.INDEX_ON_THE_FLY),
-                options.contains(Options.DO_NOT_WRITE_GENOTYPES)), options);
-    }
-
-    /**
-     * @param location Note that this parameter is used to produce intelligent log messages, and for naming the index,
-     *                 but does not control where the file is written
-     * @param output This is where the BCF is actually written.  If buffered writing is desired, caller must provide
-     *               some kind of buffered OutputStream.
-     */
-    public static VariantContextWriter createBcf2(final File location,
-                                                  final OutputStream output,
-                                                  final SAMSequenceDictionary refDict,
-                                                  final IndexCreator indexCreator,
-                                                  final EnumSet<Options> options) {
-        return maybeWrapWithAsyncWriter(new BCF2Writer(location, output, refDict, indexCreator,
-                options.contains(Options.INDEX_ON_THE_FLY),
-                options.contains(Options.DO_NOT_WRITE_GENOTYPES)), options);
-    }
-
-    /**
-     * @param location Note that this parameter is used to produce intelligent log messages, and for naming the index,
-     *                 but does not control where the file is written
-     * @param output This is where the VCF is actually written. If buffered writing is desired, caller must provide
-     *               some kind of buffered OutputStream.
-     */
-    public static VariantContextWriter createVcf(final File location,
-                                                 final OutputStream output,
-                                                 final SAMSequenceDictionary refDict,
-                                                 final EnumSet<Options> options) {
-        return maybeWrapWithAsyncWriter(new VCFWriter(location, output, refDict,
-                options.contains(Options.INDEX_ON_THE_FLY),
-                options.contains(Options.DO_NOT_WRITE_GENOTYPES),
-                options.contains(Options.ALLOW_MISSING_FIELDS_IN_HEADER),
-                options.contains(Options.WRITE_FULL_FORMAT_FIELD)), options);
-    }
-
-    /**
-     * @param location Note that this parameter is used to produce intelligent log messages, and for naming the index,
-     *                 but does not control where the file is written
-     * @param output This is where the VCF is actually written.  If buffered writing is desired, caller must provide
-     *               some kind of buffered OutputStream.
-     */
-    public static VariantContextWriter createVcf(final File location,
-                                                 final OutputStream output,
-                                                 final SAMSequenceDictionary refDict,
-                                                 final IndexCreator indexCreator,
-                                                 final EnumSet<Options> options) {
-        return maybeWrapWithAsyncWriter(new VCFWriter(location, output, refDict, indexCreator,
-                options.contains(Options.INDEX_ON_THE_FLY),
-                options.contains(Options.DO_NOT_WRITE_GENOTYPES),
-                options.contains(Options.ALLOW_MISSING_FIELDS_IN_HEADER),
-                options.contains(Options.WRITE_FULL_FORMAT_FIELD)), options);
-    }
-
-    /**
-     * @param location Note that this parameter is used to produce intelligent log messages,
-     *                 but does not control where the file is written
-     * @param output This is where the VCF is actually written.  If buffered writing is desired, caller must provide
-     *               some kind of buffered OutputStream.
-     */
-    public static VariantContextWriter createBlockCompressedVcf(final File location,
-                                                                final OutputStream output,
-                                                                final SAMSequenceDictionary refDict,
-                                                                final EnumSet<Options> options) {
-        final TabixIndexCreator indexCreator;
-        if (options.contains(Options.INDEX_ON_THE_FLY)) {
-            indexCreator = new TabixIndexCreator(refDict, TabixFormat.VCF);
-        } else {
-            indexCreator = null;
-        }
-        return maybeWrapWithAsyncWriter(new VCFWriter(location, BlockCompressedOutputStream.maybeBgzfWrapOutputStream(location, output),
-                refDict, indexCreator,
-                options.contains(Options.INDEX_ON_THE_FLY),
-                options.contains(Options.DO_NOT_WRITE_GENOTYPES),
-                options.contains(Options.ALLOW_MISSING_FIELDS_IN_HEADER),
-                options.contains(Options.WRITE_FULL_FORMAT_FIELD)), options);
-    }
-
-    /**
-     * @param location Note that this parameter is used to produce intelligent log messages,
-     *                 but does not control where the file is written
-     * @param output This is where the VCF is actually written. If buffered writing is desired, caller must provide
-     *               some kind of buffered OutputStream.
-     */
-    public static VariantContextWriter createBlockCompressedVcf(final File location,
-                                                                final OutputStream output,
-                                                                final SAMSequenceDictionary refDict,
-                                                                final IndexCreator indexCreator,
-                                                                final EnumSet<Options> options) {
-        return maybeWrapWithAsyncWriter(new VCFWriter(location, BlockCompressedOutputStream.maybeBgzfWrapOutputStream(location, output),
-                refDict, indexCreator,
-                options.contains(Options.INDEX_ON_THE_FLY),
-                options.contains(Options.DO_NOT_WRITE_GENOTYPES),
-                options.contains(Options.ALLOW_MISSING_FIELDS_IN_HEADER),
-                options.contains(Options.WRITE_FULL_FORMAT_FIELD)), options);
-    }
-
-    public static VariantContextWriter create(final File location,
-        final OutputStream output,
-        final SAMSequenceDictionary refDict,
-        final EnumSet<Options> options) {
-
-        if (isBCFOutput(location, options)) {
-            return createBcf2(location, output, refDict, options);
-        } else if (isCompressedVcf(location)) {
-            return createBlockCompressedVcf(location, output, refDict, options);
-        } else {
-            return createVcf(location, output, refDict, options);
-        }
-    }
-
-    /**
-     * @param output If buffered writing is desired, caller must provide some kind of buffered OutputStream.
-     */
-    public static VariantContextWriter create(final File location,
-                                              final OutputStream output,
-                                              final SAMSequenceDictionary refDict,
-                                              final IndexCreator indexCreator,
-                                              final EnumSet<Options> options) {
-
-        if (isBCFOutput(location, options)) {
-            return createBcf2(location, output, refDict, indexCreator, options);
-        } else if (isCompressedVcf(location)) {
-            return createBlockCompressedVcf(location, output, refDict, indexCreator, options);
-        } else {
-            return createVcf(location, output, refDict, indexCreator, options);
-        }
-    }
-
-    private static VariantContextWriter maybeWrapWithAsyncWriter(final VariantContextWriter writer,
-                                                                 final EnumSet<Options> options) {
-        if (options.contains(Options.USE_ASYNC_IO)) {
-            return new AsyncVariantContextWriter(writer, AsyncVariantContextWriter.DEFAULT_QUEUE_SIZE);
-        }
-        else return writer;
-    }
-
-    /**
-     * Should we output a BCF file based solely on the name of the file at location?
-     *
-     * @param location the output file to inspect
-     * @return true if the file name indicates BCF output
-     */
-    public static boolean isBCFOutput(final File location) {
-        return isBCFOutput(location, EnumSet.noneOf(Options.class));
-    }
-
-    public static boolean isBCFOutput(final File location, final EnumSet<Options> options) {
-        return options.contains(Options.FORCE_BCF) || (location != null && location.getName().contains(".bcf"));
-    }
-
-    public static boolean isCompressedVcf(final File location) {
-        if (location == null)
-            return false;
-
-        return AbstractFeatureReader.hasBlockCompressedExtension(location);
-    }
-
-    public static VariantContextWriter sortOnTheFly(final VariantContextWriter innerWriter, final int maxCachingStartDistance) {
-        return sortOnTheFly(innerWriter, maxCachingStartDistance, false);
-    }
-
-    public static VariantContextWriter sortOnTheFly(final VariantContextWriter innerWriter, final int maxCachingStartDistance, final boolean takeOwnershipOfInner) {
-        return new SortingVariantContextWriter(innerWriter, maxCachingStartDistance, takeOwnershipOfInner);
-    }
-
-    /**
-     * Returns an output stream writing to location, or throws an exception if this fails
-     * @param location the file to open for writing
-     * @return a (possibly buffered) output stream writing to location
-     */
-    protected static OutputStream openOutputStream(final File location) {
-        try {
-            return IOUtil.maybeBufferOutputStream(new FileOutputStream(location));
-        } catch (final FileNotFoundException e) {
-            throw new RuntimeIOException(location + ": Unable to create VCF writer", e);
-        }
-    }
-}
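
Since this factory is deprecated and replaced by VariantContextWriterBuilder (see above), a rough, illustrative migration sketch follows; it is not an exact behavioural equivalent (defaults such as buffering and MD5 creation may differ):

    // Deprecated factory style:
    //   VariantContextWriter w = VariantContextWriterFactory.create(location, refDict);
    // Roughly equivalent builder style:
    import htsjdk.samtools.SAMSequenceDictionary;
    import htsjdk.variant.variantcontext.writer.Options;
    import htsjdk.variant.variantcontext.writer.VariantContextWriter;
    import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder;
    import java.io.File;

    public class FactoryMigrationExample {
        public static VariantContextWriter open(final File location, final SAMSequenceDictionary refDict) {
            return new VariantContextWriterBuilder()
                    .setReferenceDictionary(refDict)       // required for on-the-fly indexing
                    .setOption(Options.INDEX_ON_THE_FLY)   // mirrors the factory's DEFAULT_OPTIONS
                    .setOutputFile(location)               // file type inferred from the extension
                    .build();
        }
    }
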
diff --git a/src/java/htsjdk/variant/vcf/VCFHeaderLineTranslator.java b/src/java/htsjdk/variant/vcf/VCFHeaderLineTranslator.java
deleted file mode 100644
index 8eace55..0000000
--- a/src/java/htsjdk/variant/vcf/VCFHeaderLineTranslator.java
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package htsjdk.variant.vcf;
-
-import htsjdk.tribble.TribbleException;
-
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * A class for translating between VCF header versions
- */
-public class VCFHeaderLineTranslator {
-    private static Map<VCFHeaderVersion,VCFLineParser> mapping;
-
-    static {
-        mapping = new HashMap<VCFHeaderVersion,VCFLineParser>();
-        mapping.put(VCFHeaderVersion.VCF4_0,new VCF4Parser());
-        mapping.put(VCFHeaderVersion.VCF4_1,new VCF4Parser());
-        mapping.put(VCFHeaderVersion.VCF4_2,new VCF4Parser());
-        mapping.put(VCFHeaderVersion.VCF3_3,new VCF3Parser());
-        mapping.put(VCFHeaderVersion.VCF3_2,new VCF3Parser());
-    }
-
-    public static Map<String,String> parseLine(VCFHeaderVersion version, String valueLine, List<String> expectedTagOrder) {
-        return mapping.get(version).parseLine(valueLine,expectedTagOrder);
-    }
-}
-
-
-interface VCFLineParser {
-    public Map<String,String> parseLine(String valueLine, List<String> expectedTagOrder);
-}
-
-
-/**
- * A class that handles translating VCF 4 header lines to and from their on-disk representation
- */
-class VCF4Parser implements VCFLineParser {
-    /**
-     * parse a VCF4 line
-     * @param valueLine the line to parse
-     * @param expectedTagOrder the tag names expected, in order, or null to skip validation
-     * @return a mapping of the tags parsed out
-     */
-    public Map<String, String> parseLine(String valueLine, List<String> expectedTagOrder) {
-        // our return map
-        Map<String, String> ret = new LinkedHashMap<String, String>();
-
-        // a builder to store up characters as we go
-        StringBuilder builder = new StringBuilder();
-
-        // store the key when we're parsing out the values
-        String key = "";
-
-        // where are we in the stream of characters?
-        int index = 0;
-
-        // are we inside a quotation? we don't special case ',' then
-        boolean inQuote = false;
-
-        // a little switch machine to parse out the tags. Regex ended up being really complicated and ugly [yes, but this machine is getting ugly now... MAD]
-        for (char c: valueLine.toCharArray()) {
-            if ( c == '\"' ) {
-                inQuote = ! inQuote;
-            } else if ( inQuote ) {
-                builder.append(c);
-            } else {
-                switch (c) {
-                    case ('<') : if (index == 0) break; // if we see a open bracket at the beginning, ignore it
-                    case ('>') : if (index == valueLine.length()-1) ret.put(key,builder.toString().trim()); break; // if we see a close bracket, and we're at the end, add an entry to our list
-                    case ('=') : key = builder.toString().trim(); builder = new StringBuilder(); break; // at an equals, copy the key and reset the builder
-                    case (',') : ret.put(key,builder.toString().trim()); builder = new StringBuilder(); break; // drop the current key value to the return map
-                    default: builder.append(c); // otherwise simply append to the current string
-                }
-            }
-            
-            index++;
-        }
-
-        // validate the tags against the expected list
-        index = 0;
-        if ( expectedTagOrder != null ) {
-            if ( ret.size() > expectedTagOrder.size() )
-                throw new TribbleException.InvalidHeader("unexpected tag count " + ret.size() + " in line " + valueLine);
-            for ( String str : ret.keySet() ) {
-                if ( !expectedTagOrder.get(index).equals(str) )
-                    throw new TribbleException.InvalidHeader("Unexpected tag " + str + " in line " + valueLine);
-                index++;
-            }
-        }
-        return ret;
-    }
-}
-
-class VCF3Parser implements VCFLineParser {
-
-    public Map<String, String> parseLine(String valueLine, List<String> expectedTagOrder) {
-        // our return map
-        Map<String, String> ret = new LinkedHashMap<String, String>();
-
-        // a builder to store up characters as we go
-        StringBuilder builder = new StringBuilder();
-
-        // where are we in the stream of characters?
-        int index = 0;
-        // where in the expected tag order are we?
-        int tagIndex = 0;
-
-        // are we inside a quotation? we don't special case ',' then
-        boolean inQuote = false;
-
-        // a little switch machine to parse out the tags. Regex ended up being really complicated and ugly
-        for (char c: valueLine.toCharArray()) {
-            switch (c) {
-                case ('\"') : inQuote = !inQuote; break; // a quote means we ignore ',' in our strings, keep track of it
-                case (',') : if (!inQuote) { ret.put(expectedTagOrder.get(tagIndex++),builder.toString()); builder = new StringBuilder(); break; } // drop the current key value to the return map
-                default: builder.append(c); // otherwise simply append to the current string
-            }
-            index++;
-        }
-        ret.put(expectedTagOrder.get(tagIndex++),builder.toString());
-        
-        // validate the tags against the expected list
-        index = 0;
-        if (tagIndex != expectedTagOrder.size()) throw new IllegalArgumentException("Unexpected tag count " + tagIndex + ", we expected " + expectedTagOrder.size());
-        for (String str : ret.keySet()){
-            if (!expectedTagOrder.get(index).equals(str)) throw new IllegalArgumentException("Unexpected tag " + str + " in string " + valueLine);
-            index++;
-        }
-        return ret;
-    }
-}
\ No newline at end of file
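
For orientation, an illustrative sketch of what the VCF4 parser above produces for a typical header line value; the line content and tag order are example values:

    import htsjdk.variant.vcf.VCFHeaderLineTranslator;
    import htsjdk.variant.vcf.VCFHeaderVersion;
    import java.util.Arrays;
    import java.util.Map;

    public class HeaderLineParseExample {
        public static void main(final String[] args) {
            // The value part of a header line such as:
            //   ##INFO=<ID=DP,Number=1,Type=Integer,Description="Total Depth">
            final Map<String, String> fields = VCFHeaderLineTranslator.parseLine(
                    VCFHeaderVersion.VCF4_2,
                    "<ID=DP,Number=1,Type=Integer,Description=\"Total Depth\">",
                    Arrays.asList("ID", "Number", "Type", "Description"));
            System.out.println(fields.get("Description"));  // prints: Total Depth
        }
    }
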
diff --git a/src/java/htsjdk/samtools/AbstractBAMFileIndex.java b/src/main/java/htsjdk/samtools/AbstractBAMFileIndex.java
similarity index 100%
rename from src/java/htsjdk/samtools/AbstractBAMFileIndex.java
rename to src/main/java/htsjdk/samtools/AbstractBAMFileIndex.java
diff --git a/src/java/htsjdk/samtools/AbstractSAMHeaderRecord.java b/src/main/java/htsjdk/samtools/AbstractSAMHeaderRecord.java
similarity index 100%
rename from src/java/htsjdk/samtools/AbstractSAMHeaderRecord.java
rename to src/main/java/htsjdk/samtools/AbstractSAMHeaderRecord.java
diff --git a/src/java/htsjdk/samtools/AlignmentBlock.java b/src/main/java/htsjdk/samtools/AlignmentBlock.java
similarity index 100%
rename from src/java/htsjdk/samtools/AlignmentBlock.java
rename to src/main/java/htsjdk/samtools/AlignmentBlock.java
diff --git a/src/java/htsjdk/samtools/AsyncSAMFileWriter.java b/src/main/java/htsjdk/samtools/AsyncSAMFileWriter.java
similarity index 100%
rename from src/java/htsjdk/samtools/AsyncSAMFileWriter.java
rename to src/main/java/htsjdk/samtools/AsyncSAMFileWriter.java
diff --git a/src/java/htsjdk/samtools/BAMFileConstants.java b/src/main/java/htsjdk/samtools/BAMFileConstants.java
similarity index 100%
rename from src/java/htsjdk/samtools/BAMFileConstants.java
rename to src/main/java/htsjdk/samtools/BAMFileConstants.java
diff --git a/src/main/java/htsjdk/samtools/BAMFileReader.java b/src/main/java/htsjdk/samtools/BAMFileReader.java
new file mode 100644
index 0000000..98bb74f
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/BAMFileReader.java
@@ -0,0 +1,913 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools;
+
+
+import htsjdk.samtools.seekablestream.SeekableStream;
+import htsjdk.samtools.util.BinaryCodec;
+import htsjdk.samtools.util.BlockCompressedInputStream;
+import htsjdk.samtools.util.CloseableIterator;
+import htsjdk.samtools.util.CoordMath;
+import htsjdk.samtools.util.RuntimeIOException;
+import htsjdk.samtools.util.StringLineReader;
+
+import java.io.DataInputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.NoSuchElementException;
+
+/**
+ * Class for reading and querying BAM files.
+ */
+class BAMFileReader extends SamReader.ReaderImplementation {
+    // True if reading from a File rather than an InputStream
+    private boolean mIsSeekable = false;
+
+    // For converting bytes into other primitive types
+    private BinaryCodec mStream = null;
+
+    // Underlying compressed data stream.
+    private final BlockCompressedInputStream mCompressedInputStream;
+    private SAMFileHeader mFileHeader = null;
+
+    // One of these is populated if the file is seekable and an index exists
+    private File mIndexFile = null;
+    private SeekableStream mIndexStream = null;
+
+    private BAMIndex mIndex = null;
+    private long mFirstRecordPointer = 0;
+    // If non-null, there is an unclosed iterator extant.
+    private CloseableIterator<SAMRecord> mCurrentIterator = null;
+
+    // If true, all SAMRecords are fully decoded as they are read.
+    private boolean eagerDecode;
+
+    // If true, the BAMFileReader will use asynchronous IO.
+    // Note: this field currently has no effect (is not hooked up anywhere), but will be in the future. See https://github.com/samtools/htsjdk/pull/576
+    private final boolean useAsynchronousIO;
+
+    // For error-checking.
+    private ValidationStringency mValidationStringency;
+
+    // For creating BAMRecords
+    private SAMRecordFactory samRecordFactory;
+
+    /**
+     * Use the caching index reader implementation rather than the disk-hit-per-file model.
+     */
+    private boolean mEnableIndexCaching = false;
+
+    /**
+     * Use the traditional memory-mapped implementation for BAM file indexes rather than regular I/O.
+     */
+    private boolean mEnableIndexMemoryMapping = true;
+
+    /**
+     * Add information about the origin (reader and position) to SAM records.
+     */
+    private SamReader mReader = null;
+
+    /**
+     * Prepare to read BAM from a stream (not seekable)
+     * @param stream source of bytes.
+     * @param eagerDecode if true, decode all BAM fields as they are read rather than lazily.
+     * @param validationStringency Controls how to handle invalid reads or header lines.
+     */
+    BAMFileReader(final InputStream stream,
+                  final File indexFile,
+                  final boolean eagerDecode,
+                  final boolean useAsynchronousIO,
+                  final ValidationStringency validationStringency,
+                  final SAMRecordFactory factory)
+        throws IOException {
+        mIndexFile = indexFile;
+        mIsSeekable = false;
+        this.useAsynchronousIO = useAsynchronousIO;
+        mCompressedInputStream = new BlockCompressedInputStream(stream);
+        mStream = new BinaryCodec(new DataInputStream(mCompressedInputStream));
+        this.eagerDecode = eagerDecode;
+        this.mValidationStringency = validationStringency;
+        this.samRecordFactory = factory;
+        this.mFileHeader = readHeader(this.mStream, this.mValidationStringency, null);
+    }
+
+    /**
+     * Prepare to read BAM from a file (seekable)
+     * @param file source of bytes.
+     * @param eagerDecode if true, decode all BAM fields as they are read rather than lazily.
+     * @param validationStringency Controls how to handle invalid reads or header lines.
+     */
+    BAMFileReader(final File file,
+                  final File indexFile,
+                  final boolean eagerDecode,
+                  final boolean useAsynchronousIO,
+                  final ValidationStringency validationStringency,
+                  final SAMRecordFactory factory)
+        throws IOException {
+        this(new BlockCompressedInputStream(file), indexFile!=null ? indexFile : SamFiles.findIndex(file), eagerDecode, useAsynchronousIO, file.getAbsolutePath(), validationStringency, factory);
+        if (mIndexFile != null && mIndexFile.lastModified() < file.lastModified()) {
+            System.err.println("WARNING: BAM index file " + mIndexFile.getAbsolutePath() +
+                    " is older than BAM " + file.getAbsolutePath());
+        }
+        // Provide better error message when there is an error reading.
+        mStream.setInputFileName(file.getAbsolutePath());
+    }
+
+    BAMFileReader(final SeekableStream strm,
+                  final File indexFile,
+                  final boolean eagerDecode,
+                  final boolean useAsynchronousIO,
+                  final ValidationStringency validationStringency,
+                  final SAMRecordFactory factory)
+        throws IOException {
+        this(new BlockCompressedInputStream(strm), indexFile, eagerDecode, useAsynchronousIO, strm.getSource(), validationStringency, factory);
+    }
+
+    BAMFileReader(final SeekableStream strm,
+                  final SeekableStream indexStream,
+                  final boolean eagerDecode,
+                  final boolean useAsynchronousIO,
+                  final ValidationStringency validationStringency,
+                  final SAMRecordFactory factory)
+        throws IOException {
+        this(new BlockCompressedInputStream(strm), indexStream, eagerDecode, useAsynchronousIO, strm.getSource(), validationStringency, factory);
+    }
+
+    private BAMFileReader(final BlockCompressedInputStream compressedInputStream,
+                          final File indexFile,
+                          final boolean eagerDecode,
+                          final boolean useAsynchronousIO,
+                          final String source,
+                          final ValidationStringency validationStringency,
+                          final SAMRecordFactory factory)
+        throws IOException {
+        mIndexFile = indexFile;
+        mIsSeekable = true;
+        mCompressedInputStream = compressedInputStream;
+        mStream = new BinaryCodec(new DataInputStream(mCompressedInputStream));
+        this.eagerDecode = eagerDecode;
+        this.useAsynchronousIO = useAsynchronousIO;
+        this.mValidationStringency = validationStringency;
+        this.samRecordFactory = factory;
+        this.mFileHeader = readHeader(this.mStream, this.mValidationStringency, source);
+        mFirstRecordPointer = mCompressedInputStream.getFilePointer();
+    }    
+
+    private BAMFileReader(final BlockCompressedInputStream compressedInputStream,
+                          final SeekableStream indexStream,
+                          final boolean eagerDecode,
+                          final boolean useAsynchronousIO,
+                          final String source,
+                          final ValidationStringency validationStringency,
+                          final SAMRecordFactory factory)
+        throws IOException {
+        mIndexStream = indexStream;
+        mIsSeekable = true;
+        mCompressedInputStream = compressedInputStream;
+        mStream = new BinaryCodec(new DataInputStream(mCompressedInputStream));
+        this.eagerDecode = eagerDecode;
+        this.useAsynchronousIO = useAsynchronousIO;
+        this.mValidationStringency = validationStringency;
+        this.samRecordFactory = factory;
+        this.mFileHeader = readHeader(this.mStream, this.mValidationStringency, source);
+        mFirstRecordPointer = mCompressedInputStream.getFilePointer();
+    }
+
+    /** Reads through the header and sequence records to find the virtual file offset of the first record in the BAM file. */
+    static long findVirtualOffsetOfFirstRecord(final File bam) throws IOException {
+        final BAMFileReader reader = new BAMFileReader(bam, null, false, false, ValidationStringency.SILENT, new DefaultSAMRecordFactory());
+        final long offset = reader.mFirstRecordPointer;
+        reader.close();
+        return offset;
+    }
+
+    /**
+     * If true, writes the source of every read into the source SAMRecords.
+     * @param enabled true to write source information into each SAMRecord.
+     */
+    void enableFileSource(final SamReader reader, final boolean enabled) {
+        this.mReader = enabled ? reader : null;
+    }
+
+    /**
+     * If true, uses the caching version of the index reader.
+     * @param enabled true to use the caching version of the reader.
+     */
+    protected void enableIndexCaching(final boolean enabled) {
+        if(mIndex != null)
+            throw new SAMException("Unable to turn on index caching; index file has already been loaded.");
+        this.mEnableIndexCaching = enabled;
+    }
+
+    /**
+     * If false, disable the use of memory mapping for accessing index files (default behavior is to use memory mapping).
+     * This is slower but more scalable when accessing large numbers of BAM files sequentially.
+     * @param enabled True to use memory mapping, false to use regular I/O.
+     */
+    protected void enableIndexMemoryMapping(final boolean enabled) {
+        if (mIndex != null) {
+            throw new SAMException("Unable to change index memory mapping; index file has already been loaded.");
+        }
+        this.mEnableIndexMemoryMapping = enabled;
+    }
+
+    @Override void enableCrcChecking(final boolean enabled) {
+        this.mCompressedInputStream.setCheckCrcs(enabled);
+    }
+
+    @Override void setSAMRecordFactory(final SAMRecordFactory factory) { this.samRecordFactory = factory; }
+
+    @Override
+    public SamReader.Type type() {
+        return SamReader.Type.BAM_TYPE;
+    }
+
+    /**
+     * @return true if this is a BAM file and has an index
+     */
+    public boolean hasIndex() {
+        return mIsSeekable && ((mIndexFile != null) || (mIndexStream != null));
+    }
+
+    /**
+     * Retrieves the index for the given file type.  Ensure that the index is of the specified type.
+     * @return An index of the given type.
+     */
+    public BAMIndex getIndex() {
+        if(!hasIndex())
+            throw new SAMException("No index is available for this BAM file.");
+        if(mIndex == null) {
+            if (mIndexFile != null)
+                mIndex = mEnableIndexCaching ? new CachingBAMFileIndex(mIndexFile, getFileHeader().getSequenceDictionary(), mEnableIndexMemoryMapping)
+                                             : new DiskBasedBAMFileIndex(mIndexFile, getFileHeader().getSequenceDictionary(), mEnableIndexMemoryMapping);
+            else
+                mIndex = mEnableIndexCaching ? new CachingBAMFileIndex(mIndexStream, getFileHeader().getSequenceDictionary())
+                                             : new DiskBasedBAMFileIndex(mIndexStream, getFileHeader().getSequenceDictionary());
+        }
+        return mIndex;
+    }
+
+    public void setEagerDecode(final boolean desired) { this.eagerDecode = desired; }
+
+    @Override
+    public void close() {
+        if (mCompressedInputStream != null) {
+            try {
+                mCompressedInputStream.close();
+            } catch (IOException e) {
+                throw new RuntimeIOException("Exception closing compressed input stream.", e);
+            }
+        }
+        if (mStream != null) {
+            mStream.close();
+        }
+        if (mIndex != null) {
+            mIndex.close();
+        }
+        mStream = null;
+        mFileHeader = null;
+        mIndex = null;
+    }
+
+    public SAMFileHeader getFileHeader() {
+        return mFileHeader;
+    }
+
+    /**
+     * Set error-checking level for subsequent SAMRecord reads.
+     */
+    void setValidationStringency(final ValidationStringency validationStringency) {
+        this.mValidationStringency = validationStringency;
+    }
+
+    public ValidationStringency getValidationStringency() {
+        return this.mValidationStringency;
+    }
+
+    /**
+     * Prepare to iterate through the SAMRecords in file order.
+     * Only a single iterator on a BAM file can be extant at a time.  If getIterator() or a query method has been called once,
+     * that iterator must be closed before getIterator() can be called again.
+     * A somewhat peculiar aspect of this method is that if the file is not seekable, a second call to
+     * getIterator() begins its iteration where the last one left off.  That is the best that can be
+     * done in that situation.
+     */
+    public CloseableIterator<SAMRecord> getIterator() {
+        if (mStream == null) {
+            throw new IllegalStateException("File reader is closed");
+        }
+        if (mCurrentIterator != null) {
+            throw new IllegalStateException("Iteration in progress");
+        }
+        if (mIsSeekable) {
+            try {
+                mCompressedInputStream.seek(mFirstRecordPointer);
+            } catch (final IOException exc) {
+                throw new RuntimeIOException(exc.getMessage(), exc);
+            }
+        }
+        mCurrentIterator = new BAMFileIterator();
+        return mCurrentIterator;
+    }
+
+    @Override
+    public CloseableIterator<SAMRecord> getIterator(final SAMFileSpan chunks) {
+        if (mStream == null) {
+            throw new IllegalStateException("File reader is closed");
+        }
+        if (mCurrentIterator != null) {
+            throw new IllegalStateException("Iteration in progress");
+        }
+        if (!(chunks instanceof BAMFileSpan)) {
+            throw new IllegalStateException("BAMFileReader cannot handle this type of file span.");
+        }
+
+        // Create an iterator over the given chunk boundaries.
+        mCurrentIterator = new BAMFileIndexIterator(((BAMFileSpan)chunks).toCoordinateArray());
+        return mCurrentIterator;
+    }
+
+    /**
+     * Gets an unbounded pointer to the first record in the BAM file.  Because the reader doesn't necessarily know
+     * when the file ends, the rightmost bound of the file pointer will not end exactly where the file ends.  However,
+     * the rightmost bound is guaranteed to be after the last read in the file.
+     * @return An unbounded pointer to the first record in the BAM file.
+     */
+    @Override
+    public SAMFileSpan getFilePointerSpanningReads() {
+        return new BAMFileSpan(new Chunk(mFirstRecordPointer,Long.MAX_VALUE));
+    }
+
+    /**
+     * Prepare to iterate through the SAMRecords that match the given interval.
+     * Only a single iterator on a BAMFile can be extant at a time.  The previous one must be closed
+     * before calling any of the methods that return an iterator.
+     *
+     * Note that an unmapped SAMRecord may still have a reference name and an alignment start for sorting
+     * purposes (typically this is the coordinate of its mate), and will be found by this method if the coordinate
+     * matches the specified interval.
+     *
+     * Note that this method is not necessarily efficient in terms of disk I/O.  The index does not have perfect
+     * resolution, so some SAMRecords may be read and then discarded because they do not match the specified interval.
+     *
+     * @param sequence Reference sequence sought.
+     * @param start Desired SAMRecords must overlap or be contained in the interval specified by start and end.
+     * A value of zero implies the start of the reference sequence.
+     * @param end A value of zero implies the end of the reference sequence.
+     * @param contained If true, the alignments for the SAMRecords must be completely contained in the interval
+     * specified by start and end.  If false, the SAMRecords need only overlap the interval.
+     * @return Iterator for the matching SAMRecords
+     */
+    CloseableIterator<SAMRecord> query(final String sequence, final int start, final int end, final boolean contained) {
+        if (mStream == null) {
+            throw new IllegalStateException("File reader is closed");
+        }
+        if (mCurrentIterator != null) {
+            throw new IllegalStateException("Iteration in progress");
+        }
+        if (!mIsSeekable) {
+            throw new UnsupportedOperationException("Cannot query stream-based BAM file");
+        }
+        final int referenceIndex = mFileHeader.getSequenceIndex(sequence);
+        if (referenceIndex == -1) {
+            mCurrentIterator = new EmptyBamIterator();
+        } else {
+            final QueryInterval[] queryIntervals = {new QueryInterval(referenceIndex, start, end)};
+            mCurrentIterator = createIndexIterator(queryIntervals, contained);
+        }
+        return mCurrentIterator;
+    }
+
+    /**
+     * Prepare to iterate through the SAMRecords that match any of the given intervals.
+     * Only a single iterator on a BAMFile can be extant at a time.  The previous one must be closed
+     * before calling any of the methods that return an iterator.
+     *
+     * Note that an unmapped SAMRecord may still have a reference name and an alignment start for sorting
+     * purposes (typically this is the coordinate of its mate), and will be found by this method if the coordinate
+     * matches the specified interval.
+     *
+     * Note that this method is not necessarily efficient in terms of disk I/O.  The index does not have perfect
+     * resolution, so some SAMRecords may be read and then discarded because they do not match the specified interval.
+     *
+     * @param intervals list of intervals to be queried.  Must be optimized.
+     * @param contained If true, the alignments for the SAMRecords must be completely contained in the interval
+     * specified by start and end.  If false, the SAMRecords need only overlap the interval.
+     * @return Iterator for the matching SAMRecords
+     * @see QueryInterval#optimizeIntervals(QueryInterval[])
+     */
+    public CloseableIterator<SAMRecord> query(final QueryInterval[] intervals, final boolean contained) {
+        if (mStream == null) {
+            throw new IllegalStateException("File reader is closed");
+        }
+        if (mCurrentIterator != null) {
+            throw new IllegalStateException("Iteration in progress");
+        }
+        if (!mIsSeekable) {
+            throw new UnsupportedOperationException("Cannot query stream-based BAM file");
+        }
+        mCurrentIterator = createIndexIterator(intervals, contained);
+        return mCurrentIterator;
+    }
+
+    /**
+     * Prepare to iterate through the SAMRecords with the given alignment start.
+     * Only a single iterator on a BAMFile can be extant at a time.  The previous one must be closed
+     * before calling any of the methods that return an iterator.
+     *
+     * Note that an unmapped SAMRecord may still have a reference name and an alignment start for sorting
+     * purposes (typically this is the coordinate of its mate), and will be found by this method if the coordinate
+     * matches the specified interval.
+     *
+     * Note that this method is not necessarily efficient in terms of disk I/O.  The index does not have perfect
+     * resolution, so some SAMRecords may be read and then discarded because they do not match the specified interval.
+     *
+     * @param sequence Reference sequence sought.
+     * @param start Alignment start sought.
+     * @return Iterator for the matching SAMRecords.
+     */
+    public CloseableIterator<SAMRecord> queryAlignmentStart(final String sequence, final int start) {
+        if (mStream == null) {
+            throw new IllegalStateException("File reader is closed");
+        }
+        if (mCurrentIterator != null) {
+            throw new IllegalStateException("Iteration in progress");
+        }
+        if (!mIsSeekable) {
+            throw new UnsupportedOperationException("Cannot query stream-based BAM file");
+        }
+        final int referenceIndex = mFileHeader.getSequenceIndex(sequence);
+        if (referenceIndex == -1) {
+            mCurrentIterator = new EmptyBamIterator();
+        } else {
+            mCurrentIterator = createStartingAtIndexIterator(referenceIndex, start);
+        }
+        return mCurrentIterator;
+    }
+
+    /**
+     * Prepare to iterate through the SAMRecords that are unmapped and do not have a reference name or alignment start.
+     * Only a single iterator on a BAMFile can be extant at a time.  The previous one must be closed
+     * before calling any of the methods that return an iterator.
+     *
+     * @return Iterator for the matching SAMRecords.
+     */
+    public CloseableIterator<SAMRecord> queryUnmapped() {
+        if (mStream == null) {
+            throw new IllegalStateException("File reader is closed");
+        }
+        if (mCurrentIterator != null) {
+            throw new IllegalStateException("Iteration in progress");
+        }
+        if (!mIsSeekable) {
+            throw new UnsupportedOperationException("Cannot query stream-based BAM file");
+        }
+        try {
+            final long startOfLastLinearBin = getIndex().getStartOfLastLinearBin();
+            if (startOfLastLinearBin != -1) {
+                mCompressedInputStream.seek(startOfLastLinearBin);
+            } else {
+                // No mapped reads in file, just start at the first read in file.
+                mCompressedInputStream.seek(mFirstRecordPointer);
+            }
+            mCurrentIterator = new BAMFileIndexUnmappedIterator();
+            return mCurrentIterator;
+        } catch (final IOException e) {
+            throw new RuntimeIOException("IOException seeking to unmapped reads", e);
+        }
+    }
+
+    /**
+     * Reads the header of a BAM file from a stream
+     * @param stream A BinaryCodec to read the header from
+     * @param validationStringency Determines how stringent to be when validating the SAM header
+     * @param source Note that this is used only for reporting errors.
+     */
+    protected static SAMFileHeader readHeader(final BinaryCodec stream, final ValidationStringency validationStringency, final String source)
+        throws IOException {
+
+        final byte[] buffer = new byte[4];
+        stream.readBytes(buffer);
+        if (!Arrays.equals(buffer, BAMFileConstants.BAM_MAGIC)) {
+            throw new IOException("Invalid BAM file header");
+        }
+
+        final int headerTextLength = stream.readInt();
+        final String textHeader = stream.readString(headerTextLength);
+        final SAMTextHeaderCodec headerCodec = new SAMTextHeaderCodec();
+        headerCodec.setValidationStringency(validationStringency);
+        final SAMFileHeader samFileHeader = headerCodec.decode(new StringLineReader(textHeader),
+                source);
+
+        final int sequenceCount = stream.readInt();
+        if (!samFileHeader.getSequenceDictionary().isEmpty()) {
+            // It is allowed to have binary sequences but no text sequences, so only validate if both are present
+            if (sequenceCount != samFileHeader.getSequenceDictionary().size()) {
+                throw new SAMFormatException("Number of sequences in text header (" +
+                        samFileHeader.getSequenceDictionary().size() +
+                        ") != number of sequences in binary header (" + sequenceCount + ") for file " + source);
+            }
+            for (int i = 0; i < sequenceCount; i++) {
+                final SAMSequenceRecord binarySequenceRecord = readSequenceRecord(stream, source);
+                final SAMSequenceRecord sequenceRecord = samFileHeader.getSequence(i);
+                if (!sequenceRecord.getSequenceName().equals(binarySequenceRecord.getSequenceName())) {
+                    throw new SAMFormatException("For sequence " + i + ", text and binary have different names in file " +
+                            source);
+                }
+                if (sequenceRecord.getSequenceLength() != binarySequenceRecord.getSequenceLength()) {
+                    throw new SAMFormatException("For sequence " + i + ", text and binary have different lengths in file " +
+                            source);
+                }
+            }
+        } else {
+            // If only binary sequences are present, copy them into samFileHeader
+            final List<SAMSequenceRecord> sequences = new ArrayList<SAMSequenceRecord>(sequenceCount);
+            for (int i = 0; i < sequenceCount; i++) {
+                sequences.add(readSequenceRecord(stream, source));
+            }
+            samFileHeader.setSequenceDictionary(new SAMSequenceDictionary(sequences));
+        }
+
+        return samFileHeader;
+    }
+
+    /**
+     * Reads a single binary sequence record from the file or stream
+     * @param source Note that this is used only for reporting errors.
+     */
+    private static SAMSequenceRecord readSequenceRecord(final BinaryCodec stream, final String source) {
+        final int nameLength = stream.readInt();
+        if (nameLength <= 1) {
+            throw new SAMFormatException("Invalid BAM file header: missing sequence name in file " + source);
+        }
+        final String sequenceName = stream.readString(nameLength - 1);
+        // Skip the null terminator
+        stream.readByte();
+        final int sequenceLength = stream.readInt();
+        return new SAMSequenceRecord(SAMSequenceRecord.truncateSequenceName(sequenceName), sequenceLength);
+    }
+
+    /**
+     * Encapsulates the restriction that only one iterator may be open at a time.
+     */
+    private abstract class AbstractBamIterator implements CloseableIterator<SAMRecord> {
+
+        private boolean isClosed = false;
+
+        public void close() {
+            if (!isClosed) {
+                if (mCurrentIterator != null && this != mCurrentIterator) {
+                    throw new IllegalStateException("Attempt to close non-current iterator");
+                }
+                mCurrentIterator = null;
+                isClosed = true;
+            }
+        }
+
+        protected void assertOpen() {
+            if (isClosed) throw new AssertionError("Iterator has been closed");
+        }
+
+        public void remove() {
+            throw new UnsupportedOperationException("Not supported: remove");
+        }
+
+    }
+
+    private class EmptyBamIterator extends AbstractBamIterator {
+        @Override
+        public boolean hasNext() {
+            return false;
+        }
+
+        @Override
+        public SAMRecord next() {
+            throw new NoSuchElementException("next called on empty iterator");
+        }
+    }
+
+    /**
+     * Iterator for non-indexed sequential iteration through all SAMRecords in file.
+     * Starting point of iteration is wherever current file position is when the iterator is constructed.
+     */
+    private class BAMFileIterator extends AbstractBamIterator {
+        private SAMRecord mNextRecord = null;
+        private final BAMRecordCodec bamRecordCodec;
+        private long samRecordIndex = 0; // Position in the file, counted in records
+
+        BAMFileIterator() {
+            this(true);
+        }
+
+        /**
+         * @param advance If false, allows a subclass to complete its own setup before advancing to the first record
+         */
+        BAMFileIterator(final boolean advance) {
+            this.bamRecordCodec = new BAMRecordCodec(getFileHeader(), samRecordFactory);
+            this.bamRecordCodec.setInputStream(BAMFileReader.this.mStream.getInputStream(),
+                    BAMFileReader.this.mStream.getInputFileName());
+
+            if (advance) {
+                advance();
+            }
+        }
+
+        public boolean hasNext() {
+            assertOpen();
+            return (mNextRecord != null);
+        }
+
+        public SAMRecord next() {
+            assertOpen();
+            final SAMRecord result = mNextRecord;
+            advance();
+            return result;
+        }
+
+        void advance() {
+            try {
+                mNextRecord = getNextRecord();
+
+                if (mNextRecord != null) {
+                    ++this.samRecordIndex;
+                    // Because some decoding is done lazily, the record needs to remember the validation stringency.
+                    mNextRecord.setValidationStringency(mValidationStringency);
+
+                    if (mValidationStringency != ValidationStringency.SILENT) {
+                        final List<SAMValidationError> validationErrors = mNextRecord.isValid(mValidationStringency == ValidationStringency.STRICT);
+                        SAMUtils.processValidationErrors(validationErrors,
+                                this.samRecordIndex, BAMFileReader.this.getValidationStringency());
+                    }
+                }
+                if (eagerDecode && mNextRecord != null) {
+                    mNextRecord.eagerDecode();
+                }
+            } catch (final IOException exc) {
+                throw new RuntimeIOException(exc.getMessage(), exc);
+            }
+        }
+
+        /**
+         * Read the next record from the input stream.
+         */
+        SAMRecord getNextRecord() throws IOException {
+            final long startCoordinate = mCompressedInputStream.getFilePointer();
+            final SAMRecord next = bamRecordCodec.decode();
+            final long stopCoordinate = mCompressedInputStream.getFilePointer();
+
+            if(mReader != null && next != null)
+                next.setFileSource(new SAMFileSource(mReader,new BAMFileSpan(new Chunk(startCoordinate,stopCoordinate))));
+
+            return next;
+        }
+
+        /**
+         * @return The record that will be returned by the next call to next()
+         */
+        protected SAMRecord peek() {
+            return mNextRecord;
+        }
+    }
+
+    /**
+     * Prepare to iterate through SAMRecords in the given reference that start exactly at the given start coordinate.
+     * @param referenceIndex Desired reference sequence.
+     * @param start 1-based alignment start.
+     */
+    private CloseableIterator<SAMRecord> createStartingAtIndexIterator(final int referenceIndex,
+                                                                       final int start) {
+
+        // Hit the index to determine the chunk boundaries for the required data.
+        final BAMIndex fileIndex = getIndex();
+        final BAMFileSpan fileSpan = fileIndex.getSpanOverlapping(referenceIndex, start, 0);
+        final long[] filePointers = fileSpan != null ? fileSpan.toCoordinateArray() : null;
+
+        // Create an iterator over the above chunk boundaries.
+        final BAMFileIndexIterator iterator = new BAMFileIndexIterator(filePointers);
+
+        // Add some preprocessing filters for edge-case reads that don't fit into this
+        // query type.
+        return new BAMQueryFilteringIterator(iterator,new BAMStartingAtIteratorFilter(referenceIndex,start));
+    }
+
+    /**
+     * @throws java.lang.IllegalArgumentException if the intervals are not optimized
+     * @see QueryInterval#optimizeIntervals(QueryInterval[])
+     */
+    private void assertIntervalsOptimized(final QueryInterval[] intervals) {
+        if (intervals.length == 0) return;
+        for (int i = 1; i < intervals.length; ++i) {
+            final QueryInterval prev = intervals[i-1];
+            final QueryInterval thisInterval = intervals[i];
+            if (prev.compareTo(thisInterval) >= 0) {
+                throw new IllegalArgumentException(String.format("List of intervals is not sorted: %s >= %s", prev, thisInterval));
+            }
+            if (prev.overlaps(thisInterval)) {
+                throw new IllegalArgumentException(String.format("List of intervals is not optimized: %s intersects %s", prev, thisInterval));
+            }
+            if (prev.abuts(thisInterval)) {
+                throw new IllegalArgumentException(String.format("List of intervals is not optimized: %s abuts %s", prev, thisInterval));
+            }
+        }
+    }
+
+    private CloseableIterator<SAMRecord> createIndexIterator(final QueryInterval[] intervals,
+                                                             final boolean contained) {
+
+        assertIntervalsOptimized(intervals);
+
+        // Hit the index to determine the chunk boundaries for the required data.
+        final BAMFileSpan[] inputSpans = new BAMFileSpan[intervals.length];
+        final BAMIndex fileIndex = getIndex();
+        for (int i = 0; i < intervals.length; ++i) {
+            final QueryInterval interval = intervals[i];
+            final BAMFileSpan span = fileIndex.getSpanOverlapping(interval.referenceIndex, interval.start, interval.end);
+            inputSpans[i] = span;
+        }
+        final long[] filePointers;
+        if (inputSpans.length > 0) {
+            filePointers = BAMFileSpan.merge(inputSpans).toCoordinateArray();
+        } else {
+            filePointers = null;
+        }
+
+        // Create an iterator over the above chunk boundaries.
+        final BAMFileIndexIterator iterator = new BAMFileIndexIterator(filePointers);
+
+        // Add some preprocessing filters for edge-case reads that don't fit into this
+        // query type.
+        return new BAMQueryFilteringIterator(iterator, new BAMQueryMultipleIntervalsIteratorFilter(intervals, contained));
+    }
+
+    /**
+     * Iterate over the SAMRecords defined by the sections of the file described in the ctor argument.
+     */
+    private class BAMFileIndexIterator extends BAMFileIterator {
+
+        private long[] mFilePointers = null;
+        private int mFilePointerIndex = 0;
+        private long mFilePointerLimit = -1;
+
+        /**
+         * Prepare to iterate through SAMRecords stored in the specified compressed blocks at the given offset.
+         * @param filePointers the block / offset combination, stored in chunk format.
+         */
+        BAMFileIndexIterator(final long[] filePointers) {
+            super(false);  // delay advance() until after construction
+            mFilePointers = filePointers;
+            advance();
+        }
+
+        SAMRecord getNextRecord()
+            throws IOException {
+            // Advance to next file block if necessary
+            while (mCompressedInputStream.getFilePointer() >= mFilePointerLimit) {
+                if (mFilePointers == null ||
+                        mFilePointerIndex >= mFilePointers.length) {
+                    return null;
+                }
+                final long startOffset = mFilePointers[mFilePointerIndex++];
+                final long endOffset = mFilePointers[mFilePointerIndex++];
+                mCompressedInputStream.seek(startOffset);
+                mFilePointerLimit = endOffset;
+            }
+            // Pull next record from stream
+            return super.getNextRecord();
+        }
+    }
+
+    /**
+     * Pull SAMRecords from a coordinate-sorted iterator, and filter out any that do not match the filter.
+     */
+    public class BAMQueryFilteringIterator extends AbstractBamIterator {
+        /**
+         * The wrapped iterator.
+         */
+        protected final CloseableIterator<SAMRecord> wrappedIterator;
+        /**
+         * The next record to be returned.  Will be null if no such record exists.
+         */
+        protected SAMRecord mNextRecord;
+        private final BAMIteratorFilter iteratorFilter;
+
+        public BAMQueryFilteringIterator(final CloseableIterator<SAMRecord> iterator,
+                                         final BAMIteratorFilter iteratorFilter) {
+            this.wrappedIterator = iterator;
+            this.iteratorFilter = iteratorFilter;
+            mNextRecord = advance();
+        }
+
+        /**
+         * Returns true if a next element exists; false otherwise.
+         */
+        public boolean hasNext() {
+            assertOpen();
+            return mNextRecord != null;
+        }
+
+        /**
+         * Gets the next record from the given iterator.
+         * @return The next SAM record in the iterator.
+         */
+        public SAMRecord next() {
+            if(!hasNext())
+                throw new NoSuchElementException("BAMQueryFilteringIterator: no next element available");
+            final SAMRecord currentRead = mNextRecord;
+            mNextRecord = advance();
+            return currentRead;
+        }
+
+        SAMRecord advance() {
+            while (true) {
+                // Pull next record from stream
+                if(!wrappedIterator.hasNext())
+                    return null;
+
+                final SAMRecord record = wrappedIterator.next();
+                switch (iteratorFilter.compareToFilter(record)) {
+                    case MATCHES_FILTER: return record;
+                    case STOP_ITERATION: return null;
+                    case CONTINUE_ITERATION: break; // keep looping
+                    default: throw new SAMException("Unexpected return from compareToFilter");
+                }
+            }
+        }
+    }
+
+    /**
+     * A decorating iterator that filters out records that do not match the given reference and start position.
+     */
+    private class BAMStartingAtIteratorFilter implements BAMIteratorFilter {
+
+        private final int mReferenceIndex;
+        private final int mRegionStart;
+
+        public BAMStartingAtIteratorFilter(final int referenceIndex, final int start) {
+            mReferenceIndex = referenceIndex;
+            mRegionStart = start;
+        }
+
+        /**
+         *
+         * @return MATCHES_FILTER if this record matches the filter;
+         * CONTINUE_ITERATION if it does not match the filter but iteration should continue;
+         * STOP_ITERATION if it does not match the filter and iteration should end.
+         */
+        @Override
+        public FilteringIteratorState compareToFilter(final SAMRecord record) {
+            // If beyond the end of this reference sequence, end iteration
+            final int referenceIndex = record.getReferenceIndex();
+            if (referenceIndex < 0 || referenceIndex > mReferenceIndex) {
+                return FilteringIteratorState.STOP_ITERATION;
+            } else if (referenceIndex < mReferenceIndex) {
+                // If before this reference sequence, continue
+                return FilteringIteratorState.CONTINUE_ITERATION;
+            }
+            final int alignmentStart = record.getAlignmentStart();
+            if (alignmentStart > mRegionStart) {
+                // If scanned beyond target region, end iteration
+                return FilteringIteratorState.STOP_ITERATION;
+            } else if (alignmentStart == mRegionStart) {
+                return FilteringIteratorState.MATCHES_FILTER;
+            } else {
+                return FilteringIteratorState.CONTINUE_ITERATION;
+            }
+        }
+
+    }
+
+    private class BAMFileIndexUnmappedIterator extends BAMFileIterator  {
+        private BAMFileIndexUnmappedIterator() {
+            while (this.hasNext() && peek().getReferenceIndex() != SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX) {
+                advance();
+            }
+        }
+    }
+}
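For orientation, the query path above is normally reached through the public SamReader API rather than by constructing a BAMFileReader directly. A minimal sketch, assuming an indexed, coordinate-sorted file named input.bam with a reference sequence named chr1 (both names are illustrative; classes come from htsjdk.samtools, htsjdk.samtools.util and java.io):

    final SamReader reader = SamReaderFactory.makeDefault()
            .validationStringency(ValidationStringency.SILENT)
            .open(new File("input.bam"));
    final int tid = reader.getFileHeader().getSequenceIndex("chr1");
    // Intervals passed to query() must be optimized: sorted, non-overlapping, non-abutting.
    final QueryInterval[] intervals = QueryInterval.optimizeIntervals(
            new QueryInterval[] { new QueryInterval(tid, 1000, 2000) });
    final SAMRecordIterator it = reader.query(intervals, false);
    while (it.hasNext()) {
        final SAMRecord rec = it.next();
        // process each overlapping record
    }
    it.close();                    // only one iterator may be open on the reader at a time
    CloserUtil.close(reader);
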
diff --git a/src/java/htsjdk/samtools/BAMFileSpan.java b/src/main/java/htsjdk/samtools/BAMFileSpan.java
similarity index 100%
rename from src/java/htsjdk/samtools/BAMFileSpan.java
rename to src/main/java/htsjdk/samtools/BAMFileSpan.java
diff --git a/src/main/java/htsjdk/samtools/BAMFileWriter.java b/src/main/java/htsjdk/samtools/BAMFileWriter.java
new file mode 100644
index 0000000..f6a474e
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/BAMFileWriter.java
@@ -0,0 +1,200 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools;
+
+import htsjdk.samtools.util.BinaryCodec;
+import htsjdk.samtools.util.BlockCompressedOutputStream;
+import htsjdk.samtools.util.RuntimeIOException;
+import htsjdk.samtools.util.zip.DeflaterFactory;
+
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.StringWriter;
+import java.io.Writer;
+
+/**
+ * Concrete implementation of SAMFileWriter for writing gzipped BAM files.
+ */
+class BAMFileWriter extends SAMFileWriterImpl {
+
+    private final BinaryCodec outputBinaryCodec;
+    private BAMRecordCodec bamRecordCodec = null;
+    private final BlockCompressedOutputStream blockCompressedOutputStream;
+    private BAMIndexer bamIndexer = null;
+
+    protected BAMFileWriter(final File path) {
+        blockCompressedOutputStream = new BlockCompressedOutputStream(path);
+        outputBinaryCodec = new BinaryCodec(new DataOutputStream(blockCompressedOutputStream));
+        outputBinaryCodec.setOutputFileName(path.getAbsolutePath());
+    }
+
+    protected BAMFileWriter(final File path, final int compressionLevel) {
+        blockCompressedOutputStream = new BlockCompressedOutputStream(path, compressionLevel);
+        outputBinaryCodec = new BinaryCodec(new DataOutputStream(blockCompressedOutputStream));
+        outputBinaryCodec.setOutputFileName(path.getAbsolutePath());
+    }
+
+    protected BAMFileWriter(final OutputStream os, final File file) {
+        blockCompressedOutputStream = new BlockCompressedOutputStream(os, file);
+        outputBinaryCodec = new BinaryCodec(new DataOutputStream(blockCompressedOutputStream));
+        outputBinaryCodec.setOutputFileName(getPathString(file));
+    }
+
+    protected BAMFileWriter(final OutputStream os, final File file, final int compressionLevel) {
+        blockCompressedOutputStream = new BlockCompressedOutputStream(os, file, compressionLevel);
+        outputBinaryCodec = new BinaryCodec(new DataOutputStream(blockCompressedOutputStream));
+        outputBinaryCodec.setOutputFileName(getPathString(file));
+    }
+
+    protected BAMFileWriter(final OutputStream os, final File file, final int compressionLevel, final DeflaterFactory deflaterFactory) {
+        blockCompressedOutputStream = new BlockCompressedOutputStream(os, file, compressionLevel, deflaterFactory);
+        outputBinaryCodec = new BinaryCodec(new DataOutputStream(blockCompressedOutputStream));
+        outputBinaryCodec.setOutputFileName(getPathString(file));
+    }
+
+    private void prepareToWriteAlignments() {
+        if (bamRecordCodec == null) {
+            bamRecordCodec = new BAMRecordCodec(getFileHeader());
+            bamRecordCodec.setOutputStream(outputBinaryCodec.getOutputStream(), getFilename());
+        }
+    }
+
+    /** @return absolute path, or null if arg is null.  */
+    private String getPathString(final File path){
+        return (path != null) ? path.getAbsolutePath() : null;
+    }
+
+    // Allow enabling the bam index construction
+    // only enabled by factory method before anything is written
+    void enableBamIndexConstruction() {
+        if (!getSortOrder().equals(SAMFileHeader.SortOrder.coordinate)){
+           throw new SAMException("Not creating BAM index since not sorted by coordinates: " + getSortOrder());
+        }
+        if(getFilename() == null){
+            throw new SAMException("Not creating BAM index since we don't have an output file name");
+        }
+        bamIndexer = createBamIndex(getFilename());
+    }
+
+    private BAMIndexer createBamIndex(final String path) {
+        try {
+            final String indexFileBase = path.endsWith(BamFileIoUtils.BAM_FILE_EXTENSION) ?
+                    path.substring(0, path.lastIndexOf('.')) : path;
+            final File indexFile = new File(indexFileBase + BAMIndex.BAMIndexSuffix);
+            if (indexFile.exists()) {
+                if (!indexFile.canWrite()) {
+                    throw new SAMException("Not creating BAM index since unable to write index file " + indexFile);
+                }
+            }
+            return new BAMIndexer(indexFile, getFileHeader());
+        } catch (Exception e) {
+            throw new SAMException("Not creating BAM index", e);
+        }
+    }
+
+    protected void writeAlignment(final SAMRecord alignment) {
+        prepareToWriteAlignments();
+
+        if (bamIndexer != null) {
+            try {
+                final long startOffset = blockCompressedOutputStream.getFilePointer();
+                bamRecordCodec.encode(alignment);
+                final long stopOffset = blockCompressedOutputStream.getFilePointer();
+                // set the alignment's SourceInfo and then prepare its index information
+                alignment.setFileSource(new SAMFileSource(null, new BAMFileSpan(new Chunk(startOffset, stopOffset))));
+                bamIndexer.processAlignment(alignment);
+            } catch (Exception e) {
+                bamIndexer = null;
+                throw new SAMException("Exception when processing alignment for BAM index " + alignment, e);
+            }
+        } else {
+            bamRecordCodec.encode(alignment);
+        }
+    }
+
+    protected void writeHeader(final String textHeader) {
+        writeHeader(outputBinaryCodec, getFileHeader(), textHeader);
+    }
+
+    protected void finish() {
+        outputBinaryCodec.close();
+        try {
+            if (bamIndexer != null) {
+                bamIndexer.finish();
+            }
+        } catch (Exception e) {
+            throw new SAMException("Exception writing BAM index file", e);
+        }
+    }
+
+    /** @return absolute path, or null if this writer does not correspond to a file.  */
+    protected String getFilename() {
+        return outputBinaryCodec.getOutputFileName();
+    }
+
+    /**
+     * Writes a header to a BAM file. The samFileHeader and headerText arguments are redundant: either can be used to
+     * regenerate the other, but in some cases the caller already has both, so accepting both saves some cycles.
+     */
+    protected static void writeHeader(final BinaryCodec outputBinaryCodec, final SAMFileHeader samFileHeader, final String headerText) {
+        outputBinaryCodec.writeBytes(BAMFileConstants.BAM_MAGIC);
+
+        // calculate and write the length of the SAM file header text and the header text
+        outputBinaryCodec.writeString(headerText, true, false);
+
+        // write the sequence dictionary in binary form; this is redundant with the text header
+        outputBinaryCodec.writeInt(samFileHeader.getSequenceDictionary().size());
+        for (final SAMSequenceRecord sequenceRecord: samFileHeader.getSequenceDictionary().getSequences()) {
+            outputBinaryCodec.writeString(sequenceRecord.getSequenceName(), true, true);
+            outputBinaryCodec.writeInt(sequenceRecord.getSequenceLength());
+        }
+    }
+
+    /**
+     * Writes a header to a BAM file, regenerating the String version of the header. If the caller already has both the
+     * samFileHeader and the String, use the overload of this method that takes both.
+     */
+    protected static void writeHeader(final BinaryCodec outputBinaryCodec, final SAMFileHeader samFileHeader) {
+        // Do not use SAMFileHeader.getTextHeader() as it is not updated when changes to the underlying object are made
+        final String headerString;
+        final Writer stringWriter = new StringWriter();
+        new SAMTextHeaderCodec().encode(stringWriter, samFileHeader, true);
+        headerString = stringWriter.toString();
+
+        writeHeader(outputBinaryCodec, samFileHeader, headerString);
+    }
+
+    protected static void writeHeader(final OutputStream outputStream, final SAMFileHeader samFileHeader) {
+        final BlockCompressedOutputStream blockCompressedOutputStream = new BlockCompressedOutputStream(outputStream, null);
+        final BinaryCodec outputBinaryCodec = new BinaryCodec(new DataOutputStream(blockCompressedOutputStream));
+        writeHeader(outputBinaryCodec, samFileHeader);
+        try {
+            blockCompressedOutputStream.flush();
+        } catch (final IOException ioe) {
+            throw new RuntimeIOException(ioe);
+        }
+    }
+}
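In practice enableBamIndexConstruction() is driven by SAMFileWriterFactory rather than called directly. A minimal sketch of writing a coordinate-sorted BAM with an on-the-fly .bai index (file name and header setup are illustrative; the header must carry a sequence dictionary for indexing to work):

    final SAMFileHeader header = new SAMFileHeader();
    header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
    final SAMFileWriter writer = new SAMFileWriterFactory()
            .setCreateIndex(true)                                   // enables BAM index construction
            .makeBAMWriter(header, true, new File("output.bam"));   // true = records are presorted
    // call writer.addAlignment(record) for each record, supplied in coordinate order
    writer.close();                                                 // also finishes and writes the .bai
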
diff --git a/src/java/htsjdk/samtools/BAMIndex.java b/src/main/java/htsjdk/samtools/BAMIndex.java
similarity index 100%
rename from src/java/htsjdk/samtools/BAMIndex.java
rename to src/main/java/htsjdk/samtools/BAMIndex.java
diff --git a/src/java/htsjdk/samtools/BAMIndexContent.java b/src/main/java/htsjdk/samtools/BAMIndexContent.java
similarity index 100%
rename from src/java/htsjdk/samtools/BAMIndexContent.java
rename to src/main/java/htsjdk/samtools/BAMIndexContent.java
diff --git a/src/java/htsjdk/samtools/BAMIndexMetaData.java b/src/main/java/htsjdk/samtools/BAMIndexMetaData.java
similarity index 100%
rename from src/java/htsjdk/samtools/BAMIndexMetaData.java
rename to src/main/java/htsjdk/samtools/BAMIndexMetaData.java
diff --git a/src/java/htsjdk/samtools/BAMIndexWriter.java b/src/main/java/htsjdk/samtools/BAMIndexWriter.java
similarity index 100%
rename from src/java/htsjdk/samtools/BAMIndexWriter.java
rename to src/main/java/htsjdk/samtools/BAMIndexWriter.java
diff --git a/src/java/htsjdk/samtools/BAMIndexer.java b/src/main/java/htsjdk/samtools/BAMIndexer.java
similarity index 100%
rename from src/java/htsjdk/samtools/BAMIndexer.java
rename to src/main/java/htsjdk/samtools/BAMIndexer.java
diff --git a/src/main/java/htsjdk/samtools/BAMIteratorFilter.java b/src/main/java/htsjdk/samtools/BAMIteratorFilter.java
new file mode 100644
index 0000000..bc7d2c0
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/BAMIteratorFilter.java
@@ -0,0 +1,26 @@
+package htsjdk.samtools;
+
+
+/**
+ * Interface implemented by filtering iterators used for BAM/CRAM readers.
+ */
+interface BAMIteratorFilter {
+    public enum IntervalComparison {
+        BEFORE, AFTER, OVERLAPPING, CONTAINED
+    }
+
+    /**
+     * Type returned by compareToFilter that tells iterators driven by this interface
+     * how to handle each SAMRecord.
+     */
+    public enum FilteringIteratorState {
+        MATCHES_FILTER, STOP_ITERATION, CONTINUE_ITERATION
+    }
+
+    /**
+     * Determine if given record passes the filter, and if it does not, whether iteration
+     * should continue or if this record is beyond the region(s) of interest.
+     */
+    FilteringIteratorState compareToFilter(final SAMRecord record);
+}
+
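To make the contract concrete, here is a minimal sketch of a filter that keeps mapped records and never ends iteration early; the interface is package-private, so a class like this (the name is hypothetical) would live inside htsjdk.samtools:

    class MappedOnlyIteratorFilter implements BAMIteratorFilter {
        @Override
        public FilteringIteratorState compareToFilter(final SAMRecord record) {
            // Unmapped records are skipped but scanning continues; mapped records pass the filter.
            return record.getReadUnmappedFlag()
                    ? FilteringIteratorState.CONTINUE_ITERATION
                    : FilteringIteratorState.MATCHES_FILTER;
        }
    }
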
diff --git a/src/main/java/htsjdk/samtools/BAMQueryMultipleIntervalsIteratorFilter.java b/src/main/java/htsjdk/samtools/BAMQueryMultipleIntervalsIteratorFilter.java
new file mode 100644
index 0000000..8dadc69
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/BAMQueryMultipleIntervalsIteratorFilter.java
@@ -0,0 +1,59 @@
+package htsjdk.samtools;
+
+import htsjdk.samtools.util.CoordMath;
+
+/**
+ * Filters out records that do not match any of the given intervals, according to the requested query type (contained or overlapping).
+ */
+public class BAMQueryMultipleIntervalsIteratorFilter implements BAMIteratorFilter {
+    final QueryInterval[] intervals;
+    final boolean contained;
+    int intervalIndex = 0;
+
+
+    public BAMQueryMultipleIntervalsIteratorFilter(final QueryInterval[] intervals,
+                                                   final boolean contained) {
+        this.contained = contained;
+        this.intervals = intervals;
+    }
+
+    @Override
+    public FilteringIteratorState compareToFilter(final SAMRecord record) {
+        while (intervalIndex < intervals.length) {
+            final IntervalComparison comparison = compareIntervalToRecord(intervals[intervalIndex], record);
+            switch (comparison) {
+                // Interval is before SAMRecord.  Try next interval;
+                case BEFORE: ++intervalIndex; break;
+                // Interval is after SAMRecord.  Keep scanning forward in SAMRecords
+                case AFTER: return FilteringIteratorState.CONTINUE_ITERATION;
+                // Found a good record
+                case CONTAINED: return FilteringIteratorState.MATCHES_FILTER;
+                // Either found a good record, or else keep scanning SAMRecords
+                case OVERLAPPING: return
+                        (contained ? FilteringIteratorState.CONTINUE_ITERATION : FilteringIteratorState.MATCHES_FILTER);
+            }
+        }
+        // Went past the last interval
+        return FilteringIteratorState.STOP_ITERATION;
+    }
+
+    public static IntervalComparison compareIntervalToRecord(final QueryInterval interval, final SAMRecord record) {
+        // interval.end <= 0 implies the end of the reference sequence.
+        final int intervalEnd = (interval.end <= 0? Integer.MAX_VALUE: interval.end);
+        final int alignmentEnd;
+        if (record.getReadUnmappedFlag() && record.getAlignmentStart() != SAMRecord.NO_ALIGNMENT_START) {
+            // Unmapped read with coordinate of mate.
+            alignmentEnd = record.getAlignmentStart();
+        } else {
+            alignmentEnd = record.getAlignmentEnd();
+        }
+
+        if (interval.referenceIndex < record.getReferenceIndex()) return IntervalComparison.BEFORE;
+        else if (interval.referenceIndex > record.getReferenceIndex()) return IntervalComparison.AFTER;
+        else if (intervalEnd < record.getAlignmentStart()) return IntervalComparison.BEFORE;
+        else if (alignmentEnd < interval.start) return IntervalComparison.AFTER;
+        else if (CoordMath.encloses(interval.start, intervalEnd, record.getAlignmentStart(), alignmentEnd)) {
+            return IntervalComparison.CONTAINED;
+        } else return IntervalComparison.OVERLAPPING;
+    }
+}
diff --git a/src/java/htsjdk/samtools/BAMRecord.java b/src/main/java/htsjdk/samtools/BAMRecord.java
similarity index 100%
rename from src/java/htsjdk/samtools/BAMRecord.java
rename to src/main/java/htsjdk/samtools/BAMRecord.java
diff --git a/src/java/htsjdk/samtools/BAMRecordCodec.java b/src/main/java/htsjdk/samtools/BAMRecordCodec.java
similarity index 100%
rename from src/java/htsjdk/samtools/BAMRecordCodec.java
rename to src/main/java/htsjdk/samtools/BAMRecordCodec.java
diff --git a/src/java/htsjdk/samtools/BamFileIoUtils.java b/src/main/java/htsjdk/samtools/BamFileIoUtils.java
similarity index 100%
rename from src/java/htsjdk/samtools/BamFileIoUtils.java
rename to src/main/java/htsjdk/samtools/BamFileIoUtils.java
diff --git a/src/java/htsjdk/samtools/BamIndexValidator.java b/src/main/java/htsjdk/samtools/BamIndexValidator.java
similarity index 100%
rename from src/java/htsjdk/samtools/BamIndexValidator.java
rename to src/main/java/htsjdk/samtools/BamIndexValidator.java
diff --git a/src/java/htsjdk/samtools/Bin.java b/src/main/java/htsjdk/samtools/Bin.java
similarity index 100%
rename from src/java/htsjdk/samtools/Bin.java
rename to src/main/java/htsjdk/samtools/Bin.java
diff --git a/src/java/htsjdk/samtools/BinList.java b/src/main/java/htsjdk/samtools/BinList.java
similarity index 100%
rename from src/java/htsjdk/samtools/BinList.java
rename to src/main/java/htsjdk/samtools/BinList.java
diff --git a/src/java/htsjdk/samtools/BinaryBAMIndexWriter.java b/src/main/java/htsjdk/samtools/BinaryBAMIndexWriter.java
similarity index 100%
rename from src/java/htsjdk/samtools/BinaryBAMIndexWriter.java
rename to src/main/java/htsjdk/samtools/BinaryBAMIndexWriter.java
diff --git a/src/java/htsjdk/samtools/BinaryCigarCodec.java b/src/main/java/htsjdk/samtools/BinaryCigarCodec.java
similarity index 100%
rename from src/java/htsjdk/samtools/BinaryCigarCodec.java
rename to src/main/java/htsjdk/samtools/BinaryCigarCodec.java
diff --git a/src/java/htsjdk/samtools/BinaryTagCodec.java b/src/main/java/htsjdk/samtools/BinaryTagCodec.java
similarity index 100%
rename from src/java/htsjdk/samtools/BinaryTagCodec.java
rename to src/main/java/htsjdk/samtools/BinaryTagCodec.java
diff --git a/src/java/htsjdk/samtools/BinningIndexBuilder.java b/src/main/java/htsjdk/samtools/BinningIndexBuilder.java
similarity index 100%
rename from src/java/htsjdk/samtools/BinningIndexBuilder.java
rename to src/main/java/htsjdk/samtools/BinningIndexBuilder.java
diff --git a/src/java/htsjdk/samtools/BinningIndexContent.java b/src/main/java/htsjdk/samtools/BinningIndexContent.java
similarity index 100%
rename from src/java/htsjdk/samtools/BinningIndexContent.java
rename to src/main/java/htsjdk/samtools/BinningIndexContent.java
diff --git a/src/java/htsjdk/samtools/BrowseableBAMIndex.java b/src/main/java/htsjdk/samtools/BrowseableBAMIndex.java
similarity index 100%
rename from src/java/htsjdk/samtools/BrowseableBAMIndex.java
rename to src/main/java/htsjdk/samtools/BrowseableBAMIndex.java
diff --git a/src/java/htsjdk/samtools/CRAMBAIIndexer.java b/src/main/java/htsjdk/samtools/CRAMBAIIndexer.java
similarity index 100%
rename from src/java/htsjdk/samtools/CRAMBAIIndexer.java
rename to src/main/java/htsjdk/samtools/CRAMBAIIndexer.java
diff --git a/src/main/java/htsjdk/samtools/CRAMCRAIIndexer.java b/src/main/java/htsjdk/samtools/CRAMCRAIIndexer.java
new file mode 100644
index 0000000..4599d9b
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/CRAMCRAIIndexer.java
@@ -0,0 +1,135 @@
+package htsjdk.samtools;
+
+import htsjdk.samtools.cram.CRAIEntry;
+import htsjdk.samtools.cram.CRAIIndex;
+import htsjdk.samtools.cram.build.CramIO;
+import htsjdk.samtools.cram.common.Version;
+import htsjdk.samtools.cram.structure.*;
+import htsjdk.samtools.seekablestream.SeekableStream;
+import htsjdk.samtools.util.RuntimeIOException;
+
+import java.io.BufferedOutputStream;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.IOException;
+import java.util.Scanner;
+import java.util.zip.GZIPInputStream;
+import java.util.zip.GZIPOutputStream;
+
+/**
+ * Indexer for creating/reading/writing a CRAIIndex for a CRAM file/stream. There
+ * are three ways to obtain an index:
+ * <ul>
+ * <li>create an index for an entire CRAM stream and write it to an output stream</li>
+ * <li>create an index on-the-fly by processing one container at a time</li>
+ * <li>read an existing index from an input stream</li>
+ * </ul>
+ */
+public class CRAMCRAIIndexer {
+
+    final private CRAIIndex craiIndex = new CRAIIndex();
+    final private GZIPOutputStream os;
+
+    /**
+     * Create a CRAMCRAIIndexer that writes to the given output stream.
+     * @param os output stream to which the index will be written
+     * @param samHeader SAMFileHeader - used to verify sort order
+     */
+    public CRAMCRAIIndexer(OutputStream os, SAMFileHeader samHeader) {
+        if (samHeader.getSortOrder() != SAMFileHeader.SortOrder.coordinate) {
+            throw new SAMException("CRAM file be coordinate-sorted for indexing.");
+        }
+        try {
+            this.os = new GZIPOutputStream(new BufferedOutputStream(os));
+        }
+        catch (IOException e) {
+            throw new RuntimeIOException("Error opening CRAI index output stream");
+        }
+    }
+
+    /**
+     * Create index entries for a single container.
+     * @param container the container to index
+     */
+    public void processContainer(final Container container) {
+        craiIndex.processContainer(container);
+    }
+
+    // TODO this is only used by test code
+    public void addEntry(CRAIEntry entry) {
+        craiIndex.addEntry(entry);
+    }
+
+    /**
+     * Finish creating the index by writing the accumulated entries out to the stream.
+     */
+    public void finish() {
+        try {
+            craiIndex.writeIndex(os);
+            os.flush();
+            os.close();
+        }
+        catch (IOException e) {
+            throw new RuntimeIOException("Error writing CRAI index to output stream");
+        }
+    }
+
+    /**
+     * Generate and write a CRAI index to an output stream from a CRAM input stream
+     *
+     * @param cramStream CRAM stream to index; must be coordinate sorted
+     * @param craiStream stream for output index
+     */
+    public static void writeIndex(final SeekableStream cramStream, OutputStream craiStream) {
+        try {
+            final CramHeader cramHeader = CramIO.readCramHeader(cramStream);
+            final CRAMCRAIIndexer indexer = new CRAMCRAIIndexer(craiStream, cramHeader.getSamFileHeader());
+            final Version cramVersion = cramHeader.getVersion();
+
+            // get the first container and its offset
+            long offset = cramStream.position();
+            Container container = ContainerIO.readContainer(cramVersion, cramStream);
+
+            while (container != null && !container.isEOF()) {
+                container.offset = offset;
+                indexer.processContainer(container);
+                offset = cramStream.position();
+                container = ContainerIO.readContainer(cramVersion, cramStream);
+            }
+
+            indexer.finish();
+        }
+        catch (IOException e) {
+            throw new RuntimeIOException("Error writing CRAI index to output stream");
+        }
+    }
+
+    /**
+     * Read an input stream containing a .crai index and return a CRAIIndex object.
+     * @param is Input stream to read
+     * @return A CRAIIndex object representing the index.
+     */
+    public static CRAIIndex readIndex(final InputStream is) {
+        CRAIIndex craiIndex = new CRAIIndex();
+        Scanner scanner = null;
+
+        try {
+            scanner = new Scanner(new GZIPInputStream(is));
+            while (scanner.hasNextLine()) {
+                final String line = scanner.nextLine();
+                craiIndex.addEntry(new CRAIEntry(line));
+            }
+        }
+        catch (IOException e) {
+            throw new RuntimeIOException("Error reading CRAI index from output stream");
+        }
+        finally {
+            if (null != scanner) {
+                scanner.close();
+            }
+        }
+
+        return craiIndex;
+    }
+
+}
\ No newline at end of file
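For example, the static helper above can build a .crai for an existing coordinate-sorted CRAM file. A minimal sketch (paths are illustrative; the enclosing method would declare throws IOException):

    final SeekableStream cramStream = new SeekableFileStream(new File("sample.cram"));
    final OutputStream craiStream = new FileOutputStream(new File("sample.cram.crai"));
    CRAMCRAIIndexer.writeIndex(cramStream, craiStream);   // finish() closes the index stream internally
    cramStream.close();

The resulting index can then be loaded back with CRAMCRAIIndexer.readIndex(new FileInputStream("sample.cram.crai")), which returns a CRAIIndex.
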
diff --git a/src/java/htsjdk/samtools/CRAMContainerStreamWriter.java b/src/main/java/htsjdk/samtools/CRAMContainerStreamWriter.java
similarity index 100%
rename from src/java/htsjdk/samtools/CRAMContainerStreamWriter.java
rename to src/main/java/htsjdk/samtools/CRAMContainerStreamWriter.java
diff --git a/src/main/java/htsjdk/samtools/CRAMFileReader.java b/src/main/java/htsjdk/samtools/CRAMFileReader.java
new file mode 100644
index 0000000..acdb8ba
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/CRAMFileReader.java
@@ -0,0 +1,533 @@
+/*******************************************************************************
+ * Copyright 2013 EMBL-EBI
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package htsjdk.samtools;
+
+import htsjdk.samtools.SAMFileHeader.SortOrder;
+import htsjdk.samtools.SamReader.Type;
+import htsjdk.samtools.cram.CRAIIndex;
+import htsjdk.samtools.cram.ref.CRAMReferenceSource;
+import htsjdk.samtools.cram.ref.ReferenceSource;
+import htsjdk.samtools.cram.structure.Container;
+import htsjdk.samtools.cram.structure.ContainerIO;
+import htsjdk.samtools.seekablestream.SeekableFileStream;
+import htsjdk.samtools.seekablestream.SeekableStream;
+import htsjdk.samtools.util.CloseableIterator;
+import htsjdk.samtools.util.CloserUtil;
+import htsjdk.samtools.util.CoordMath;
+import htsjdk.samtools.util.RuntimeEOFException;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.NoSuchElementException;
+
+/**
+ * {@link htsjdk.samtools.BAMFileReader BAMFileReader} analogue for CRAM files.
+ * Supports random access using BAI index file formats.
+ *
+ * @author vadim
+ */
+ at SuppressWarnings("UnusedDeclaration")
+public class CRAMFileReader extends SamReader.ReaderImplementation implements SamReader.Indexing {
+    private File cramFile;
+    private final CRAMReferenceSource referenceSource;
+    private InputStream inputStream;
+    private CRAMIterator iterator;
+    private BAMIndex mIndex;
+    private File mIndexFile;
+    private boolean mEnableIndexCaching;
+    private boolean mEnableIndexMemoryMapping;
+
+    private ValidationStringency validationStringency;
+
+    /**
+     * Create a CRAMFileReader from either a file or input stream using the reference source returned by
+     * {@link ReferenceSource#getDefaultCRAMReferenceSource() getDefaultCRAMReferenceSource}.
+     *
+     * @param cramFile CRAM file to open
+     * @param inputStream CRAM stream to read
+     *
+     * @throws IllegalArgumentException if the {@code cramFile} and the {@code inputStream} are both null
+     * @throws IllegalStateException if a {@link ReferenceSource#getDefaultCRAMReferenceSource() default}
+     * reference source cannot be acquired
+     */
+    public CRAMFileReader(final File cramFile, final InputStream inputStream) {
+        this(cramFile, inputStream, ReferenceSource.getDefaultCRAMReferenceSource());
+    }
+
+    /**
+     * Create a CRAMFileReader from either a file or input stream using the supplied reference source.
+     *
+     * @param cramFile        CRAM file to read
+     * @param inputStream     CRAM stream to read
+     * @param referenceSource a {@link htsjdk.samtools.cram.ref.ReferenceSource source} of
+     *                        reference sequences. May not be null.
+     *
+     * @throws IllegalArgumentException if the {@code cramFile} and the {@code inputStream} are both null
+     * or if the {@code CRAMReferenceSource} is null
+     */
+    public CRAMFileReader(final File cramFile, final InputStream inputStream,
+                          final CRAMReferenceSource referenceSource) {
+        if (cramFile == null && inputStream == null) {
+            throw new IllegalArgumentException("Either file or input stream is required.");
+        }
+        if (referenceSource == null) {
+            throw new IllegalArgumentException("A reference is required for CRAM readers");
+        }
+
+        this.cramFile = cramFile;
+        this.inputStream = inputStream;
+        this.referenceSource = referenceSource;
+        getIterator();
+    }
+
+    /**
+     * Create a CRAMFileReader from a file and optional index file using the supplied reference source. If index file
+     * is supplied then random access will be available.
+     *
+     * @param cramFile        CRAM file to read. May not be null.
+     * @param indexFile       index file to be used for random access. May be null.
+     * @param referenceSource a {@link htsjdk.samtools.cram.ref.CRAMReferenceSource source} of
+     *                        reference sequences. May not be null.
+     * @throws IllegalArgumentException if the {@code cramFile} or the {@code CRAMReferenceSource} is null
+     */
+    public CRAMFileReader(final File cramFile, final File indexFile,
+                          final CRAMReferenceSource referenceSource) {
+        if (cramFile == null)
+            throw new IllegalArgumentException("File is required.");
+        if (referenceSource == null) {
+            throw new IllegalArgumentException("A reference is required for CRAM readers");
+        }
+
+        this.cramFile = cramFile;
+        this.mIndexFile = indexFile;
+        this.referenceSource = referenceSource;
+
+        getIterator();
+    }
+
+    /**
+     * Create a CRAMFileReader from a file using the supplied reference source.
+     *
+     * @param cramFile        CRAM file to read. May not be null.
+     * @param referenceSource a {@link htsjdk.samtools.cram.ref.CRAMReferenceSource source} of
+     *                        reference sequences. May not be null.
+     * @throws IllegalArgumentException if the {@code cramFile} or the {@code CRAMReferenceSource} is null
+     */
+    public CRAMFileReader(final File cramFile, final CRAMReferenceSource referenceSource) {
+        if (cramFile == null)
+            throw new IllegalArgumentException("CRAM file cannot be null.");
+        if (referenceSource == null) {
+            throw new IllegalArgumentException("A reference is required for CRAM readers");
+        }
+
+        this.cramFile = cramFile;
+        this.referenceSource = referenceSource;
+
+        getIterator();
+    }
+
+    /**
+     * Create a CRAMFileReader from an input stream and optional index stream using the supplied reference
+     * source and validation stringency.
+     *
+     * @param inputStream      CRAM stream to read. May not be null.
+     * @param indexInputStream index stream to be used for random access. May be null.
+     * @param referenceSource a {@link htsjdk.samtools.cram.ref.CRAMReferenceSource source} of
+     *                        reference sequences. May not be null.
+     * @param validationStringency Validation stringency to be used when reading
+     *
+     * @throws IllegalArgumentException if the {@code inputStream} or the {@code CRAMReferenceSource} is null
+     */
+    public CRAMFileReader(final InputStream inputStream, final SeekableStream indexInputStream,
+                          final CRAMReferenceSource referenceSource, final ValidationStringency validationStringency) throws IOException {
+        if (inputStream == null) {
+            throw new IllegalArgumentException("Input stream can not be null for CRAM reader");
+        }
+        if (referenceSource == null) {
+            throw new IllegalArgumentException("A reference is required for CRAM readers");
+        }
+
+        this.inputStream = inputStream;
+        this.referenceSource = referenceSource;
+        this.validationStringency = validationStringency;
+
+        iterator = new CRAMIterator(inputStream, referenceSource, validationStringency);
+        if (indexInputStream != null) {
+            SeekableStream baiStream = SamIndexes.asBaiSeekableStreamOrNull(indexInputStream, iterator.getSAMFileHeader().getSequenceDictionary());
+            if (null != baiStream)  {
+                mIndex = new CachingBAMFileIndex(baiStream, iterator.getSAMFileHeader().getSequenceDictionary());
+            }
+            else {
+                throw new IllegalArgumentException("CRAM index must be a BAI or CRAI stream");
+            }
+        }
+    }
+
+    /**
+     * Create a CRAMFileReader from an input stream and optional index file using the supplied reference
+     * source and validation stringency.
+     *
+     * @param stream            CRAM stream to read. May not be null.
+     * @param indexFile         index file to be used for random access. May be null.
+     * @param referenceSource a {@link htsjdk.samtools.cram.ref.CRAMReferenceSource source} of
+     *                        reference sequences. May not be null.
+     * @param validationStringency Validation stringency to be used when reading
+     *
+     * @throws IllegalArgumentException if the {@code inputStream} or the {@code CRAMReferenceSource} is null
+     */
+    public CRAMFileReader(final InputStream stream,
+                          final File indexFile, final CRAMReferenceSource referenceSource,
+                          final ValidationStringency validationStringency) throws IOException {
+        this(stream, indexFile == null ? null: new SeekableFileStream(indexFile), referenceSource, validationStringency);
+    }
+
+    /**
+     * Create a CRAMFileReader from a CRAM file and optional index file using the supplied reference
+     * source and validation stringency.
+     *
+     * @param cramFile        CRAM file to read. May not be null.
+     * @param indexFile       index file to be used for random access. May be null.
+     * @param referenceSource a {@link htsjdk.samtools.cram.ref.CRAMReferenceSource source} of
+     *                        reference sequences. May not be null.
+     * @param validationStringency Validation stringency to be used when reading
+     *
+     * @throws IllegalArgumentException if the {@code cramFile} or the {@code CRAMReferenceSource} is null
+     */
+    public CRAMFileReader(final File cramFile,
+                          final File indexFile, final CRAMReferenceSource referenceSource,
+                          final ValidationStringency validationStringency) throws IOException {
+        this(new FileInputStream(cramFile), indexFile, referenceSource, validationStringency);
+        this.cramFile = cramFile;
+    }
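+
+    /*
+     * A minimal usage sketch, kept here as a comment only; the file names are illustrative
+     * placeholders.  A CRAMFileReader always needs a CRAMReferenceSource; below one is built
+     * from a reference FASTA, and the companion .crai index enables the query methods defined
+     * further down in this class.
+     *
+     *   final CRAMReferenceSource ref = new ReferenceSource(new File("ref.fasta"));
+     *   final CRAMFileReader reader = new CRAMFileReader(
+     *           new File("sample.cram"), new File("sample.cram.crai"), ref);
+     *   final SAMRecordIterator it = reader.getIterator();
+     *   while (it.hasNext()) {
+     *       final SAMRecord record = it.next();
+     *       // process record
+     *   }
+     *   reader.close();
+     */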
+
+    @Override
+    void enableIndexCaching(final boolean enabled) {
+        // relevant to BAI only
+        mEnableIndexCaching = enabled;
+    }
+
+    @Override
+    void enableIndexMemoryMapping(final boolean enabled) {
+        // relevant to BAI only
+        mEnableIndexMemoryMapping = enabled;
+    }
+
+    @Override
+    void enableCrcChecking(final boolean enabled) {
+        // inapplicable to CRAM: do nothing
+    }
+
+    @Override
+    void setSAMRecordFactory(final SAMRecordFactory factory) {
+    }
+
+    @Override
+    public boolean hasIndex() {
+        return mIndex != null || mIndexFile != null;
+    }
+
+    @Override
+    public BAMIndex getIndex() {
+        if (!hasIndex())
+            throw new SAMException("No index is available for this CRAM file.");
+        if (mIndex == null) {
+            final SAMSequenceDictionary dictionary = getFileHeader()
+                    .getSequenceDictionary();
+            if (mIndexFile.getName().endsWith(BAMIndex.BAMIndexSuffix)) {
+                mIndex = mEnableIndexCaching ? new CachingBAMFileIndex(mIndexFile,
+                        dictionary, mEnableIndexMemoryMapping)
+                        : new DiskBasedBAMFileIndex(mIndexFile, dictionary,
+                        mEnableIndexMemoryMapping);
+                return mIndex;
+            }
+
+            if (!mIndexFile.getName().endsWith(CRAIIndex.CRAI_INDEX_SUFFIX)) return null;
+            // convert CRAI into BAI:
+            final SeekableStream baiStream;
+            try {
+                baiStream = SamIndexes.asBaiSeekableStreamOrNull(new SeekableFileStream(mIndexFile), iterator.getSAMFileHeader().getSequenceDictionary());
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            }
+
+            mIndex = mEnableIndexCaching ? new CachingBAMFileIndex(baiStream, getFileHeader().getSequenceDictionary()) :
+                    new DiskBasedBAMFileIndex(baiStream, getFileHeader().getSequenceDictionary());
+        }
+        return mIndex;
+    }
+
+    @Override
+    public boolean hasBrowseableIndex() {
+        return false;
+    }
+
+    @Override
+    public BrowseableBAMIndex getBrowseableIndex() {
+        return null;
+    }
+
+    @Override
+    public SAMRecordIterator iterator(final SAMFileSpan fileSpan) {
+        // get the file coordinates for the span:
+        final long[] coordinateArray = ((BAMFileSpan) fileSpan).toCoordinateArray();
+        if (coordinateArray == null || coordinateArray.length == 0) return emptyIterator;
+        try {
+            // create an input stream that reads the source cram stream only within the coordinate pairs:
+            final SeekableStream seekableStream = getSeekableStreamOrFailWithRTE();
+            return new CRAMIterator(seekableStream, referenceSource, coordinateArray, validationStringency);
+        } catch (final IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    @Override
+    public SAMFileHeader getFileHeader() {
+        return iterator.getSAMFileHeader();
+    }
+
+    @Override
+    public SAMRecordIterator getIterator() {
+        if (iterator != null && cramFile == null)
+            return iterator;
+        try {
+            final CRAMIterator newIterator;
+            if (cramFile != null) {
+                newIterator = new CRAMIterator(new FileInputStream(cramFile),
+                        referenceSource, validationStringency);
+            } else
+                newIterator = new CRAMIterator(inputStream, referenceSource, validationStringency);
+
+            iterator = newIterator;
+            return iterator;
+        } catch (final Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    @Override
+    public CloseableIterator<SAMRecord> getIterator(final SAMFileSpan fileSpan) {
+        return iterator(fileSpan);
+    }
+
+    @Override
+    public SAMFileSpan getFilePointerSpanningReads() {
+        return new BAMFileSpan(new Chunk(iterator.firstContainerOffset << 16, Long.MAX_VALUE));
+    }
+
+    private static final SAMRecordIterator emptyIterator = new SAMRecordIterator() {
+
+        @Override
+        public boolean hasNext() {
+            return false;
+        }
+
+        @Override
+        public SAMRecord next() {
+            throw new RuntimeException("No records.");
+        }
+
+        @Override
+        public void remove() {
+            throw new RuntimeException("Remove not supported.");
+        }
+
+        @Override
+        public void close() {
+        }
+
+        @Override
+        public SAMRecordIterator assertSorted(final SortOrder sortOrder) {
+            return this;
+        }
+    };
+
+    @Override
+    public CloseableIterator<SAMRecord> queryAlignmentStart(final String sequence,
+                                                            final int start) {
+        final SAMFileHeader fileHeader = getFileHeader();
+        final int referenceIndex = fileHeader.getSequenceIndex(sequence);
+        return new CRAMIntervalIterator(new QueryInterval[]{new QueryInterval(referenceIndex, start, -1)}, true);
+    }
+
+    @Override
+    public CloseableIterator<SAMRecord> queryUnmapped() {
+        final long startOfLastLinearBin = getIndex().getStartOfLastLinearBin();
+
+        final SeekableStream seekableStream = getSeekableStreamOrFailWithRTE();
+        final CRAMIterator newIterator;
+        try {
+            seekableStream.seek(0);
+            newIterator = new CRAMIterator(seekableStream, referenceSource, validationStringency);
+            seekableStream.seek(startOfLastLinearBin >>> 16);
+            final Container container = ContainerIO.readContainerHeader(newIterator.getCramHeader().getVersion().major, seekableStream);
+            seekableStream.seek(seekableStream.position() + container.containerByteSize);
+            iterator = newIterator;
+            boolean atAlignments;
+            do {
+                atAlignments = iterator.advanceToAlignmentInContainer(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX, SAMRecord.NO_ALIGNMENT_START);
+            } while (!atAlignments && iterator.hasNext());
+        } catch (final IOException e) {
+            throw new RuntimeEOFException(e);
+        }
+
+        return iterator;
+    }
+
+    private SeekableStream getSeekableStreamOrFailWithRTE() {
+        SeekableStream seekableStream = null;
+        if (cramFile != null) {
+            try {
+                seekableStream = new SeekableFileStream(cramFile);
+            } catch (final FileNotFoundException e) {
+                throw new RuntimeException(e);
+            }
+        } else if (inputStream instanceof SeekableStream) {
+            seekableStream = (SeekableStream) inputStream;
+        }
+        return seekableStream;
+    }
+
+    @Override
+    public void close() {
+        CloserUtil.close(iterator);
+        CloserUtil.close(inputStream);
+        CloserUtil.close(mIndex);
+    }
+
+    @Override
+    void setValidationStringency(final ValidationStringency validationStringency) {
+        this.validationStringency = validationStringency;
+        if (iterator != null) iterator.setValidationStringency(validationStringency);
+    }
+
+    @Override
+    public ValidationStringency getValidationStringency() {
+        return validationStringency;
+    }
+
+    @Override
+    public CloseableIterator<SAMRecord> query(final QueryInterval[] intervals,
+                                              final boolean contained) {
+        return new CRAMIntervalIterator(intervals, contained);
+    }
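+
+    /*
+     * Interval queries take reference indices rather than contig names; a hedged sketch
+     * (contig name and coordinates are placeholders, and an index file or stream must have
+     * been supplied when the reader was constructed):
+     *
+     *   final int tid = reader.getFileHeader().getSequenceIndex("chr1");
+     *   final QueryInterval[] intervals = { new QueryInterval(tid, 100000, 200000) };
+     *   final CloseableIterator<SAMRecord> overlapping = reader.query(intervals, false);
+     */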
+
+    @Override
+    public Type type() {
+        return Type.CRAM_TYPE;
+    }
+
+    @Override
+    void enableFileSource(final SamReader reader, final boolean enabled) {
+        if (iterator != null)
+            iterator.setFileSource(enabled ? reader : null);
+    }
+
+    private class CRAMIntervalIterator
+            extends BAMQueryMultipleIntervalsIteratorFilter
+            implements SAMRecordIterator {
+
+        // the granularity of this iterator is the container, so the records returned
+        // by it must still be filtered to find those matching the filter criteria
+        private CRAMIterator unfilteredIterator;
+        SAMRecord nextRec = null;
+
+        public CRAMIntervalIterator(final QueryInterval[] queries, final boolean contained) {
+            super(queries, contained);
+
+            long[] coordinates = coordinatesFromQueryIntervals(getIndex(), queries);
+            if (coordinates != null && coordinates.length != 0) {
+                try {
+                    unfilteredIterator = new CRAMIterator(
+                            getSeekableStreamOrFailWithRTE(),
+                            referenceSource,
+                            coordinates,
+                            validationStringency
+                    );
+                } catch (final IOException e) {
+                    throw new RuntimeEOFException(e);
+                }
+                getNextRecord(); // advance to the first record that matches the filter criteria
+            }
+        }
+
+        // convert queries -> merged BAMFileSpan -> coordinate array
+        private long[] coordinatesFromQueryIntervals(final BAMIndex index, final QueryInterval[] queries) {
+            final ArrayList<BAMFileSpan> spanList = new ArrayList<>(queries.length);
+            Arrays.asList(queries).forEach(qi -> spanList.add(index.getSpanOverlapping(qi.referenceIndex, qi.start, qi.end)));
+            final BAMFileSpan[] spanArray = spanList.toArray(new BAMFileSpan[spanList.size()]);
+
+            return BAMFileSpan.merge(spanArray).toCoordinateArray();
+        }
+
+        @Override
+        public SAMRecordIterator assertSorted(final SortOrder sortOrder) {
+            return null;
+        }
+
+        @Override
+        public void close() {
+            if (unfilteredIterator != null) {
+                unfilteredIterator.close();
+            }
+        }
+
+        @Override
+        public boolean hasNext() {
+            return nextRec != null;
+        }
+
+        @Override
+        public SAMRecord next() {
+            if (!hasNext()) {
+                throw new NoSuchElementException("Next called on empty CRAMIntervalIterator");
+            }
+            return getNextRecord();
+        }
+
+        private SAMRecord getNextRecord() {
+            final SAMRecord result = nextRec;
+            nextRec = null;
+            while(nextRec == null && unfilteredIterator.hasNext()) {
+                SAMRecord nextRecord = unfilteredIterator.next();
+                switch(compareToFilter(nextRecord)) {
+                    case MATCHES_FILTER:
+                        nextRec = nextRecord;
+                        break;
+                    case CONTINUE_ITERATION:
+                        continue;
+                    case STOP_ITERATION:
+                        break;
+                    default:
+                        throw new SAMException("Unexpected return from compareToFilter");
+                }
+            }
+            return result;
+        }
+
+        @Override
+        public void remove() {
+            throw new RuntimeException("Method \"remove\" not implemented for CRAMIntervalIterator.");
+        }
+    }
+}
diff --git a/src/java/htsjdk/samtools/CRAMFileWriter.java b/src/main/java/htsjdk/samtools/CRAMFileWriter.java
similarity index 100%
rename from src/java/htsjdk/samtools/CRAMFileWriter.java
rename to src/main/java/htsjdk/samtools/CRAMFileWriter.java
diff --git a/src/main/java/htsjdk/samtools/CRAMIterator.java b/src/main/java/htsjdk/samtools/CRAMIterator.java
new file mode 100644
index 0000000..4238677
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/CRAMIterator.java
@@ -0,0 +1,306 @@
+/*******************************************************************************
+ * Copyright 2013 EMBL-EBI
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package htsjdk.samtools;
+
+import htsjdk.samtools.SAMFileHeader.SortOrder;
+import htsjdk.samtools.cram.build.ContainerParser;
+import htsjdk.samtools.cram.build.Cram2SamRecordFactory;
+import htsjdk.samtools.cram.build.CramContainerIterator;
+import htsjdk.samtools.cram.build.CramNormalizer;
+import htsjdk.samtools.cram.build.CramSpanContainerIterator;
+import htsjdk.samtools.cram.io.CountingInputStream;
+import htsjdk.samtools.cram.ref.CRAMReferenceSource;
+import htsjdk.samtools.cram.structure.Container;
+import htsjdk.samtools.cram.structure.ContainerIO;
+import htsjdk.samtools.cram.structure.CramCompressionRecord;
+import htsjdk.samtools.cram.structure.CramHeader;
+import htsjdk.samtools.cram.structure.Slice;
+import htsjdk.samtools.seekablestream.SeekableStream;
+import htsjdk.samtools.util.Log;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.math.BigInteger;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+
+import htsjdk.samtools.cram.CRAMException;
+
+public class CRAMIterator implements SAMRecordIterator {
+    private static final Log log = Log.getInstance(CRAMIterator.class);
+    private final CountingInputStream countingInputStream;
+    private CramHeader cramHeader;
+    private ArrayList<SAMRecord> records;
+    private SAMRecord nextRecord = null;
+    private CramNormalizer normalizer;
+    private byte[] refs;
+    private int prevSeqId = SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX;
+    public Container container;
+    private SamReader mReader;
+    long firstContainerOffset = 0;
+    private Iterator<Container> containerIterator;
+
+    private ContainerParser parser;
+    private final CRAMReferenceSource referenceSource;
+
+    private Iterator<SAMRecord> iterator = Collections.<SAMRecord>emptyList().iterator();
+
+    private ValidationStringency validationStringency = ValidationStringency.DEFAULT_STRINGENCY;
+
+    public ValidationStringency getValidationStringency() {
+        return validationStringency;
+    }
+
+    public void setValidationStringency(
+            final ValidationStringency validationStringency) {
+        this.validationStringency = validationStringency;
+    }
+
+    private long samRecordIndex;
+    private ArrayList<CramCompressionRecord> cramRecords;
+
+    public CRAMIterator(final InputStream inputStream, final CRAMReferenceSource referenceSource, final ValidationStringency validationStringency)
+            throws IOException {
+        if (null == referenceSource) {
+            throw new CRAMException("A reference source is required for CRAM files");
+        }
+        this.countingInputStream = new CountingInputStream(inputStream);
+        this.referenceSource = referenceSource;
+        this.validationStringency = validationStringency;
+        final CramContainerIterator containerIterator = new CramContainerIterator(this.countingInputStream);
+        cramHeader = containerIterator.getCramHeader();
+        this.containerIterator = containerIterator;
+
+        firstContainerOffset = this.countingInputStream.getCount();
+        records = new ArrayList<SAMRecord>(10000);
+        normalizer = new CramNormalizer(cramHeader.getSamFileHeader(),
+                referenceSource);
+        parser = new ContainerParser(cramHeader.getSamFileHeader());
+    }
+
+    public CRAMIterator(final SeekableStream seekableStream, final CRAMReferenceSource referenceSource, final long[] coordinates, final ValidationStringency validationStringency)
+            throws IOException {
+        if (null == referenceSource) {
+            throw new CRAMException("A reference source is required for CRAM files");
+        }
+        this.countingInputStream = new CountingInputStream(seekableStream);
+        this.referenceSource = referenceSource;
+        this.validationStringency = validationStringency;
+        final CramSpanContainerIterator containerIterator = CramSpanContainerIterator.fromFileSpan(seekableStream, coordinates);
+        cramHeader = containerIterator.getCramHeader();
+        this.containerIterator = containerIterator;
+
+        firstContainerOffset = containerIterator.getFirstContainerOffset();
+        records = new ArrayList<SAMRecord>(10000);
+        normalizer = new CramNormalizer(cramHeader.getSamFileHeader(),
+                referenceSource);
+        parser = new ContainerParser(cramHeader.getSamFileHeader());
+    }
+
+    @Deprecated
+    public CRAMIterator(final SeekableStream seekableStream, final CRAMReferenceSource referenceSource, final long[] coordinates)
+            throws IOException {
+        this(seekableStream, referenceSource, coordinates, ValidationStringency.DEFAULT_STRINGENCY);
+    }
+
+    public CramHeader getCramHeader() {
+        return cramHeader;
+    }
+
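+    /**
+     * Reads the next container from the underlying stream (or container iterator), decodes its
+     * CRAM records, resolves the reference bases required by the container, normalizes the records
+     * and converts them to SAMRecords.  On end of stream the cached records are cleared so that
+     * {@link #hasNext()} returns false.
+     */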
+    void nextContainer() throws IOException, IllegalArgumentException,
+            IllegalAccessException, CRAMException {
+
+        if (containerIterator != null) {
+            if (!containerIterator.hasNext()) {
+                records.clear();
+                nextRecord = null;
+                return;
+            }
+            container = containerIterator.next();
+            if (container.isEOF()) {
+                records.clear();
+                nextRecord = null;
+                return;
+            }
+        } else {
+            container = ContainerIO.readContainer(cramHeader.getVersion(), countingInputStream);
+            if (container.isEOF()) {
+                records.clear();
+                nextRecord = null;
+                return;
+            }
+        }
+
+        if (records == null)
+            records = new ArrayList<SAMRecord>(container.nofRecords);
+        else
+            records.clear();
+        if (cramRecords == null)
+            cramRecords = new ArrayList<CramCompressionRecord>(container.nofRecords);
+        else
+            cramRecords.clear();
+
+        parser.getRecords(container, cramRecords, validationStringency);
+
+        if (container.sequenceId == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX) {
+            refs = new byte[]{};
+            prevSeqId = SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX;
+        } else if (container.sequenceId == Slice.MULTI_REFERENCE) {
+            refs = null;
+            prevSeqId = Slice.MULTI_REFERENCE;
+        } else if (prevSeqId < 0 || prevSeqId != container.sequenceId) {
+            final SAMSequenceRecord sequence = cramHeader.getSamFileHeader()
+                    .getSequence(container.sequenceId);
+            refs = referenceSource.getReferenceBases(sequence, true);
+            if (refs == null) {
+                throw new CRAMException(String.format("Contig %s not found in the reference file.", sequence.getSequenceName()));
+            }
+            prevSeqId = container.sequenceId;
+        }
+
+        for (int i = 0; i < container.slices.length; i++) {
+            final Slice slice = container.slices[i];
+            if (slice.sequenceId < 0)
+                continue;
+            if (validationStringency != ValidationStringency.SILENT && !slice.validateRefMD5(refs)) {
+                log.error(String
+                        .format("Reference sequence MD5 mismatch for slice: seq id %d, start %d, span %d, expected MD5 %s", slice.sequenceId,
+                                slice.alignmentStart, slice.alignmentSpan, String.format("%032x", new BigInteger(1, slice.refMD5))));
+            }
+        }
+
+        normalizer.normalize(cramRecords, refs, 0,
+                container.header.substitutionMatrix);
+
+        final Cram2SamRecordFactory cramToSamRecordFactory = new Cram2SamRecordFactory(
+                cramHeader.getSamFileHeader());
+
+        for (final CramCompressionRecord cramRecord : cramRecords) {
+            final SAMRecord samRecord = cramToSamRecordFactory.create(cramRecord);
+            if (!cramRecord.isSegmentUnmapped()) {
+                final SAMSequenceRecord sequence = cramHeader.getSamFileHeader()
+                        .getSequence(cramRecord.sequenceId);
+                refs = referenceSource.getReferenceBases(sequence, true);
+            }
+
+            samRecord.setValidationStringency(validationStringency);
+
+            if (validationStringency != ValidationStringency.SILENT) {
+                final List<SAMValidationError> validationErrors = samRecord.isValid();
+                SAMUtils.processValidationErrors(validationErrors,
+                        samRecordIndex, validationStringency);
+            }
+
+            if (mReader != null) {
+                // attach the source chunk to the freshly decoded record so SAMRecord.getFileSource() works
+                final long chunkStart = (container.offset << 16) | cramRecord.sliceIndex;
+                final long chunkEnd = ((container.offset << 16) | cramRecord.sliceIndex) + 1;
+                samRecord.setFileSource(new SAMFileSource(mReader,
+                        new BAMFileSpan(new Chunk(chunkStart, chunkEnd))));
+            }
+
+            records.add(samRecord);
+            samRecordIndex++;
+        }
+        cramRecords.clear();
+        iterator = records.iterator();
+    }
+
+    /**
+     * Skip cached records until the given alignment start position is reached.
+     *
+     * @param refIndex reference sequence index
+     * @param pos      alignment start to skip to
+     * @return true if a record at or beyond the requested position was found among the cached
+     *         records of the current container, false otherwise
+     */
+    public boolean advanceToAlignmentInContainer(final int refIndex, final int pos) {
+        if (!hasNext()) return false;
+        int i = 0;
+        for (final SAMRecord record : records) {
+            if (refIndex != SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX && record.getReferenceIndex() != refIndex) continue;
+
+            if (pos <= 0) {
+                if (record.getAlignmentStart() == SAMRecord.NO_ALIGNMENT_START) {
+                    iterator = records.listIterator(i);
+                    return true;
+                }
+            } else {
+                if (record.getAlignmentStart() >= pos) {
+                    iterator = records.listIterator(i);
+                    return true;
+                }
+            }
+            i++;
+        }
+        iterator = Collections.<SAMRecord>emptyList().iterator();
+        return false;
+    }
+
+    @Override
+    public boolean hasNext() {
+        if (container != null && container.isEOF()) return false;
+        if (!iterator.hasNext()) {
+            try {
+                nextContainer();
+            } catch (IOException e) {
+                throw new SAMException(e);
+            } catch (IllegalAccessException e) {
+                throw new SAMException(e);
+            }
+        }
+
+        return !records.isEmpty();
+    }
+
+    @Override
+    public SAMRecord next() {
+        return iterator.next();
+    }
+
+    @Override
+    public void remove() {
+        throw new RuntimeException("Removal of records not implemented.");
+    }
+
+    @Override
+    public void close() {
+        records.clear();
+        //noinspection EmptyCatchBlock
+        try {
+            if (countingInputStream != null)
+                countingInputStream.close();
+        } catch (final IOException e) {
+        }
+    }
+
+    @Override
+    public SAMRecordIterator assertSorted(final SortOrder sortOrder) {
+        throw new RuntimeException("Not implemented.");
+    }
+
+    public SamReader getFileSource() {
+        return mReader;
+    }
+
+    public void setFileSource(final SamReader mReader) {
+        this.mReader = mReader;
+    }
+
+    public SAMFileHeader getSAMFileHeader() {
+        return cramHeader.getSamFileHeader();
+    }
+
+}
diff --git a/src/java/htsjdk/samtools/CachingBAMFileIndex.java b/src/main/java/htsjdk/samtools/CachingBAMFileIndex.java
similarity index 100%
rename from src/java/htsjdk/samtools/CachingBAMFileIndex.java
rename to src/main/java/htsjdk/samtools/CachingBAMFileIndex.java
diff --git a/src/java/htsjdk/samtools/ChainedDownsamplingIterator.java b/src/main/java/htsjdk/samtools/ChainedDownsamplingIterator.java
similarity index 100%
rename from src/java/htsjdk/samtools/ChainedDownsamplingIterator.java
rename to src/main/java/htsjdk/samtools/ChainedDownsamplingIterator.java
diff --git a/src/java/htsjdk/samtools/Chunk.java b/src/main/java/htsjdk/samtools/Chunk.java
similarity index 100%
rename from src/java/htsjdk/samtools/Chunk.java
rename to src/main/java/htsjdk/samtools/Chunk.java
diff --git a/src/java/htsjdk/samtools/Cigar.java b/src/main/java/htsjdk/samtools/Cigar.java
similarity index 100%
rename from src/java/htsjdk/samtools/Cigar.java
rename to src/main/java/htsjdk/samtools/Cigar.java
diff --git a/src/java/htsjdk/samtools/CigarElement.java b/src/main/java/htsjdk/samtools/CigarElement.java
similarity index 100%
rename from src/java/htsjdk/samtools/CigarElement.java
rename to src/main/java/htsjdk/samtools/CigarElement.java
diff --git a/src/java/htsjdk/samtools/CigarOperator.java b/src/main/java/htsjdk/samtools/CigarOperator.java
similarity index 100%
rename from src/java/htsjdk/samtools/CigarOperator.java
rename to src/main/java/htsjdk/samtools/CigarOperator.java
diff --git a/src/java/htsjdk/samtools/ComparableSamRecordIterator.java b/src/main/java/htsjdk/samtools/ComparableSamRecordIterator.java
similarity index 100%
rename from src/java/htsjdk/samtools/ComparableSamRecordIterator.java
rename to src/main/java/htsjdk/samtools/ComparableSamRecordIterator.java
diff --git a/src/java/htsjdk/samtools/ConstantMemoryDownsamplingIterator.java b/src/main/java/htsjdk/samtools/ConstantMemoryDownsamplingIterator.java
similarity index 100%
rename from src/java/htsjdk/samtools/ConstantMemoryDownsamplingIterator.java
rename to src/main/java/htsjdk/samtools/ConstantMemoryDownsamplingIterator.java
diff --git a/src/java/htsjdk/samtools/CoordinateSortedPairInfoMap.java b/src/main/java/htsjdk/samtools/CoordinateSortedPairInfoMap.java
similarity index 100%
rename from src/java/htsjdk/samtools/CoordinateSortedPairInfoMap.java
rename to src/main/java/htsjdk/samtools/CoordinateSortedPairInfoMap.java
diff --git a/src/java/htsjdk/samtools/CustomReaderFactory.java b/src/main/java/htsjdk/samtools/CustomReaderFactory.java
similarity index 100%
rename from src/java/htsjdk/samtools/CustomReaderFactory.java
rename to src/main/java/htsjdk/samtools/CustomReaderFactory.java
diff --git a/src/java/htsjdk/samtools/DefaultSAMRecordFactory.java b/src/main/java/htsjdk/samtools/DefaultSAMRecordFactory.java
similarity index 100%
rename from src/java/htsjdk/samtools/DefaultSAMRecordFactory.java
rename to src/main/java/htsjdk/samtools/DefaultSAMRecordFactory.java
diff --git a/src/main/java/htsjdk/samtools/Defaults.java b/src/main/java/htsjdk/samtools/Defaults.java
new file mode 100644
index 0000000..5e3f6da
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/Defaults.java
@@ -0,0 +1,174 @@
+package htsjdk.samtools;
+
+import htsjdk.samtools.util.Log;
+
+import java.io.File;
+import java.util.Collections;
+import java.util.SortedMap;
+import java.util.TreeMap;
+
+/**
+ * Embodies defaults for global values that affect how the SAM JDK operates. Defaults are encoded in the class
+ * and are also overridable using system properties.
+ *
+ * @author Tim Fennell
+ */
+public class Defaults {
+    private static Log log = Log.getInstance(Defaults.class);
+    
+    /** Should BAM index files be created when writing out coordinate sorted BAM files?  Default = false. */
+    public static final boolean CREATE_INDEX;
+
+    /** Should MD5 files be created when writing out SAM and BAM files?  Default = false. */
+    public static final boolean CREATE_MD5;
+
+    /** Should asynchronous read I/O be used where supported by the samtools package (one thread per file).
+     *  Default = false.
+     */
+    public static final boolean USE_ASYNC_IO_READ_FOR_SAMTOOLS;
+
+    /** Should asynchronous write I/O be used where supported by the samtools package (one thread per file).
+     *  Default = false.
+     */
+    public static final boolean USE_ASYNC_IO_WRITE_FOR_SAMTOOLS;
+
+    /** Should asynchronous write I/O be used where supported by the tribble package (one thread per file).
+     *  Default = false.
+     */
+    public static final boolean USE_ASYNC_IO_WRITE_FOR_TRIBBLE;
+
+    /** Compression level to be used for writing BAM and other block-compressed outputs.  Default = 5. */
+    public static final int COMPRESSION_LEVEL;
+
+    /** Buffer size, in bytes, used whenever reading/writing files or streams.  Default = 128k. */
+    public static final int BUFFER_SIZE;
+
+    /** The output format of the flag field when writing SAM text.  Ignored for reading SAM text. */
+    public static final SamFlagField SAM_FLAG_FIELD_FORMAT;
+
+    /**
+     * Even if BUFFER_SIZE is 0, this is guaranteed to be non-zero.  If BUFFER_SIZE is non-zero,
+     * this == BUFFER_SIZE
+     */
+    public static final int NON_ZERO_BUFFER_SIZE;
+
+    /**
+     * The reference FASTA file.  If this is not set, the file is null.  This file may be required for reading
+     * or writing SAM files (e.g. CRAM).
+     */
+    public static final File REFERENCE_FASTA;
+
+    /** Custom reader factory able to handle URL based resources like ga4gh.
+     *  Expected format: <url prefix>,<fully qualified factory class name>[,<jar file name>]
+     *  E.g. https://www.googleapis.com/genomics/v1beta/reads/,com.google.genomics.ReaderFactory
+     *  OR https://www.googleapis.com/genomics/v1beta/reads/,com.google.genomics.ReaderFactory,/tmp/genomics.jar
+     */
+    public static final String CUSTOM_READER_FACTORY;
+
+    /**
+     * Boolean describing whether downloading a reference file is allowed (for CRAM files),
+     * in case the reference file is not specified by the user
+     * Enabling this is not necessarily a good idea, since this process often fails
+     */
+    public static final boolean USE_CRAM_REF_DOWNLOAD;
+
+    /**
+     * A mask (pattern) to use when building EBI reference service URL for a
+     * given MD5 checksum. Must contain one and only one string placeholder.
+     */
+    public static final String EBI_REFERENCE_SERVICE_URL_MASK;
+
+    /**
+     * Boolean describing whether downloading of SRA native libraries is allowed,
+     * in case such native libraries are not found locally
+     */
+    public static final boolean SRA_LIBRARIES_DOWNLOAD;
+
+
+    static {
+        CREATE_INDEX = getBooleanProperty("create_index", false);
+        CREATE_MD5 = getBooleanProperty("create_md5", false);
+        USE_ASYNC_IO_READ_FOR_SAMTOOLS = getBooleanProperty("use_async_io_read_samtools", false);
+        USE_ASYNC_IO_WRITE_FOR_SAMTOOLS = getBooleanProperty("use_async_io_write_samtools", false);
+        USE_ASYNC_IO_WRITE_FOR_TRIBBLE = getBooleanProperty("use_async_io_write_tribble", false);
+        COMPRESSION_LEVEL = getIntProperty("compression_level", 5);
+        BUFFER_SIZE = getIntProperty("buffer_size", 1024 * 128);
+        if (BUFFER_SIZE == 0) {
+            NON_ZERO_BUFFER_SIZE = 1024 * 128;
+        } else {
+            NON_ZERO_BUFFER_SIZE = BUFFER_SIZE;
+        }
+        REFERENCE_FASTA = getFileProperty("reference_fasta", null);
+        USE_CRAM_REF_DOWNLOAD = getBooleanProperty("use_cram_ref_download", false);
+        EBI_REFERENCE_SERVICE_URL_MASK = "http://www.ebi.ac.uk/ena/cram/md5/%s";
+        CUSTOM_READER_FACTORY = getStringProperty("custom_reader", "");
+        SAM_FLAG_FIELD_FORMAT = SamFlagField.valueOf(getStringProperty("sam_flag_field_format", SamFlagField.DECIMAL.name()));
+        SRA_LIBRARIES_DOWNLOAD = getBooleanProperty("sra_libraries_download", false);
+    }
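+
+    /*
+     * These values are read once, when this class is first initialized, so any override has to be
+     * in place before htsjdk touches Defaults.  A sketch (property names come from the static
+     * block above; the values are only illustrative):
+     *
+     *   java -Dsamjdk.compression_level=1 -Dsamjdk.create_index=true MyProgram
+     *
+     * or, early in main() before any htsjdk I/O:
+     *
+     *   System.setProperty("samjdk.buffer_size", String.valueOf(1024 * 1024));
+     */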
+
+    /**
+     * Returns a map of all default values (keys are names), lexicographically sorted by keys.
+     * The returned map is unmodifiable.
+     * This function is useful for example when logging all defaults.
+     */
+    public static SortedMap<String, Object> allDefaults(){
+        final SortedMap<String, Object> result = new TreeMap<>();
+        result.put("CREATE_INDEX", CREATE_INDEX);
+        result.put("CREATE_MD5", CREATE_MD5);
+        result.put("USE_ASYNC_IO_READ_FOR_SAMTOOLS", USE_ASYNC_IO_READ_FOR_SAMTOOLS);
+        result.put("USE_ASYNC_IO_WRITE_FOR_SAMTOOLS", USE_ASYNC_IO_WRITE_FOR_SAMTOOLS);
+        result.put("USE_ASYNC_IO_WRITE_FOR_TRIBBLE", USE_ASYNC_IO_WRITE_FOR_TRIBBLE);
+        result.put("COMPRESSION_LEVEL", COMPRESSION_LEVEL);
+        result.put("BUFFER_SIZE", BUFFER_SIZE);
+        result.put("NON_ZERO_BUFFER_SIZE", NON_ZERO_BUFFER_SIZE);
+        result.put("REFERENCE_FASTA", REFERENCE_FASTA);
+        result.put("USE_CRAM_REF_DOWNLOAD", USE_CRAM_REF_DOWNLOAD);
+        result.put("EBI_REFERENCE_SERVICE_URL_MASK", EBI_REFERENCE_SERVICE_URL_MASK);
+        result.put("CUSTOM_READER_FACTORY", CUSTOM_READER_FACTORY);
+        result.put("SAM_FLAG_FIELD_FORMAT", SAM_FLAG_FIELD_FORMAT);
+        return Collections.unmodifiableSortedMap(result);
+    }
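+
+    /*
+     * A sketch of the logging use case mentioned above, using this class's own log instance:
+     *
+     *   Defaults.allDefaults().forEach((name, value) -> log.info(name + " : " + value));
+     */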
+
+    /** Gets a string system property, prefixed with "samjdk.", using the given default
+     * if the property does not exist or if the java security manager raises an exception
+     * (for applications started with -Djava.security.manager). */
+    private static String getStringProperty(final String name, final String def) {
+        try {
+            return System.getProperty("samjdk." + name, def);
+        } catch (final java.security.AccessControlException error) {
+            log.warn(error,"java Security Manager forbids 'System.getProperty(\"" + name + "\")' , returning default value: " + def );
+            return def;
+        }
+    }
+
+    /** Checks whether a string system property, prefixed with "samjdk.", exists.
+     * If the property does not exist, or if the java security manager raises an exception
+     * (for applications started with -Djava.security.manager), this method returns false. */
+    private static boolean hasProperty(final String name){
+        try {
+            return null != System.getProperty("samjdk." + name);
+        } catch (final java.security.AccessControlException error) {
+            log.warn(error,"java Security Manager forbids 'System.getProperty(\"" + name + "\")' , returning false");
+            return false;
+        }
+    }
+
+    /** Gets a boolean system property, prefixed with "samjdk." using the default if the property does not exist. */
+    private static boolean getBooleanProperty(final String name, final boolean def) {
+        final String value = getStringProperty(name, Boolean.toString(def));
+        return Boolean.parseBoolean(value);
+    }
+
+    /** Gets an int system property, prefixed with "samjdk." using the default if the property does not exist. */
+    private static int getIntProperty(final String name, final int def) {
+        final String value = getStringProperty(name, Integer.toString(def));
+        return Integer.parseInt(value);
+    }
+
+    /** Gets a File system property, prefixed with "samjdk." using the default if the property does not exist. */
+    private static File getFileProperty(final String name, final String def) {
+        final String value = getStringProperty(name, def);
+        // TODO: assert that it is readable
+        return (null == value) ? null : new File(value);
+    }
+}
diff --git a/src/java/htsjdk/samtools/DiskBasedBAMFileIndex.java b/src/main/java/htsjdk/samtools/DiskBasedBAMFileIndex.java
similarity index 100%
rename from src/java/htsjdk/samtools/DiskBasedBAMFileIndex.java
rename to src/main/java/htsjdk/samtools/DiskBasedBAMFileIndex.java
diff --git a/src/java/htsjdk/samtools/DownsamplingIterator.java b/src/main/java/htsjdk/samtools/DownsamplingIterator.java
similarity index 100%
rename from src/java/htsjdk/samtools/DownsamplingIterator.java
rename to src/main/java/htsjdk/samtools/DownsamplingIterator.java
diff --git a/src/java/htsjdk/samtools/DownsamplingIteratorFactory.java b/src/main/java/htsjdk/samtools/DownsamplingIteratorFactory.java
similarity index 100%
rename from src/java/htsjdk/samtools/DownsamplingIteratorFactory.java
rename to src/main/java/htsjdk/samtools/DownsamplingIteratorFactory.java
diff --git a/src/java/htsjdk/samtools/DuplicateScoringStrategy.java b/src/main/java/htsjdk/samtools/DuplicateScoringStrategy.java
similarity index 100%
rename from src/java/htsjdk/samtools/DuplicateScoringStrategy.java
rename to src/main/java/htsjdk/samtools/DuplicateScoringStrategy.java
diff --git a/src/java/htsjdk/samtools/DuplicateSet.java b/src/main/java/htsjdk/samtools/DuplicateSet.java
similarity index 100%
rename from src/java/htsjdk/samtools/DuplicateSet.java
rename to src/main/java/htsjdk/samtools/DuplicateSet.java
diff --git a/src/java/htsjdk/samtools/DuplicateSetIterator.java b/src/main/java/htsjdk/samtools/DuplicateSetIterator.java
similarity index 100%
rename from src/java/htsjdk/samtools/DuplicateSetIterator.java
rename to src/main/java/htsjdk/samtools/DuplicateSetIterator.java
diff --git a/src/java/htsjdk/samtools/FileTruncatedException.java b/src/main/java/htsjdk/samtools/FileTruncatedException.java
similarity index 100%
rename from src/java/htsjdk/samtools/FileTruncatedException.java
rename to src/main/java/htsjdk/samtools/FileTruncatedException.java
diff --git a/src/java/htsjdk/samtools/FixBAMFile.java b/src/main/java/htsjdk/samtools/FixBAMFile.java
similarity index 100%
rename from src/java/htsjdk/samtools/FixBAMFile.java
rename to src/main/java/htsjdk/samtools/FixBAMFile.java
diff --git a/src/java/htsjdk/samtools/GenomicIndexUtil.java b/src/main/java/htsjdk/samtools/GenomicIndexUtil.java
similarity index 100%
rename from src/java/htsjdk/samtools/GenomicIndexUtil.java
rename to src/main/java/htsjdk/samtools/GenomicIndexUtil.java
diff --git a/src/java/htsjdk/samtools/HighAccuracyDownsamplingIterator.java b/src/main/java/htsjdk/samtools/HighAccuracyDownsamplingIterator.java
similarity index 100%
rename from src/java/htsjdk/samtools/HighAccuracyDownsamplingIterator.java
rename to src/main/java/htsjdk/samtools/HighAccuracyDownsamplingIterator.java
diff --git a/src/java/htsjdk/samtools/LinearIndex.java b/src/main/java/htsjdk/samtools/LinearIndex.java
similarity index 100%
rename from src/java/htsjdk/samtools/LinearIndex.java
rename to src/main/java/htsjdk/samtools/LinearIndex.java
diff --git a/src/java/htsjdk/samtools/MergingSamRecordIterator.java b/src/main/java/htsjdk/samtools/MergingSamRecordIterator.java
similarity index 100%
rename from src/java/htsjdk/samtools/MergingSamRecordIterator.java
rename to src/main/java/htsjdk/samtools/MergingSamRecordIterator.java
diff --git a/src/java/htsjdk/samtools/NotPrimarySkippingIterator.java b/src/main/java/htsjdk/samtools/NotPrimarySkippingIterator.java
similarity index 100%
rename from src/java/htsjdk/samtools/NotPrimarySkippingIterator.java
rename to src/main/java/htsjdk/samtools/NotPrimarySkippingIterator.java
diff --git a/src/java/htsjdk/samtools/QueryInterval.java b/src/main/java/htsjdk/samtools/QueryInterval.java
similarity index 100%
rename from src/java/htsjdk/samtools/QueryInterval.java
rename to src/main/java/htsjdk/samtools/QueryInterval.java
diff --git a/src/java/htsjdk/samtools/ReservedTagConstants.java b/src/main/java/htsjdk/samtools/ReservedTagConstants.java
similarity index 100%
rename from src/java/htsjdk/samtools/ReservedTagConstants.java
rename to src/main/java/htsjdk/samtools/ReservedTagConstants.java
diff --git a/src/java/htsjdk/samtools/SAMBinaryTagAndUnsignedArrayValue.java b/src/main/java/htsjdk/samtools/SAMBinaryTagAndUnsignedArrayValue.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMBinaryTagAndUnsignedArrayValue.java
rename to src/main/java/htsjdk/samtools/SAMBinaryTagAndUnsignedArrayValue.java
diff --git a/src/java/htsjdk/samtools/SAMBinaryTagAndValue.java b/src/main/java/htsjdk/samtools/SAMBinaryTagAndValue.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMBinaryTagAndValue.java
rename to src/main/java/htsjdk/samtools/SAMBinaryTagAndValue.java
diff --git a/src/java/htsjdk/samtools/SAMException.java b/src/main/java/htsjdk/samtools/SAMException.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMException.java
rename to src/main/java/htsjdk/samtools/SAMException.java
diff --git a/src/java/htsjdk/samtools/SAMFileHeader.java b/src/main/java/htsjdk/samtools/SAMFileHeader.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMFileHeader.java
rename to src/main/java/htsjdk/samtools/SAMFileHeader.java
diff --git a/src/main/java/htsjdk/samtools/SAMFileReader.java b/src/main/java/htsjdk/samtools/SAMFileReader.java
new file mode 100644
index 0000000..07189f7
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/SAMFileReader.java
@@ -0,0 +1,751 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools;
+
+
+import htsjdk.samtools.seekablestream.SeekableBufferedStream;
+import htsjdk.samtools.seekablestream.SeekableHTTPStream;
+import htsjdk.samtools.seekablestream.SeekableStream;
+import htsjdk.samtools.util.*;
+
+import java.io.*;
+import java.net.URL;
+import java.util.NoSuchElementException;
+import java.util.zip.GZIPInputStream;
+
+/**
+ * Class for reading and querying SAM/BAM files.  Delegates to appropriate concrete implementation.
+ *
+ * @see SamReaderFactory
+ */
+@Deprecated
+public class SAMFileReader implements SamReader, SamReader.Indexing {
+
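+    /*
+     * This class is deprecated in favour of SamReaderFactory; a hedged sketch of the replacement
+     * usage (the file name is a placeholder):
+     *
+     *   final SamReader reader = SamReaderFactory.makeDefault()
+     *           .validationStringency(ValidationStringency.LENIENT)
+     *           .open(new File("input.bam"));
+     *   for (final SAMRecord record : reader) {
+     *       // process record
+     *   }
+     *   CloserUtil.close(reader);
+     */
+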
+    private static ValidationStringency defaultValidationStringency = ValidationStringency.DEFAULT_STRINGENCY;
+
+    public static ValidationStringency getDefaultValidationStringency() {
+        return defaultValidationStringency;
+    }
+
+    /**
+     * Set validation stringency for all subsequently-created SAMFileReaders.  This is the only way to
+     * change the validation stringency for SAM header.
+     * NOTE: Programs that change this should make sure to have a try/finally clause wrapping the work that
+     * they do, so that the original stringency can be restored after the program's work is done.  This facilitates
+     * calling a program that is usually run stand-alone from another program, without messing up the original
+     * validation stringency.
+     */
+    public static void setDefaultValidationStringency(final ValidationStringency defaultValidationStringency) {
+        SAMFileReader.defaultValidationStringency = defaultValidationStringency;
+    }
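+
+    /*
+     * The restore pattern described above, as a sketch:
+     *
+     *   final ValidationStringency original = SAMFileReader.getDefaultValidationStringency();
+     *   SAMFileReader.setDefaultValidationStringency(ValidationStringency.SILENT);
+     *   try {
+     *       // ... run the embedded tool ...
+     *   } finally {
+     *       SAMFileReader.setDefaultValidationStringency(original);
+     *   }
+     */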
+
+    /**
+     * Returns the SAMSequenceDictionary from the header of the provided dictionary file (a .dict or SAM/BAM file).
+     */
+    public static SAMSequenceDictionary getSequenceDictionary(final File dictionaryFile) {
+        final SAMFileReader samFileReader = new SAMFileReader(dictionaryFile);
+        final SAMSequenceDictionary dict = samFileReader.getFileHeader().getSequenceDictionary();
+        CloserUtil.close(samFileReader);
+        return dict;
+    }
+
+    private boolean mIsBinary = false;
+    private BAMIndex mIndex = null;
+    private SAMRecordFactory samRecordFactory = new DefaultSAMRecordFactory();
+    private ReaderImplementation mReader = null;
+    private boolean useAsyncIO = Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS;
+
+    private File samFile = null;
+
+    private static class EmptySamIterator implements CloseableIterator<SAMRecord> {
+        @Override
+        public boolean hasNext() {
+            return false;
+        }
+
+        @Override
+        public SAMRecord next() {
+            throw new NoSuchElementException("next called on empty iterator");
+        }
+
+        @Override
+        public void remove() {
+            throw new UnsupportedOperationException("Not supported: remove");
+        }
+
+        @Override
+        public void close() {
+            //no-op
+        }
+    }
+
+
+    /**
+     * Prepare to read a SAM or BAM file.  Indexed lookup not allowed because reading from InputStream.
+     */
+    public SAMFileReader(final InputStream stream) {
+        this(stream, false);
+    }
+
+    /**
+     * Prepare to read a SAM or BAM file.  If the given file is a BAM, and has a companion BAI index file
+     * that is named according to the convention, it will be found and opened, and indexed query will be allowed.
+     */
+    public SAMFileReader(final File file) {
+        this(file, null, false);
+    }
+
+    /**
+     * Prepare to read a SAM or BAM file.  If the given file is a BAM, and an index is present, indexed query
+     * will be allowed.
+     *
+     * @param file      SAM or BAM to read.
+     * @param indexFile Index file that is companion to BAM, or null if no index file, or if index file
+     *                  should be found automatically.
+     */
+    public SAMFileReader(final File file, final File indexFile) {
+        this(file, indexFile, false);
+    }
+
+    /**
+     * Read a SAM or BAM file.  Indexed lookup not allowed because reading from InputStream.
+     *
+     * @param stream      input SAM or BAM.  This is buffered internally so caller need not buffer.
+     * @param eagerDecode if true, decode SAM record entirely when reading it.
+     */
+    public SAMFileReader(final InputStream stream, final boolean eagerDecode) {
+        init(stream, null, null, eagerDecode, defaultValidationStringency);
+    }
+
+    /**
+     * Read a SAM or BAM file, possibly with an index file if present.
+     * If the given file is a BAM, and an index is present, indexed query will be allowed.
+     *
+     * @param file        SAM or BAM.
+     * @param eagerDecode if true, decode SAM record entirely when reading it.
+     */
+    public SAMFileReader(final File file, final boolean eagerDecode) {
+        this(file, null, eagerDecode);
+    }
+
+    /**
+     * Read a SAM or BAM file, possibly with an index file. If the given file is a BAM, and an index is present,
+     * indexed query will be allowed.
+     *
+     * @param file        SAM or BAM.
+     * @param indexFile   Location of index file, or null in order to use the default index file (if present).
+     * @param eagerDecode if true, decode SAM record entirely when reading it.
+     */
+    public SAMFileReader(final File file, final File indexFile, final boolean eagerDecode) {
+        init(null, file, indexFile, eagerDecode, defaultValidationStringency);
+    }
+
+    /**
+     * Read a BAM file over HTTP.  Indexed query will be allowed if an index file is provided.
+     *
+     * @param url         BAM.
+     * @param indexFile   Location of index file, or null if indexed access not required.
+     * @param eagerDecode if true, decode SAM record entirely when reading it.
+     */
+    public SAMFileReader(final URL url, final File indexFile, final boolean eagerDecode) {
+        init(new SeekableBufferedStream(new SeekableHTTPStream(url)),
+                indexFile, eagerDecode, defaultValidationStringency);
+    }
+
+    /**
+     * Read a BAM file via caller-supplied mechanism.  Indexed query will be allowed, but
+     * index file must be provided in that case.
+     *
+     * @param strm        BAM -- If the stream is not buffered, caller should wrap in SeekableBufferedStream for
+     *                    better performance.
+     * @param indexFile   Location of index file, or null if indexed access not required.
+     * @param eagerDecode if true, decode SAM record entirely when reading it.
+     */
+    public SAMFileReader(final SeekableStream strm, final File indexFile, final boolean eagerDecode) {
+        init(strm, indexFile, eagerDecode, defaultValidationStringency);
+    }
+
+    /**
+     * @param strm BAM -- If the stream is not buffered, caller should wrap in SeekableBufferedStream for
+     *             better performance.
+     */
+    public SAMFileReader(final SeekableStream strm, final SeekableStream indexStream, final boolean eagerDecode) {
+        init(strm, indexStream, eagerDecode, defaultValidationStringency);
+    }
+
+    public void close() {
+        if (mReader != null) {
+            mReader.close();
+        }
+        mReader = null;
+        mIndex = null;
+    }
+
+    /**
+     * If true, this reader will use asynchronous IO.
+     */
+    public void setUseAsyncIO(final boolean useAsyncIO) {
+        this.useAsyncIO = useAsyncIO;
+    }
+
+    /**
+     * If true, writes the source of every read into the source SAMRecords.
+     *
+     * @param enabled true to write source information into each SAMRecord.
+     */
+    public void enableFileSource(final boolean enabled) {
+        mReader.enableFileSource(this, enabled);
+    }
+
+    /**
+     * If true, uses the caching version of the index reader.
+     *
+     * @param enabled true to use the caching version of the reader.
+     */
+    public void enableIndexCaching(final boolean enabled) {
+        if (mIndex != null)
+            throw new SAMException("Unable to turn on index caching; index file has already been loaded.");
+        mReader.enableIndexCaching(enabled);
+    }
+
+    /**
+     * If false, disable the use of memory mapping for accessing index files (default behavior is to use memory mapping).
+     * This is slower but more scalable when accessing large numbers of BAM files sequentially.
+     *
+     * @param enabled True to use memory mapping, false to use regular I/O.
+     */
+    public void enableIndexMemoryMapping(final boolean enabled) {
+        if (mIndex != null) {
+            throw new SAMException("Unable to change index memory mapping; index file has already been loaded.");
+        }
+        mReader.enableIndexMemoryMapping(enabled);
+    }
+
+    /**
+     * Only meaningful for BAM file readers - enables or disables checking of checksums on uncompressed
+     * data during decompression. Enabling this will increase decompression time by 15-30%.
+     */
+    public void enableCrcChecking(final boolean enabled) {
+        this.mReader.enableCrcChecking(enabled);
+    }
+
+    /**
+     * Override the default SAMRecordFactory class used to instantiate instances of SAMRecord and BAMRecord.
+     */
+    public void setSAMRecordFactory(final SAMRecordFactory factory) {
+        this.samRecordFactory = factory;
+        this.mReader.setSAMRecordFactory(factory);
+    }
+
+    /**
+     * @return True if this is a BAM reader.
+     */
+    public boolean isBinary() {
+        return mIsBinary;
+    }
+
+    /**
+     * @return true if this is a BAM file and has an index
+     */
+    public boolean hasIndex() {
+        return mReader.hasIndex();
+    }
+
+    @Override
+    public Indexing indexing() {
+        return this;
+    }
+
+    /**
+     * Retrieves the index for the given file type.  Ensure that the index is of the specified type.
+     *
+     * @return An index of the given type.
+     */
+    public BAMIndex getIndex() {
+        return mReader.getIndex();
+    }
+
+    /**
+     * Returns true if the supported index is browseable, meaning the bins in it can be traversed
+     * and chunk data inspected and retrieved.
+     *
+     * @return True if the index supports the BrowseableBAMIndex interface.  False otherwise.
+     */
+    public boolean hasBrowseableIndex() {
+        return hasIndex() && getIndex() instanceof BrowseableBAMIndex;
+    }
+
+    /**
+     * Gets an index tagged with the BrowseableBAMIndex interface.  Throws an exception if no such
+     * index is available.
+     *
+     * @return An index with a browseable interface, if possible.
+     * @throws SAMException if no such index is available.
+     */
+    public BrowseableBAMIndex getBrowseableIndex() {
+        final BAMIndex index = getIndex();
+        if (!(index instanceof BrowseableBAMIndex))
+            throw new SAMException("Cannot return index: index created by BAM is not browseable.");
+        return BrowseableBAMIndex.class.cast(index);
+    }
+
+    public SAMFileHeader getFileHeader() {
+        return mReader.getFileHeader();
+    }
+
+    @Override
+    public Type type() {
+        return mReader.type();
+    }
+
+    @Override
+    public String getResourceDescription() {
+        return this.toString();
+    }
+
+    /**
+     * Control validation of SAMRecords as they are read from file.
+     * In order to control validation stringency for SAM Header, call SAMFileReader.setDefaultValidationStringency
+     * before constructing a SAMFileReader.
+     */
+    public void setValidationStringency(final ValidationStringency validationStringency) {
+        mReader.setValidationStringency(validationStringency);
+    }
+
+    /**
+     * Iterate through file in order.  For a SAMFileReader constructed from an InputStream, and for any SAM file,
+     * a 2nd iteration starts where the 1st one left off.  For a BAM constructed from a File, each new iteration
+     * starts at the first record.
+     * <p/>
+     * Only a single open iterator on a SAM or BAM file may be extant at any one time.  If you want to start
+     * a second iteration, the first one must be closed first.
+     */
+    public SAMRecordIterator iterator() {
+        return new AssertingIterator(mReader.getIterator());
+    }
+
+    /**
+     * Iterate through the given chunks in the file.
+     *
+     * @param chunks List of chunks for which to retrieve data.
+     * @return An iterator over the given chunks.
+     */
+    public SAMRecordIterator iterator(final SAMFileSpan chunks) {
+        return new AssertingIterator(mReader.getIterator(chunks));
+    }
+
+    /**
+     * Gets a pointer spanning all reads in the BAM file.
+     *
+     * @return Unbounded pointer to the first record, in chunk format.
+     */
+    public SAMFileSpan getFilePointerSpanningReads() {
+        return mReader.getFilePointerSpanningReads();
+    }
+
+    /**
+     * Iterate over records that match the given interval.  Only valid to call this if hasIndex() == true.
+     * <p/>
+     * Only a single open iterator on a given SAMFileReader may be extant at any one time.  If you want to start
+     * a second iteration, the first one must be closed first.  You can use a second SAMFileReader to iterate
+     * in parallel over the same underlying file.
+     * <p/>
+     * Note that indexed lookup is not perfectly efficient in terms of disk I/O.  I.e. some SAMRecords may be read
+     * and then discarded because they do not match the interval of interest.
+     * <p/>
+     * Note that an unmapped read will be returned by this call if it has a coordinate for the purpose of sorting that
+     * is in the query region.
+     *
+     * @param sequence  Reference sequence of interest.
+     * @param start     1-based, inclusive start of interval of interest. Zero implies start of the reference sequence.
+     * @param end       1-based, inclusive end of interval of interest. Zero implies end of the reference sequence.
+     * @param contained If true, each SAMRecord returned will have its alignment completely contained in the
+     *                  interval of interest.  If false, the alignment of the returned SAMRecords need only overlap the interval of interest.
+     * @return Iterator over the SAMRecords matching the interval.
+     */
+    public SAMRecordIterator query(final String sequence, final int start, final int end, final boolean contained) {
+        final int referenceIndex = getFileHeader().getSequenceIndex(sequence);
+        final CloseableIterator<SAMRecord> currentIterator;
+        if (referenceIndex == -1) {
+            currentIterator = new EmptySamIterator();
+        } else {
+            final QueryInterval[] queryIntervals = {new QueryInterval(referenceIndex, start, end)};
+            currentIterator = mReader.query(queryIntervals, contained);
+        }
+        return new AssertingIterator(currentIterator);
+    }
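+
+    /*
+     * Illustrative usage sketch (editorial example, not part of the upstream source).  Assumes a
+     * coordinate-sorted, indexed BAM at a hypothetical path "reads.bam"; only one iterator may be
+     * open on the reader at a time, so it is closed before any further query:
+     *
+     *     final SAMFileReader reader = new SAMFileReader(new File("reads.bam"));
+     *     final SAMRecordIterator it = reader.query("chr1", 100000, 200000, false);
+     *     try {
+     *         while (it.hasNext()) {
+     *             final SAMRecord rec = it.next();   // records overlapping chr1:100000-200000
+     *         }
+     *     } finally {
+     *         it.close();
+     *         reader.close();
+     *     }
+     */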
+
+    /**
+     * Iterate over records that overlap the given interval.  Only valid to call this if hasIndex() == true.
+     * <p/>
+     * Only a single open iterator on a given SAMFileReader may be extant at any one time.  If you want to start
+     * a second iteration, the first one must be closed first.
+     * <p/>
+     * Note that indexed lookup is not perfectly efficient in terms of disk I/O.  I.e. some SAMRecords may be read
+     * and then discarded because they do not match the interval of interest.
+     * <p/>
+     * Note that an unmapped read will be returned by this call if it has a coordinate for the purpose of sorting that
+     * is in the query region.
+     *
+     * @param sequence Reference sequence of interest.
+     * @param start    1-based, inclusive start of interval of interest. Zero implies start of the reference sequence.
+     * @param end      1-based, inclusive end of interval of interest. Zero implies end of the reference sequence.
+     * @return Iterator over the SAMRecords overlapping the interval.
+     */
+    public SAMRecordIterator queryOverlapping(final String sequence, final int start, final int end) {
+        return query(sequence, start, end, false);
+    }
+
+    /**
+     * Iterate over records that are contained in the given interval.  Only valid to call this if hasIndex() == true.
+     * <p/>
+     * Only a single open iterator on a given SAMFileReader may be extant at any one time.  If you want to start
+     * a second iteration, the first one must be closed first.
+     * <p/>
+     * Note that indexed lookup is not perfectly efficient in terms of disk I/O.  I.e. some SAMRecords may be read
+     * and then discarded because they do not match the interval of interest.
+     * <p/>
+     * Note that an unmapped read will be returned by this call if it has a coordinate for the purpose of sorting that
+     * is in the query region.
+     *
+     * @param sequence Reference sequence of interest.
+     * @param start    1-based, inclusive start of interval of interest. Zero implies start of the reference sequence.
+     * @param end      1-based, inclusive end of interval of interest. Zero implies end of the reference sequence.
+     * @return Iterator over the SAMRecords contained in the interval.
+     */
+    public SAMRecordIterator queryContained(final String sequence, final int start, final int end) {
+        return query(sequence, start, end, true);
+    }
+
+    /**
+     * Iterate over records that match one of the given intervals.  This may be more efficient than querying
+     * each interval separately, because multiple reads of the same SAMRecords is avoided.
+     * <p/>
+     * Only valid to call this if hasIndex() == true.
+     * <p/>
+     * Only a single open iterator on a given SAMFileReader may be extant at any one time.  If you want to start
+     * a second iteration, the first one must be closed first.  You can use a second SAMFileReader to iterate
+     * in parallel over the same underlying file.
+     * <p/>
+     * Note that indexed lookup is not perfectly efficient in terms of disk I/O.  I.e. some SAMRecords may be read
+     * and then discarded because they do not match an interval of interest.
+     * <p/>
+     * Note that an unmapped read will be returned by this call if it has a coordinate for the purpose of sorting that
+     * is in the query region.
+     *
+     * @param intervals Intervals to be queried.  The intervals must be optimized, i.e. in order, with overlapping
+     *                  and abutting intervals merged.  This can be done with {@link htsjdk.samtools.QueryInterval#optimizeIntervals}
+     * @param contained If true, each SAMRecord returned will have its alignment completely contained in one of the
+     *                  intervals of interest.  If false, the alignment of the returned SAMRecords need only overlap one of
+     *                  the intervals of interest.
+     * @return Iterator over the SAMRecords matching the interval.
+     */
+    public SAMRecordIterator query(final QueryInterval[] intervals, final boolean contained) {
+        return new AssertingIterator(mReader.query(intervals, contained));
+    }
+
+    /**
+     * Iterate over records that overlap any of the given intervals.  This may be more efficient than querying
+     * each interval separately, because multiple reads of the same SAMRecords is avoided.
+     * <p/>
+     * Only valid to call this if hasIndex() == true.
+     * <p/>
+     * Only a single open iterator on a given SAMFileReader may be extant at any one time.  If you want to start
+     * a second iteration, the first one must be closed first.
+     * <p/>
+     * Note that indexed lookup is not perfectly efficient in terms of disk I/O.  I.e. some SAMRecords may be read
+     * and then discarded because they do not match the interval of interest.
+     * <p/>
+     * Note that an unmapped read will be returned by this call if it has a coordinate for the purpose of sorting that
+     * is in the query region.
+     *
+     * @param intervals Intervals to be queried.  The intervals must be optimized, i.e. in order, with overlapping
+     *                  and abutting intervals merged.  This can be done with {@link htsjdk.samtools.QueryInterval#optimizeIntervals}
+     * @return Iterator over the SAMRecords overlapping any of the intervals.
+     */
+    public SAMRecordIterator queryOverlapping(final QueryInterval[] intervals) {
+        return query(intervals, false);
+    }
+
+    /**
+     * Iterate over records that are contained in the given interval.  This may be more efficient than querying
+     * each interval separately, because multiple reads of the same SAMRecords is avoided.
+     * <p/>
+     * Only valid to call this if hasIndex() == true.
+     * <p/>
+     * Only a single open iterator on a given SAMFileReader may be extant at any one time.  If you want to start
+     * a second iteration, the first one must be closed first.
+     * <p/>
+     * Note that indexed lookup is not perfectly efficient in terms of disk I/O.  I.e. some SAMRecords may be read
+     * and then discarded because they do not match the interval of interest.
+     * <p/>
+     * Note that an unmapped read will be returned by this call if it has a coordinate for the purpose of sorting that
+     * is in the query region.
+     *
+     * @param intervals Intervals to be queried.  The intervals must be optimized, i.e. in order, with overlapping
+     *                  and abutting intervals merged.  This can be done with {@link htsjdk.samtools.QueryInterval#optimizeIntervals}
+     * @return Iterator over the SAMRecords contained in any of the intervals.
+     */
+    public SAMRecordIterator queryContained(final QueryInterval[] intervals) {
+        return query(intervals, true);
+    }
+
+
+    public SAMRecordIterator queryUnmapped() {
+        return new AssertingIterator(mReader.queryUnmapped());
+    }
+
+    /**
+     * Iterate over records that map to the given sequence and start at the given position.  Only valid to call this if hasIndex() == true.
+     * <p/>
+     * Only a single open iterator on a given SAMFileReader may be extant at any one time.  If you want to start
+     * a second iteration, the first one must be closed first.
+     * <p/>
+     * Note that indexed lookup is not perfectly efficient in terms of disk I/O.  I.e. some SAMRecords may be read
+     * and then discarded because they do not match the interval of interest.
+     * <p/>
+     * Note that an unmapped read will be returned by this call if it has a coordinate for the purpose of sorting that
+     * matches the arguments.
+     *
+     * @param sequence Reference sequence of interest.
+     * @param start    Alignment start of interest.
+     * @return Iterator over the SAMRecords with the given alignment start.
+     */
+    public SAMRecordIterator queryAlignmentStart(final String sequence, final int start) {
+        return new AssertingIterator(mReader.queryAlignmentStart(sequence, start));
+    }
+
+    /**
+     * Fetch the mate for the given read.  Only valid to call this if hasIndex() == true.
+     * This will work whether the mate has a coordinate or not, so long as the given read has correct
+     * mate information.  This method iterates over the SAM file, so there may not be an unclosed
+     * iterator on the SAM file when this method is called.
+     * <p/>
+     * Note that it is not possible to call queryMate when iterating over the SAMFileReader, because queryMate
+     * requires its own iteration, and there cannot be two simultaneous iterations on the same SAMFileReader.  The
+     * work-around is to open a second SAMFileReader on the same input file, and call queryMate on the second
+     * reader.
+     *
+     * @param rec Record for which mate is sought.  Must be a paired read.
+     * @return rec's mate, or null if it cannot be found.
+     */
+    public SAMRecord queryMate(final SAMRecord rec) {
+        if (!rec.getReadPairedFlag()) {
+            throw new IllegalArgumentException("queryMate called for unpaired read.");
+        }
+        if (rec.getFirstOfPairFlag() == rec.getSecondOfPairFlag()) {
+            throw new IllegalArgumentException("SAMRecord must be either first and second of pair, but not both.");
+        }
+        final boolean firstOfPair = rec.getFirstOfPairFlag();
+        final CloseableIterator<SAMRecord> it;
+        if (rec.getMateReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX) {
+            it = queryUnmapped();
+        } else {
+            it = queryAlignmentStart(rec.getMateReferenceName(), rec.getMateAlignmentStart());
+        }
+        try {
+            SAMRecord mateRec = null;
+            while (it.hasNext()) {
+                final SAMRecord next = it.next();
+                if (!next.getReadPairedFlag()) {
+                    if (rec.getReadName().equals(next.getReadName())) {
+                        throw new SAMFormatException("Paired and unpaired reads with same name: " + rec.getReadName());
+                    }
+                    continue;
+                }
+                if (firstOfPair) {
+                    if (next.getFirstOfPairFlag()) continue;
+                } else {
+                    if (next.getSecondOfPairFlag()) continue;
+                }
+                if (rec.getReadName().equals(next.getReadName())) {
+                    if (mateRec != null) {
+                        throw new SAMFormatException("Multiple SAMRecord with read name " + rec.getReadName() +
+                                " for " + (firstOfPair ? "second" : "first") + " end.");
+                    }
+                    mateRec = next;
+                }
+            }
+            return mateRec;
+        } finally {
+            it.close();
+        }
+    }
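+
+    /*
+     * Illustrative usage sketch (editorial example, not part of the upstream source).  queryMate
+     * needs its own iteration, so a second reader is opened on the same hypothetical indexed
+     * "reads.bam" while the first reader is being iterated:
+     *
+     *     final SAMFileReader reader = new SAMFileReader(new File("reads.bam"));
+     *     final SAMFileReader mateReader = new SAMFileReader(new File("reads.bam"));
+     *     for (final SAMRecord rec : reader) {
+     *         if (rec.getReadPairedFlag()) {
+     *             final SAMRecord mate = mateReader.queryMate(rec);   // may be null if not found
+     *         }
+     *     }
+     *     mateReader.close();
+     *     reader.close();
+     */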
+
+
+    private void init(final SeekableStream strm, final File indexFile, final boolean eagerDecode,
+                      final ValidationStringency validationStringency) {
+
+        try {
+            if (streamLooksLikeBam(strm)) {
+                mIsBinary = true;
+                mReader = new BAMFileReader(strm, indexFile, eagerDecode,  useAsyncIO, validationStringency, this.samRecordFactory);
+            } else {
+                throw new SAMFormatException("Unrecognized file format: " + strm);
+            }
+            setValidationStringency(validationStringency);
+        } catch (final IOException e) {
+            throw new RuntimeIOException(e);
+        }
+    }
+
+    private void init(final SeekableStream strm, final SeekableStream indexStream, final boolean eagerDecode,
+                      final ValidationStringency validationStringency) {
+
+        try {
+            if (streamLooksLikeBam(strm)) {
+                mIsBinary = true;
+                mReader = new BAMFileReader(strm, indexStream, eagerDecode, useAsyncIO, validationStringency, this.samRecordFactory);
+            } else {
+                throw new SAMFormatException("Unrecognized file format: " + strm);
+            }
+            setValidationStringency(validationStringency);
+        } catch (final IOException e) {
+            throw new RuntimeIOException(e);
+        }
+    }
+
+    // It's too expensive to examine the remote file to determine its type.
+    // Rely on file extension.
+    private boolean streamLooksLikeBam(final SeekableStream strm) {
+        String source = strm.getSource();
+        if (source == null) return true;
+        source = source.toLowerCase();
+        //Source will typically be a file path or URL
+        //If it's a URL we require one of the query parameters to be bam file
+        return source.endsWith(".bam") || source.contains(".bam?") || source.contains(".bam&") || source.contains(".bam%26");
+    }
+
+    private void init(final InputStream stream, File file, final File indexFile, final boolean eagerDecode,
+                      final ValidationStringency validationStringency) {
+        if (stream != null && file != null) throw new IllegalArgumentException("stream and file are mutually exclusive");
+        this.samFile = file;
+
+        try {
+            BufferedInputStream bufferedStream;
+            // Buffering is required because mark() and reset() are called on the input stream.
+            final int bufferSize = Math.max(Defaults.BUFFER_SIZE, BlockCompressedStreamConstants.MAX_COMPRESSED_BLOCK_SIZE);
+            if (file != null) bufferedStream = new BufferedInputStream(new FileInputStream(file), bufferSize);
+            else bufferedStream = IOUtil.toBufferedStream(stream);
+            if (SamStreams.isBAMFile(bufferedStream)) {
+                mIsBinary = true;
+                if (file == null || !file.isFile()) {
+                    // Handle case in which file is a named pipe, e.g. /dev/stdin or created by mkfifo
+                    mReader = new BAMFileReader(bufferedStream, indexFile, eagerDecode, useAsyncIO, validationStringency, this.samRecordFactory);
+                } else {
+                    bufferedStream.close();
+                    mReader = new BAMFileReader(file, indexFile, eagerDecode, useAsyncIO,  validationStringency, this.samRecordFactory);
+                }
+            } else if (BlockCompressedInputStream.isValidFile(bufferedStream)) {
+                mIsBinary = false;
+                mReader = new SAMTextReader(new BlockCompressedInputStream(bufferedStream), validationStringency, this.samRecordFactory);
+            } else if (SamStreams.isGzippedSAMFile(bufferedStream)) {
+                mIsBinary = false;
+                mReader = new SAMTextReader(new GZIPInputStream(bufferedStream), validationStringency, this.samRecordFactory);
+            } else if (SamStreams.isCRAMFile(bufferedStream)) {
+                if (file == null || !file.isFile()) {
+                    file = null;
+                } else {
+                    bufferedStream.close();
+                    bufferedStream = null;
+                }
+                mReader = new CRAMFileReader(file, bufferedStream);
+            } else if (isSAMFile(bufferedStream)) {
+                if (indexFile != null) {
+                    bufferedStream.close();
+                    throw new RuntimeException("Cannot use index file with textual SAM file");
+                }
+                mIsBinary = false;
+                mReader = new SAMTextReader(bufferedStream, file, validationStringency, this.samRecordFactory);
+            } else {
+                bufferedStream.close();
+                throw new SAMFormatException("Unrecognized file format");
+            }
+
+            setValidationStringency(validationStringency);
+            mReader.setSAMRecordFactory(this.samRecordFactory);
+        } catch (final IOException e) {
+            throw new RuntimeIOException(e);
+        }
+    }
+
+    private static int readBytes(final InputStream stream, final byte[] buffer, final int offset, final int length)
+            throws IOException {
+        int bytesRead = 0;
+        while (bytesRead < length) {
+            final int count = stream.read(buffer, offset + bytesRead, length - bytesRead);
+            if (count <= 0) {
+                break;
+            }
+            bytesRead += count;
+        }
+        return bytesRead;
+    }
+
+    private boolean isSAMFile(final InputStream stream) {
+        // For now, assume every non-binary file is a SAM text file.
+        return true;
+    }
+
+    @Override
+    public String toString() {
+        if (this.samFile == null) {
+            return getClass().getSimpleName() + "{initialized with stream}";
+        } else {
+            return getClass().getSimpleName() + "{" + this.samFile.getAbsolutePath() + "}";
+        }
+    }
+
+    /**
+     * Convenience method to create a QueryInterval
+     *
+     * @param sequence sequence of interest, must exist in sequence dictionary
+     * @param start    1-based start position, must be >= 1
+     * @param end      1-based, inclusive end position.  Zero implies the end of the reference sequence.
+     * @throws java.lang.IllegalArgumentException if sequence not found in sequence dictionary, or start position < 1
+     */
+    public QueryInterval makeQueryInterval(final String sequence, int start, int end) {
+        int referenceIndex = getFileHeader().getSequenceIndex(sequence);
+        if (referenceIndex < 0) {
+            throw new IllegalArgumentException(String.format("Sequence '%s' not found in sequence dictionary", sequence));
+        }
+        if (start < 1) {
+            throw new IllegalArgumentException("Start position must be >= 1");
+        }
+        return new QueryInterval(referenceIndex, start, end);
+    }
+
+    /**
+     * Convenience method to create a QueryInterval that goes from start to end of given sequence.
+     *
+     * @param sequence sequence of interest, must exist in sequence dictionary
+     * @param start    1-based start position, must be >= 1
+     * @throws java.lang.IllegalArgumentException if sequence not found in sequence dictionary, or start position < 1
+     */
+    public QueryInterval makeQueryInterval(final String sequence, int start) {
+        return makeQueryInterval(sequence, start, 0);
+    }
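+
+    /*
+     * Illustrative usage sketch (editorial example, not part of the upstream source).  Builds two
+     * intervals, merges and sorts them as query(QueryInterval[], boolean) requires, and iterates the
+     * overlapping reads of a hypothetical indexed "reads.bam":
+     *
+     *     final SAMFileReader reader = new SAMFileReader(new File("reads.bam"));
+     *     final QueryInterval[] intervals = QueryInterval.optimizeIntervals(new QueryInterval[]{
+     *             reader.makeQueryInterval("chr1", 1000, 2000),
+     *             reader.makeQueryInterval("chr2", 500)          // from 500 to the end of chr2
+     *     });
+     *     final SAMRecordIterator it = reader.query(intervals, false);
+     *     try {
+     *         while (it.hasNext()) { final SAMRecord rec = it.next(); }
+     *     } finally {
+     *         it.close();
+     *         reader.close();
+     *     }
+     */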
+
+}
diff --git a/src/java/htsjdk/samtools/SAMFileSource.java b/src/main/java/htsjdk/samtools/SAMFileSource.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMFileSource.java
rename to src/main/java/htsjdk/samtools/SAMFileSource.java
diff --git a/src/java/htsjdk/samtools/SAMFileSpan.java b/src/main/java/htsjdk/samtools/SAMFileSpan.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMFileSpan.java
rename to src/main/java/htsjdk/samtools/SAMFileSpan.java
diff --git a/src/java/htsjdk/samtools/SAMFileTruncatedReader.java b/src/main/java/htsjdk/samtools/SAMFileTruncatedReader.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMFileTruncatedReader.java
rename to src/main/java/htsjdk/samtools/SAMFileTruncatedReader.java
diff --git a/src/java/htsjdk/samtools/SAMFileWriter.java b/src/main/java/htsjdk/samtools/SAMFileWriter.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMFileWriter.java
rename to src/main/java/htsjdk/samtools/SAMFileWriter.java
diff --git a/src/main/java/htsjdk/samtools/SAMFileWriterFactory.java b/src/main/java/htsjdk/samtools/SAMFileWriterFactory.java
new file mode 100644
index 0000000..61f1c9c
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/SAMFileWriterFactory.java
@@ -0,0 +1,505 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools;
+
+import htsjdk.samtools.cram.ref.ReferenceSource;
+import htsjdk.samtools.util.BlockCompressedOutputStream;
+import htsjdk.samtools.util.IOUtil;
+import htsjdk.samtools.util.Log;
+import htsjdk.samtools.util.Md5CalculatingOutputStream;
+import htsjdk.samtools.util.RuntimeIOException;
+import htsjdk.samtools.util.zip.DeflaterFactory;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.zip.Deflater;
+
+/**
+ * Create a writer for writing SAM, BAM, or CRAM files.
+ */
+public class SAMFileWriterFactory implements Cloneable {
+    private final static Log log = Log.getInstance(SAMFileWriterFactory.class);
+    private static boolean defaultCreateIndexWhileWriting = Defaults.CREATE_INDEX;
+    private boolean createIndex = defaultCreateIndexWhileWriting;
+    private static boolean defaultCreateMd5File = Defaults.CREATE_MD5;
+    private boolean createMd5File = defaultCreateMd5File;
+    private boolean useAsyncIo = Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS;
+    private int asyncOutputBufferSize = AsyncSAMFileWriter.DEFAULT_QUEUE_SIZE;
+    private int bufferSize = Defaults.BUFFER_SIZE;
+    private File tmpDir;
+    /** Compression level: 0 = minimum, 9 = maximum. */
+    private int compressionLevel = BlockCompressedOutputStream.getDefaultCompressionLevel();
+    private SamFlagField samFlagFieldOutput = SamFlagField.NONE;
+    private Integer maxRecordsInRam = null;
+    private DeflaterFactory deflaterFactory = BlockCompressedOutputStream.getDefaultDeflaterFactory();
+
+    /** simple constructor */
+    public SAMFileWriterFactory() {
+    }
+    
+    /** copy constructor */
+    public SAMFileWriterFactory( final SAMFileWriterFactory other) {
+        if( other == null ) throw new IllegalArgumentException("SAMFileWriterFactory(null)");
+        this.createIndex = other.createIndex;
+        this.createMd5File = other.createMd5File;
+        this.useAsyncIo = other.useAsyncIo;
+        this.asyncOutputBufferSize = other.asyncOutputBufferSize;
+        this.bufferSize = other.bufferSize;
+        this.tmpDir = other.tmpDir;
+        this.compressionLevel = other.compressionLevel;
+        this.maxRecordsInRam = other.maxRecordsInRam;
+    }
+    
+    @Override
+    public SAMFileWriterFactory clone() {
+        return new SAMFileWriterFactory(this);
+    }
+
+    /**
+     * Sets the default for whether to create md5 files for BAM files created by this factory.
+     */
+    public static void setDefaultCreateMd5File(final boolean createMd5File) {
+        defaultCreateMd5File = createMd5File;
+    }
+
+    /**
+     * Sets whether to create md5Files for BAMs from this factory.
+     */
+    public SAMFileWriterFactory setCreateMd5File(final boolean createMd5File) {
+        this.createMd5File = createMd5File;
+        return this;
+    }
+
+    /**
+     * Set the deflater factory used by BAM writers created by this writer factory. Must not be null.
+     * If this method is not called, the default {@link DeflaterFactory} is used, which creates the default JDK {@link Deflater}.
+     * This method returns the SAMFileWriterFactory itself. */
+    public SAMFileWriterFactory setDeflaterFactory(final DeflaterFactory deflaterFactory){
+        if (deflaterFactory == null){
+            throw new IllegalArgumentException("null deflater factory");
+        }
+        this.deflaterFactory = deflaterFactory;
+        return this;
+    }
+
+    /** Set the compression level: 0 = none (fastest), 9 = maximum. */
+    public SAMFileWriterFactory setCompressionLevel(final int compressionLevel) {
+        this.compressionLevel = Math.min(9, Math.max(0, compressionLevel));
+        return this;
+    }
+    
+    public int getCompressionLevel() {
+        return compressionLevel;
+    }
+    
+    /**
+     * Sets the default for subsequent SAMFileWriterFactories
+     * that do not specify whether to create an index.
+     * If a BAM (not SAM) file is created, the setting is true, and the file header specifies coordinate order,
+     * then a BAM index file will be written along with the BAM file.
+     *
+     * @param setting whether to attempt to create a BAM index while creating the BAM file
+     */
+    public static void setDefaultCreateIndexWhileWriting(final boolean setting) {
+        defaultCreateIndexWhileWriting = setting;
+    }
+
+    /**
+     * Convenience method allowing new SAMFileWriterFactory().setCreateIndex(true).
+     * Equivalent to SAMFileWriterFactory.setDefaultCreateIndexWhileWriting(true); new SAMFileWriterFactory();
+     * If a BAM or CRAM (not SAM) file is created, the setting is true, and the file header specifies coordinate order,
+     * then a BAM index file will be written along with the BAM file.
+     *
+     * @param setting whether to attempt to create a BAM index while creating the BAM file.
+     * @return this factory object
+     */
+    public SAMFileWriterFactory setCreateIndex(final boolean setting) {
+        this.createIndex = setting;
+        return this;
+    }
+
+    /**
+     * Before creating a writer that is not presorted, this method may be called in order to override
+     * the default number of SAMRecords stored in RAM before spilling to disk
+     * (c.f. SAMFileWriterImpl.MAX_RECORDS_IN_RAM).  When writing very large sorted SAM files, you may need
+     * to call this method in order to avoid running out of file handles.  The RAM available to the JVM may need
+     * to be increased in order to hold the specified number of records in RAM.  This value affects the number
+     * of records stored in subsequent calls to one of the make...() methods.
+     *
+     * @param maxRecordsInRam Number of records to store in RAM before spilling to temporary file when
+     *                        creating a sorted SAM or BAM file.
+     */
+    public SAMFileWriterFactory setMaxRecordsInRam(final int maxRecordsInRam) {
+        this.maxRecordsInRam = maxRecordsInRam;
+        return this;
+    }
+
+    /**
+     * Turn on or off the use of asynchronous IO for writing output SAM and BAM files.  If true then
+     * each SAMFileWriter creates a dedicated thread which is used for compression and IO activities.
+     */
+    public SAMFileWriterFactory setUseAsyncIo(final boolean useAsyncIo) {
+        this.useAsyncIo = useAsyncIo;
+        return this;
+    }
+
+    /**
+     * If and only if using asynchronous IO then sets the maximum number of records that can be buffered per
+     * SAMFileWriter before producers will block when trying to write another SAMRecord.
+     */
+    public SAMFileWriterFactory setAsyncOutputBufferSize(final int asyncOutputBufferSize) {
+        this.asyncOutputBufferSize = asyncOutputBufferSize;
+        return this;
+    }
+
+    /**
+     * Controls size of write buffer.
+     * Default value: {@link htsjdk.samtools.Defaults#BUFFER_SIZE}
+     */
+    public SAMFileWriterFactory setBufferSize(final int bufferSize) {
+        this.bufferSize = bufferSize;
+        return this;
+    }
+
+    /**
+     * Set the temporary directory to use when sort data.
+     *
+     * @param tmpDir Path to the temporary directory
+     */
+    public SAMFileWriterFactory setTempDirectory(final File tmpDir) {
+        this.tmpDir = tmpDir;
+        return this;
+    }
+
+    /**
+     * Set the output format of the flag field; applies only when writing SAM text.
+     * Default value: {@link htsjdk.samtools.SamFlagField#DECIMAL}
+     */
+    public SAMFileWriterFactory setSamFlagFieldOutput(final SamFlagField samFlagFieldOutput) {
+        if (samFlagFieldOutput == null) throw new IllegalArgumentException("Sam flag field was null");
+        this.samFlagFieldOutput = samFlagFieldOutput;
+        return this;
+    }
+
+    /**
+     * Create a BAMFileWriter that is ready to receive SAMRecords.  Uses default compression level.
+     *
+     * @param header     entire header. Sort order is determined by the sortOrder property of this arg.
+     * @param presorted  if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
+     * @param outputFile where to write the output.
+     */
+    public SAMFileWriter makeBAMWriter(final SAMFileHeader header, final boolean presorted, final File outputFile) {
+        return makeBAMWriter(header, presorted, outputFile, this.getCompressionLevel());
+    }
+
+    /**
+     * Create a BAMFileWriter that is ready to receive SAMRecords.
+     *
+     * @param header           entire header. Sort order is determined by the sortOrder property of this arg.
+     * @param presorted        if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
+     * @param outputFile       where to write the output.
+     * @param compressionLevel Override default compression level with the given value, between 0 (fastest) and 9 (smallest).
+     */
+    public SAMFileWriter makeBAMWriter(final SAMFileHeader header, final boolean presorted, final File outputFile,
+                                       final int compressionLevel) {
+        try {
+            final boolean createMd5File = this.createMd5File && IOUtil.isRegularPath(outputFile);
+            if (this.createMd5File && !createMd5File) {
+                log.warn("Cannot create MD5 file for BAM because output file is not a regular file: " + outputFile.getAbsolutePath());
+            }
+            OutputStream os = IOUtil.maybeBufferOutputStream(new FileOutputStream(outputFile, false), bufferSize);
+            if (createMd5File) os = new Md5CalculatingOutputStream(os, new File(outputFile.getAbsolutePath() + ".md5"));
+            final BAMFileWriter ret = new BAMFileWriter(os, outputFile, compressionLevel, deflaterFactory);
+            final boolean createIndex = this.createIndex && IOUtil.isRegularPath(outputFile);
+            if (this.createIndex && !createIndex) {
+                log.warn("Cannot create index for BAM because output file is not a regular file: " + outputFile.getAbsolutePath());
+            }
+            if (this.tmpDir != null) ret.setTempDirectory(this.tmpDir);
+            initializeBAMWriter(ret, header, presorted, createIndex);
+
+            if (this.useAsyncIo) return new AsyncSAMFileWriter(ret, this.asyncOutputBufferSize);
+            else return ret;
+        } catch (final IOException ioe) {
+            throw new RuntimeIOException("Error opening file: " + outputFile.getAbsolutePath());
+        }
+    }
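+
+    /*
+     * Illustrative usage sketch (editorial example, not part of the upstream source).  Writes a
+     * coordinate-sorted BAM, plus index and MD5 file, to a hypothetical "out.bam"; in practice the
+     * header usually comes from an input reader rather than being built by hand:
+     *
+     *     final SAMFileHeader header = new SAMFileHeader();
+     *     header.addSequence(new SAMSequenceRecord("chr1", 1000000));
+     *     header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
+     *     final SAMFileWriter writer = new SAMFileWriterFactory()
+     *             .setCreateIndex(true)
+     *             .setCreateMd5File(true)
+     *             .makeBAMWriter(header, false, new File("out.bam"));
+     *     // writer.addAlignment(record); ... records are sorted by the writer because presorted == false
+     *     writer.close();
+     */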
+
+    private void initializeBAMWriter(final BAMFileWriter writer, final SAMFileHeader header, final boolean presorted, final boolean createIndex) {
+        writer.setSortOrder(header.getSortOrder(), presorted);
+        if (maxRecordsInRam != null) {
+            writer.setMaxRecordsInRam(maxRecordsInRam);
+        }
+        writer.setHeader(header);
+        if (createIndex && writer.getSortOrder().equals(SAMFileHeader.SortOrder.coordinate)) {
+            writer.enableBamIndexConstruction();
+        }
+    }
+
+    /**
+     * Create a SAMTextWriter that is ready to receive SAMRecords.
+     *
+     * @param header     entire header. Sort order is determined by the sortOrder property of this arg.
+     * @param presorted  if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
+     * @param outputFile where to write the output.
+     */
+    public SAMFileWriter makeSAMWriter(final SAMFileHeader header, final boolean presorted, final File outputFile) {
+        /**
+         * Use the value specified by Defaults.SAM_FLAG_FIELD_FORMAT when the samFlagFieldOutput value has not been set.  This should
+         * be SamFlagField.DECIMAL when the user has not set Defaults.SAM_FLAG_FIELD_FORMAT.
+         */
+        if (samFlagFieldOutput == SamFlagField.NONE) {
+            samFlagFieldOutput = Defaults.SAM_FLAG_FIELD_FORMAT;
+        }
+        try {
+            final SAMTextWriter ret = this.createMd5File
+                    ? new SAMTextWriter(new Md5CalculatingOutputStream(new FileOutputStream(outputFile, false),
+                    new File(outputFile.getAbsolutePath() + ".md5")), samFlagFieldOutput)
+                    : new SAMTextWriter(outputFile, samFlagFieldOutput);
+            ret.setSortOrder(header.getSortOrder(), presorted);
+            if (maxRecordsInRam != null) {
+                ret.setMaxRecordsInRam(maxRecordsInRam);
+            }
+            ret.setHeader(header);
+
+            if (this.useAsyncIo) return new AsyncSAMFileWriter(ret, this.asyncOutputBufferSize);
+            else return ret;
+        } catch (final IOException ioe) {
+            throw new RuntimeIOException("Error opening file: " + outputFile.getAbsolutePath());
+        }
+    }
+
+    /**
+     * Create a SAMTextWriter for writing to a stream that is ready to receive SAMRecords.
+     * This method does not support the creation of an MD5 file
+     *
+     * @param header    entire header. Sort order is determined by the sortOrder property of this arg.
+     * @param presorted if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
+     * @param stream    the stream to write records to.  Note that this method does not buffer the stream, so the
+     *                  caller must buffer if desired.  Note that PrintStream is buffered.
+     */
+    public SAMFileWriter makeSAMWriter(final SAMFileHeader header, final boolean presorted, final OutputStream stream) {
+        /**
+         * Use the value specified by Defaults.SAM_FLAG_FIELD_FORMAT when the samFlagFieldOutput value has not been set.  This should
+         * be SamFlagField.DECIMAL when the user has not set Defaults.SAM_FLAG_FIELD_FORMAT.
+         */
+        if (samFlagFieldOutput == SamFlagField.NONE) {
+            samFlagFieldOutput = Defaults.SAM_FLAG_FIELD_FORMAT;
+        }
+        return initWriter(header, presorted, false, new SAMTextWriter(stream, samFlagFieldOutput));
+    }
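+
+    /*
+     * Illustrative usage sketch (editorial example, not part of the upstream source).  Streams SAM
+     * text to standard output; 'header' and 'record' stand in for objects obtained elsewhere:
+     *
+     *     final SAMFileWriter writer = new SAMFileWriterFactory().makeSAMWriter(header, true, System.out);
+     *     writer.addAlignment(record);   // records must already be in the header's sort order (presorted == true)
+     *     writer.close();
+     */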
+
+    /**
+     * Create a BAMFileWriter for writing to a stream that is ready to receive SAMRecords.
+     * This method does not support the creation of an MD5 file
+     *
+     * @param header    entire header. Sort order is determined by the sortOrder property of this arg.
+     * @param presorted if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
+     * @param stream    the stream to write records to.  Note that this method does not buffer the stream, so the
+     *                  caller must buffer if desired.  Note that PrintStream is buffered.
+     */
+
+    public SAMFileWriter makeBAMWriter(final SAMFileHeader header, final boolean presorted, final OutputStream stream) {
+        return initWriter(header, presorted, true, new BAMFileWriter(stream, null, this.getCompressionLevel(), this.deflaterFactory));
+    }
+
+    /**
+     * Initialize SAMTextWriter or a BAMFileWriter and possibly wrap in AsyncSAMFileWriter
+     *
+     * @param header    entire header. Sort order is determined by the sortOrder property of this arg.
+     * @param presorted if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
+     * @param binary    do we want to generate a BAM or a SAM
+     * @param writer    SAM or BAM writer to initialize and maybe wrap.
+     */
+
+    private SAMFileWriter initWriter(final SAMFileHeader header, final boolean presorted, final boolean binary,
+                                     final SAMFileWriterImpl writer) {
+        writer.setSortOrder(header.getSortOrder(), presorted);
+        if (maxRecordsInRam != null) {
+            writer.setMaxRecordsInRam(maxRecordsInRam);
+        }
+        writer.setHeader(header);
+
+        if (this.useAsyncIo) return new AsyncSAMFileWriter(writer, this.asyncOutputBufferSize);
+        else return writer;
+    }
+
+    /**
+     * Create either a SAM or a BAM writer based on examination of the outputFile extension.
+     *
+     * @param header     entire header. Sort order is determined by the sortOrder property of this arg.
+     * @param presorted  if true, SAMRecords must be added to the SAMFileWriter in an order that agrees with header.sortOrder.
+     * @param outputFile where to write the output.  Must end with .sam or .bam.
+     * @return SAM or BAM writer based on file extension of outputFile.
+     */
+    public SAMFileWriter makeSAMOrBAMWriter(final SAMFileHeader header, final boolean presorted, final File outputFile) {
+        final String filename = outputFile.getName();
+        if (filename.endsWith(BamFileIoUtils.BAM_FILE_EXTENSION)) {
+            return makeBAMWriter(header, presorted, outputFile);
+        }
+        if (filename.endsWith(".sam")) {
+            return makeSAMWriter(header, presorted, outputFile);
+        }
+        return makeBAMWriter(header, presorted, outputFile);
+    }
+
+    /**
+     *
+     * Create a SAM, BAM or CRAM writer based on examination of the outputFile extension.
+     *
+     * @param header header. Sort order is determined by the sortOrder property of this arg.
+     * @param presorted if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
+     * @param outputFile where to write the output.  Must end with .sam, .bam or .cram.
+     * @param referenceFasta reference sequence file
+     * @return SAMFileWriter appropriate for the file type specified in outputFile
+     *
+     */
+    public SAMFileWriter makeWriter(final SAMFileHeader header, final boolean presorted, final File outputFile, final File referenceFasta) {
+        if (outputFile.getName().endsWith(SamReader.Type.CRAM_TYPE.fileExtension())) {
+            return makeCRAMWriter(header, presorted, outputFile, referenceFasta);
+        }
+        else {
+            return makeSAMOrBAMWriter(header, presorted, outputFile);
+        }
+    }
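+
+    /*
+     * Illustrative usage sketch (editorial example, not part of the upstream source).  Lets the
+     * extension of the output file decide the format; the reference FASTA (hypothetical "ref.fasta")
+     * is only consulted when a CRAM is being written:
+     *
+     *     final SAMFileWriter writer = new SAMFileWriterFactory()
+     *             .makeWriter(header, true, new File("out.cram"), new File("ref.fasta"));
+     *     // writer.addAlignment(record); ...
+     *     writer.close();
+     */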
+
+    /**
+     * Create a CRAMFileWriter on an output stream. Requires the input to be presorted to match the sort order defined
+     * by the input header.
+     *
+     * Note: does not honor factory settings for CREATE_MD5, CREATE_INDEX, USE_ASYNC_IO.
+     *
+     * @param header entire header. Sort order is determined by the sortOrder property of this arg.
+     * @param stream where to write the output.
+     * @param referenceFasta reference sequence file
+     * @return CRAMFileWriter
+     */
+    public CRAMFileWriter makeCRAMWriter(final SAMFileHeader header, final OutputStream stream, final File referenceFasta) {
+        // create the CRAMFileWriter directly without propagating factory settings
+        final CRAMFileWriter writer = new CRAMFileWriter(stream, new ReferenceSource(referenceFasta), header, null);
+        setCRAMWriterDefaults(writer);
+        return writer;
+    }
+
+    /**
+     * Create a CRAMFileWriter on an output file. Requires input record to be presorted to match the
+     * sort order defined by the input header.
+     *
+     * Note: does not honor factory settings for USE_ASYNC_IO.
+     *
+     * @param header entire header. Sort order is determined by the sortOrder property of this arg.
+     * @param outputFile where to write the output.  Must end with .sam, .bam or .cram.
+     * @param referenceFasta reference sequence file
+     * @return CRAMFileWriter
+     *
+     */
+    public CRAMFileWriter makeCRAMWriter(final SAMFileHeader header, final File outputFile, final File referenceFasta) {
+        return createCRAMWriterWithSettings(header, true, outputFile, referenceFasta);
+    }
+
+    /**
+     * Create a CRAMFileWriter on an output file.
+     *
+     * Note: does not honor factory setting for USE_ASYNC_IO.
+     *
+     * @param header entire header. Sort order is determined by the sortOrder property of this arg.
+     * @param presorted  if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
+     * @param outputFile where to write the output.  Must end with .sam, .bam or .cram.
+     * @param referenceFasta reference sequence file
+     * @return CRAMFileWriter
+     *
+     */
+    public CRAMFileWriter makeCRAMWriter(final SAMFileHeader header, final boolean presorted, final File outputFile, final File referenceFasta) {
+        return createCRAMWriterWithSettings(header, presorted, outputFile, referenceFasta);
+    }
+
+    /**
+     * Create a CRAMFileWriter on an output file based on factory settings.
+     *
+     * Note: does not honor the factory setting for USE_ASYNC_IO.
+     *
+     * @param header entire header. Sort order is determined by the sortOrder property of this arg.
+     * @param presorted  if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
+     * @param outputFile where to write the output.  Must end with .sam, .bam or .cram.
+     * @param referenceFasta reference sequence file
+     * @return CRAMFileWriter
+     */
+    private CRAMFileWriter createCRAMWriterWithSettings(
+            final SAMFileHeader header,
+            final boolean presorted,
+            final File outputFile,
+            final File referenceFasta) {
+        OutputStream cramOS = null;
+        OutputStream indexOS = null ;
+
+        if (createIndex) {
+            if (!IOUtil.isRegularPath(outputFile)) {
+                log.warn("Cannot create index for CRAM because output file is not a regular file: " + outputFile.getAbsolutePath());
+            }
+            else {
+                try {
+                    final File indexFile = new File(outputFile.getAbsolutePath() + BAMIndex.BAMIndexSuffix) ;
+                    indexOS = new FileOutputStream(indexFile) ;
+                }
+                catch (final IOException ioe) {
+                    throw new RuntimeIOException("Error creating index file for: " + outputFile.getAbsolutePath()+ BAMIndex.BAMIndexSuffix);
+                }
+            }
+        }
+
+        try {
+            cramOS = IOUtil.maybeBufferOutputStream(new FileOutputStream(outputFile, false), bufferSize);
+        }
+        catch (final IOException ioe) {
+            throw new RuntimeIOException("Error creating CRAM file: " + outputFile.getAbsolutePath());
+        }
+
+        final CRAMFileWriter writer = new CRAMFileWriter(
+                createMd5File ? new Md5CalculatingOutputStream(cramOS, new File(outputFile.getAbsolutePath() + ".md5")) : cramOS,
+                indexOS,
+                presorted,
+                new ReferenceSource(referenceFasta),
+                header,
+                outputFile.getAbsolutePath());
+        setCRAMWriterDefaults(writer);
+
+        return writer;
+    }
+
+    // Set the default CRAM writer preservation parameters
+    private void setCRAMWriterDefaults(final CRAMFileWriter writer) {
+        writer.setPreserveReadNames(true);
+        writer.setCaptureAllTags(true);
+    }
+
+    @Override
+    public String toString() {
+        return "SAMFileWriterFactory [createIndex=" + createIndex + ", createMd5File=" + createMd5File + ", useAsyncIo="
+                + useAsyncIo + ", asyncOutputBufferSize=" + asyncOutputBufferSize + ", bufferSize=" + bufferSize
+                + ", tmpDir=" + tmpDir + ", compressionLevel=" + compressionLevel + ", maxRecordsInRam="
+                + maxRecordsInRam + "]";
+    }
+
+}
diff --git a/src/java/htsjdk/samtools/SAMFileWriterImpl.java b/src/main/java/htsjdk/samtools/SAMFileWriterImpl.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMFileWriterImpl.java
rename to src/main/java/htsjdk/samtools/SAMFileWriterImpl.java
diff --git a/src/main/java/htsjdk/samtools/SAMFlag.java b/src/main/java/htsjdk/samtools/SAMFlag.java
new file mode 100644
index 0000000..11dc155
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/SAMFlag.java
@@ -0,0 +1,110 @@
+/*
+ * The MIT License
+ *
+ * Author: Pierre Lindenbaum PhD @yokofakun
+ *  Institut du Thorax - Nantes - France
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools;
+
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * SAM flags as an enum, for use in GUIs, menus, etc.
+ */
+public enum SAMFlag {
+    READ_PAIRED(                    0x1,    "Template having multiple segments in sequencing"),
+    PROPER_PAIR(                    0x2,    "Each segment properly aligned according to the aligner"),
+    READ_UNMAPPED(                  0x4,    "Segment unmapped"),
+    MATE_UNMAPPED(                  0x8,    "Next segment in the template unmapped"),
+    READ_REVERSE_STRAND(            0x10,   "SEQ being reverse complemented"),
+    MATE_REVERSE_STRAND(            0x20,   "SEQ of the next segment in the template being reverse complemented"),
+    FIRST_OF_PAIR(                  0x40,   "The first segment in the template"),
+    SECOND_OF_PAIR(                 0x80,   "The last segment in the template"),
+    NOT_PRIMARY_ALIGNMENT(          0x100,  "Secondary alignment"),
+    READ_FAILS_VENDOR_QUALITY_CHECK(0x200,  "Not passing quality controls"),
+    DUPLICATE_READ(                 0x400,  "PCR or optical duplicate"), 
+    SUPPLEMENTARY_ALIGNMENT(        0x800,  "Supplementary alignment")
+    ;
+
+    /* visible for the package, to be used by SAMRecord */
+    final int flag;
+    private final String description;
+
+    SAMFlag(int flag, String description) {
+        this.flag = flag;
+        this.description = description;
+    }
+
+    /** @return this flag as an int */
+    public int intValue() {
+        return flag;
+    }
+
+    /** @return a human label for this SAMFlag */
+    public String getLabel() {
+        return name().toLowerCase().replace('_', ' ');
+    }
+
+    /** @return a human description for this SAMFlag */
+    public String getDescription() {
+        return this.description;
+    }
+
+    /** @return the SAMFlag for the value 'flag' or null if it was not found */
+    public static SAMFlag valueOf(int flag) {
+        for (SAMFlag f : values()) {
+            if (flag == f.flag)
+                return f;
+        }
+        return null;
+    }
+
+    /** @return the SAMFlag with the given name, or null if it was not found */
+    public static SAMFlag findByName(String flag) {
+        for (SAMFlag f : values()) {
+            if (f.name().equals(flag))
+                return f;
+        }
+        return null;
+    }
+
+    /** @return true if this flag's bit is set in the given flag value */
+    public boolean isSet(int flag) {
+        return (this.flag & flag) != 0;
+    }
+
+    /** @return true if this flag's bit is not set in the given flag value */
+    public boolean isUnset(int flag) {
+        return !isSet(flag);
+    }
+
+    /** @return the java.util.Set of SAMFlags whose bits are set in 'flag' */
+    public static Set<SAMFlag> getFlags(int flag) {
+        Set<SAMFlag> set = new HashSet<SAMFlag>();
+        for (SAMFlag f : values()) {
+            if (f.isSet(flag))
+                set.add(f);
+        }
+        return set;
+    }
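+
+    /*
+     * Illustrative usage sketch (editorial example, not part of the upstream source).  Decodes a
+     * raw SAM flag value into individual SAMFlag constants:
+     *
+     *     final int flags = 0x63;   // READ_PAIRED | PROPER_PAIR | MATE_REVERSE_STRAND | FIRST_OF_PAIR
+     *     for (final SAMFlag f : SAMFlag.getFlags(flags)) {
+     *         System.out.println(f.intValue() + "\t" + f.getLabel());
+     *     }
+     *     final boolean unmapped = SAMFlag.READ_UNMAPPED.isSet(flags);   // false
+     */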
+}
diff --git a/src/java/htsjdk/samtools/SAMFormatException.java b/src/main/java/htsjdk/samtools/SAMFormatException.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMFormatException.java
rename to src/main/java/htsjdk/samtools/SAMFormatException.java
diff --git a/src/java/htsjdk/samtools/SAMHeaderRecordComparator.java b/src/main/java/htsjdk/samtools/SAMHeaderRecordComparator.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMHeaderRecordComparator.java
rename to src/main/java/htsjdk/samtools/SAMHeaderRecordComparator.java
diff --git a/src/main/java/htsjdk/samtools/SAMLineParser.java b/src/main/java/htsjdk/samtools/SAMLineParser.java
new file mode 100644
index 0000000..f73a67e
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/SAMLineParser.java
@@ -0,0 +1,487 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2012 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools;
+
+import htsjdk.samtools.util.StringUtil;
+
+import java.io.File;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+
+/**
+ * This class enables creation of a SAMRecord object from a String in SAM text format.  The SAM flag field will be inferred
+ * for each record separately, unless the expected format is set using `withSamFlagField`.
+ */
+public class SAMLineParser {
+
+    // From SAM specification
+    private static final int QNAME_COL = 0;
+    private static final int FLAG_COL = 1;
+    private static final int RNAME_COL = 2;
+    private static final int POS_COL = 3;
+    private static final int MAPQ_COL = 4;
+    private static final int CIGAR_COL = 5;
+    private static final int MRNM_COL = 6;
+    private static final int MPOS_COL = 7;
+    private static final int ISIZE_COL = 8;
+    private static final int SEQ_COL = 9;
+    private static final int QUAL_COL = 10;
+
+    private static final int NUM_REQUIRED_FIELDS = 11;
+
+    /**
+     * Allocate this once rather than for every line as a performance
+     * optimization. The size is arbitrary -- merely large enough to handle the
+     * maximum number of fields we might expect from a reasonable SAM file.
+     */
+    private final String[] mFields = new String[10000];
+
+    /**
+     * Add information about the origin (reader and position) to SAM records.
+     */
+    private final SamReader mParentReader;
+    private final SAMRecordFactory samRecordFactory;
+    private final ValidationStringency validationStringency;
+    private final SAMFileHeader mFileHeader;
+    private final File mFile;
+    private Optional<SamFlagField> samFlagField = Optional.empty();
+
+    private final TextTagCodec tagCodec = new TextTagCodec();
+
+    private int currentLineNumber;
+    private String currentLine;
+
+    //
+    // Constructors
+    //
+
+    /**
+     * Public constructor. Use the default SAMRecordFactory and stringency.
+     *
+     * @param samFileHeader SAM file header
+     */
+    public SAMLineParser(final SAMFileHeader samFileHeader) {
+
+        this(new DefaultSAMRecordFactory(),
+                ValidationStringency.DEFAULT_STRINGENCY, samFileHeader,
+                null, null);
+    }
+
+    /**
+     * Public constructor. Use the default SAMRecordFactory and stringency.
+     *
+     * @param samFileHeader SAM file header
+     * @param samFileReader SAM file reader for passing to SAMRecord.setFileSource; may be null.
+     * @param samFile       SAM file being read (for error message only, may be null)
+     */
+    public SAMLineParser(final SAMFileHeader samFileHeader,
+                         final SamReader samFileReader, final File samFile) {
+
+        this(new DefaultSAMRecordFactory(),
+                ValidationStringency.DEFAULT_STRINGENCY, samFileHeader,
+                samFileReader, samFile);
+    }
+
+    /**
+     * Public constructor.
+     *
+     * @param samRecordFactory     SamRecord Factory
+     * @param validationStringency validation stringency
+     * @param samFileHeader        SAM file header
+     * @param samFileReader        SAM file reader for passing to SAMRecord.setFileSource; may be null.
+     * @param samFile              SAM file being read (for error message only, may be null)
+     */
+    public SAMLineParser(final SAMRecordFactory samRecordFactory,
+                         final ValidationStringency validationStringency,
+                         final SAMFileHeader samFileHeader, final SamReader samFileReader,
+                         final File samFile) {
+
+        if (samRecordFactory == null)
+            throw new NullPointerException("The SamRecordFactory must be set");
+
+        if (validationStringency == null)
+            throw new NullPointerException("The validationStringency must be set");
+
+        if (samFileHeader == null)
+            throw new NullPointerException("The mFileHeader must be set");
+
+        this.samRecordFactory = samRecordFactory;
+        this.validationStringency = validationStringency;
+        this.mFileHeader = samFileHeader;
+
+        // Can be null
+        this.mParentReader = samFileReader;
+
+        // Can be null
+        this.mFile = samFile;
+    }
+
+    /**
+     * Get the SAM file header.
+     *
+     * @return the SAM file header
+     */
+    public SAMFileHeader getFileHeader() {
+
+        return this.mFileHeader;
+    }
+
+    /**
+     * Get validation stringency.
+     *
+     * @return validation stringency
+     */
+    public ValidationStringency getValidationStringency() {
+        return this.validationStringency;
+    }
+
+    /**
+     * Sets the SAM flag field format expected for all records.
+     */
+    public SAMLineParser withSamFlagField(final SamFlagField samFlagField) {
+        if (samFlagField == null) throw new IllegalArgumentException("Sam flag field was null");
+        this.samFlagField = Optional.of(samFlagField);
+        return this;
+    }
+
+    private int parseInt(final String s, final String fieldName) {
+        final int ret;
+        try {
+            ret = Integer.parseInt(s);
+        } catch (NumberFormatException e) {
+            throw reportFatalErrorParsingLine("Non-numeric value in " + fieldName + " column");
+        }
+        return ret;
+    }
+    
+    private int parseFlag(final String s, final String fieldName) {
+        try {
+            return samFlagField.isPresent() ? samFlagField.get().parse(s) : SamFlagField.parseDefault(s);
+        } catch (NumberFormatException e) {
+            throw reportFatalErrorParsingLine("Non-numeric value in " + fieldName + " column");
+        } catch (SAMFormatException e) {
+            throw reportFatalErrorParsingLine("Error in " + fieldName + " column: " + e.getMessage(), e);
+        }
+    }
+
+    private void validateReferenceName(final String rname, final String fieldName) {
+        if (rname.equals("=")) {
+            if (fieldName.equals("MRNM")) {
+                return;
+            }
+            reportErrorParsingLine("= is not a valid value for "
+                    + fieldName + " field.");
+        }
+        if (!this.mFileHeader.getSequenceDictionary().isEmpty()) {
+            if (this.mFileHeader.getSequence(rname) == null) {
+                reportErrorParsingLine(fieldName
+                        + " '" + rname + "' not found in any SQ record");
+            }
+        }
+    }
+
+    /**
+     * Parse a SAM line.
+     *
+     * @param line line to parse
+     * @return a new SAMRecord object
+     */
+    public SAMRecord parseLine(final String line) {
+
+        return parseLine(line, -1);
+    }
+
+    /**
+     * Parse a SAM line.
+     *
+     * @param line       line to parse
+     * @param lineNumber line number in the file. If the line number is not known,
+     *                   it can be <= 0.
+     * @return a new SAMRecord object
+     */
+    public SAMRecord parseLine(final String line, final int lineNumber) {
+
+        this.currentLineNumber = lineNumber;
+        this.currentLine = line;
+
+        final int numFields = StringUtil.split(line, mFields, '\t');
+        if (numFields < NUM_REQUIRED_FIELDS) {
+            throw reportFatalErrorParsingLine("Not enough fields");
+        }
+        if (numFields == mFields.length) {
+            reportErrorParsingLine("Too many fields in SAM text record.");
+        }
+        for (int i = 0; i < numFields; ++i) {
+            if (mFields[i].isEmpty()) {
+                reportErrorParsingLine("Empty field at position " + i + " (zero-based)");
+            }
+        }
+        final SAMRecord samRecord =
+                samRecordFactory.createSAMRecord(this.mFileHeader);
+        samRecord.setValidationStringency(this.validationStringency);
+        if (mParentReader != null)
+            samRecord.setFileSource(new SAMFileSource(mParentReader, null));
+        samRecord.setHeader(this.mFileHeader);
+        samRecord.setReadName(mFields[QNAME_COL]);
+
+        final int flags = parseFlag(mFields[FLAG_COL], "FLAG");
+        samRecord.setFlags(flags);
+
+        String rname = mFields[RNAME_COL];
+        if (!rname.equals("*")) {
+            rname = SAMSequenceRecord.truncateSequenceName(rname);
+            validateReferenceName(rname, "RNAME");
+            samRecord.setReferenceName(rname);
+        } else if (!samRecord.getReadUnmappedFlag()) {
+            reportErrorParsingLine("RNAME is not specified but flags indicate mapped");
+        }
+
+        final int pos = parseInt(mFields[POS_COL], "POS");
+        final int mapq = parseInt(mFields[MAPQ_COL], "MAPQ");
+        final String cigar = mFields[CIGAR_COL];
+        if (!SAMRecord.NO_ALIGNMENT_REFERENCE_NAME.equals(samRecord
+                .getReferenceName())) {
+            if (pos == 0) {
+                reportErrorParsingLine("POS must be non-zero if RNAME is specified");
+            }
+            if (!samRecord.getReadUnmappedFlag() && cigar.equals("*")) {
+                reportErrorParsingLine("CIGAR must not be '*' if RNAME is specified");
+            }
+        } else {
+            if (pos != 0) {
+                reportErrorParsingLine("POS must be zero if RNAME is not specified");
+            }
+            if (mapq != 0) {
+                reportErrorParsingLine("MAPQ must be zero if RNAME is not specified");
+            }
+            if (!cigar.equals("*")) {
+                reportErrorParsingLine("CIGAR must be '*' if RNAME is not specified");
+            }
+        }
+        samRecord.setAlignmentStart(pos);
+        samRecord.setMappingQuality(mapq);
+        samRecord.setCigarString(cigar);
+
+        String mateRName = mFields[MRNM_COL];
+        if (mateRName.equals("*")) {
+            if (samRecord.getReadPairedFlag() && !samRecord.getMateUnmappedFlag()) {
+                reportErrorParsingLine("MRNM not specified but flags indicate mate mapped");
+            }
+        } else {
+            if (!samRecord.getReadPairedFlag()) {
+                reportErrorParsingLine("MRNM specified but flags indicate unpaired");
+            }
+            if (!"=".equals(mateRName)) {
+                mateRName = SAMSequenceRecord.truncateSequenceName(mateRName);
+            }
+            validateReferenceName(mateRName, "MRNM");
+            if (mateRName.equals("=")) {
+                if (samRecord.getReferenceName() == null) {
+                    reportErrorParsingLine("MRNM is '=', but RNAME is not set");
+                }
+                samRecord.setMateReferenceName(samRecord.getReferenceName());
+            } else {
+                samRecord.setMateReferenceName(mateRName);
+            }
+        }
+
+        final int matePos = parseInt(mFields[MPOS_COL], "MPOS");
+        final int isize = parseInt(mFields[ISIZE_COL], "ISIZE");
+        if (!samRecord.getMateReferenceName().equals(
+                SAMRecord.NO_ALIGNMENT_REFERENCE_NAME)) {
+            if (matePos == 0) {
+                reportErrorParsingLine("MPOS must be non-zero if MRNM is specified");
+            }
+        } else {
+            if (matePos != 0) {
+                reportErrorParsingLine("MPOS must be zero if MRNM is not specified");
+            }
+            if (isize != 0) {
+                reportErrorParsingLine("ISIZE must be zero if MRNM is not specified");
+            }
+        }
+        samRecord.setMateAlignmentStart(matePos);
+        samRecord.setInferredInsertSize(isize);
+        if (!mFields[SEQ_COL].equals("*")) {
+            validateReadBases(mFields[SEQ_COL]);
+            samRecord.setReadString(mFields[SEQ_COL]);
+        } else {
+            samRecord.setReadBases(SAMRecord.NULL_SEQUENCE);
+        }
+        if (!mFields[QUAL_COL].equals("*")) {
+            if (samRecord.getReadBases() == SAMRecord.NULL_SEQUENCE) {
+                reportErrorParsingLine("QUAL should not be specified if SEQ is not specified");
+            }
+            if (samRecord.getReadString().length() != mFields[QUAL_COL].length()) {
+                reportErrorParsingLine("length(QUAL) != length(SEQ)");
+            }
+            samRecord.setBaseQualityString(mFields[QUAL_COL]);
+        } else {
+            samRecord.setBaseQualities(SAMRecord.NULL_QUALS);
+        }
+
+        for (int i = NUM_REQUIRED_FIELDS; i < numFields; ++i) {
+            parseTag(samRecord, mFields[i]);
+        }
+
+        // Only call samRecord.isValid() if errors would be reported since the validation
+        // is quite expensive in and of itself.
+        if (this.validationStringency != ValidationStringency.SILENT) {
+            final List<SAMValidationError> validationErrors = samRecord.isValid();
+            if (validationErrors != null) {
+                for (final SAMValidationError errorMessage : validationErrors) {
+                    reportErrorParsingLine(errorMessage.getMessage());
+                }
+            }
+        }
+
+        return samRecord;
+    }
+
+    private void validateReadBases(final String bases) {
+        /*
+         * Using regex is slow, so check for invalid characters via
+         * isValidReadBase(), which hopefully the JIT will optimize.
+         * Previous regex-based check:
+         *   if (!VALID_BASES.matcher(bases).matches()) {
+         *       reportErrorParsingLine("Invalid character in read bases");
+         *   }
+         */
+        for (int i = 0; i < bases.length(); ++i) {
+            if (!isValidReadBase(bases.charAt(i))) {
+                reportErrorParsingLine("Invalid character in read bases");
+                return;
+            }
+        }
+    }
+
+    private boolean isValidReadBase(final char base) {
+        switch (base) {
+            case 'a':
+            case 'c':
+            case 'm':
+            case 'g':
+            case 'r':
+            case 's':
+            case 'v':
+            case 't':
+            case 'w':
+            case 'y':
+            case 'h':
+            case 'k':
+            case 'd':
+            case 'b':
+            case 'n':
+            case 'A':
+            case 'C':
+            case 'M':
+            case 'G':
+            case 'R':
+            case 'S':
+            case 'V':
+            case 'T':
+            case 'W':
+            case 'Y':
+            case 'H':
+            case 'K':
+            case 'D':
+            case 'B':
+            case 'N':
+            case '.':
+            case '=':
+                return true;
+            default:
+                return false;
+        }
+    }
+
+    private void parseTag(final SAMRecord samRecord, final String tag) {
+        Map.Entry<String, Object> entry = null;
+        try {
+            entry = tagCodec.decode(tag);
+        } catch (SAMFormatException e) {
+            reportErrorParsingLine(e);
+        }
+        if (entry != null) {
+            if (entry.getValue() instanceof TagValueAndUnsignedArrayFlag) {
+                final TagValueAndUnsignedArrayFlag valueAndFlag =
+                        (TagValueAndUnsignedArrayFlag) entry.getValue();
+                if (valueAndFlag.isUnsignedArray) {
+                    samRecord.setUnsignedArrayAttribute(entry.getKey(),
+                            valueAndFlag.value);
+                } else {
+                    samRecord.setAttribute(entry.getKey(), valueAndFlag.value);
+                }
+            } else {
+                samRecord.setAttribute(entry.getKey(), entry.getValue());
+            }
+        }
+    }
+
+    //
+    // Error methods
+    //
+
+    private RuntimeException reportFatalErrorParsingLine(final String reason) {
+        return new SAMFormatException(makeErrorString(reason));
+    }
+
+    private RuntimeException reportFatalErrorParsingLine(final String reason, final Throwable throwable) {
+        return new SAMFormatException(makeErrorString(reason), throwable);
+    }
+
+    private void reportErrorParsingLine(final String reason) {
+        final String errorMessage = makeErrorString(reason);
+
+        if (validationStringency == ValidationStringency.STRICT) {
+            throw new SAMFormatException(errorMessage);
+        } else if (validationStringency == ValidationStringency.LENIENT) {
+            System.err
+                    .println("Ignoring SAM validation error due to lenient parsing:");
+            System.err.println(errorMessage);
+        }
+    }
+
+    private void reportErrorParsingLine(final Exception e) {
+        final String errorMessage = makeErrorString(e.getMessage());
+        if (validationStringency == ValidationStringency.STRICT) {
+            throw new SAMFormatException(errorMessage);
+        } else if (validationStringency == ValidationStringency.LENIENT) {
+            System.err
+                    .println("Ignoring SAM validation error due to lenient parsing:");
+            System.err.println(errorMessage);
+        }
+    }
+
+    private String makeErrorString(final String reason) {
+        String fileMessage = "";
+        if (mFile != null) {
+            fileMessage = "File " + mFile + "; ";
+        }
+        return "Error parsing text SAM file. "
+                + reason + "; " + fileMessage + "Line "
+                + (this.currentLineNumber <= 0 ? "unknown" : this.currentLineNumber)
+                + "\nLine: " + this.currentLine;
+    }
+
+}
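
As a usage sketch (not part of the patch): parsing one tab-separated SAM text record with a minimal header. The reference name, length and field values are made up for illustration, and the record is assumed to pass strict validation:

    import htsjdk.samtools.SAMFileHeader;
    import htsjdk.samtools.SAMLineParser;
    import htsjdk.samtools.SAMRecord;
    import htsjdk.samtools.SAMSequenceDictionary;
    import htsjdk.samtools.SAMSequenceRecord;

    import java.util.Collections;

    public class SAMLineParserDemo {
        public static void main(String[] args) {
            // Header with one reference sequence so that RNAME validation can succeed.
            final SAMFileHeader header = new SAMFileHeader();
            header.setSequenceDictionary(new SAMSequenceDictionary(
                    Collections.singletonList(new SAMSequenceRecord("chr1", 1000000))));

            final SAMLineParser parser = new SAMLineParser(header);
            // parser.withSamFlagField(SamFlagField.DECIMAL); // optionally pin the FLAG encoding
            // QNAME  FLAG RNAME POS MAPQ CIGAR MRNM MPOS ISIZE SEQ  QUAL
            final String line = "read1\t0\tchr1\t100\t60\t4M\t*\t0\t0\tACGT\tFFFF";
            final SAMRecord rec = parser.parseLine(line, 1);
            System.out.println(rec.getReadName() + " aligned at " + rec.getAlignmentStart());
        }
    }
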
diff --git a/src/java/htsjdk/samtools/SAMProgramRecord.java b/src/main/java/htsjdk/samtools/SAMProgramRecord.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMProgramRecord.java
rename to src/main/java/htsjdk/samtools/SAMProgramRecord.java
diff --git a/src/java/htsjdk/samtools/SAMReadGroupRecord.java b/src/main/java/htsjdk/samtools/SAMReadGroupRecord.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMReadGroupRecord.java
rename to src/main/java/htsjdk/samtools/SAMReadGroupRecord.java
diff --git a/src/java/htsjdk/samtools/SAMRecord.java b/src/main/java/htsjdk/samtools/SAMRecord.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMRecord.java
rename to src/main/java/htsjdk/samtools/SAMRecord.java
diff --git a/src/java/htsjdk/samtools/SAMRecordComparator.java b/src/main/java/htsjdk/samtools/SAMRecordComparator.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMRecordComparator.java
rename to src/main/java/htsjdk/samtools/SAMRecordComparator.java
diff --git a/src/java/htsjdk/samtools/SAMRecordCoordinateComparator.java b/src/main/java/htsjdk/samtools/SAMRecordCoordinateComparator.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMRecordCoordinateComparator.java
rename to src/main/java/htsjdk/samtools/SAMRecordCoordinateComparator.java
diff --git a/src/java/htsjdk/samtools/SAMRecordDuplicateComparator.java b/src/main/java/htsjdk/samtools/SAMRecordDuplicateComparator.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMRecordDuplicateComparator.java
rename to src/main/java/htsjdk/samtools/SAMRecordDuplicateComparator.java
diff --git a/src/java/htsjdk/samtools/SAMRecordFactory.java b/src/main/java/htsjdk/samtools/SAMRecordFactory.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMRecordFactory.java
rename to src/main/java/htsjdk/samtools/SAMRecordFactory.java
diff --git a/src/java/htsjdk/samtools/SAMRecordIterator.java b/src/main/java/htsjdk/samtools/SAMRecordIterator.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMRecordIterator.java
rename to src/main/java/htsjdk/samtools/SAMRecordIterator.java
diff --git a/src/java/htsjdk/samtools/SAMRecordQueryHashComparator.java b/src/main/java/htsjdk/samtools/SAMRecordQueryHashComparator.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMRecordQueryHashComparator.java
rename to src/main/java/htsjdk/samtools/SAMRecordQueryHashComparator.java
diff --git a/src/java/htsjdk/samtools/SAMRecordQueryNameComparator.java b/src/main/java/htsjdk/samtools/SAMRecordQueryNameComparator.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMRecordQueryNameComparator.java
rename to src/main/java/htsjdk/samtools/SAMRecordQueryNameComparator.java
diff --git a/src/main/java/htsjdk/samtools/SAMRecordSetBuilder.java b/src/main/java/htsjdk/samtools/SAMRecordSetBuilder.java
new file mode 100644
index 0000000..714199f
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/SAMRecordSetBuilder.java
@@ -0,0 +1,576 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools;
+
+import htsjdk.samtools.DuplicateScoringStrategy.ScoringStrategy;
+import htsjdk.samtools.util.CloseableIterator;
+import htsjdk.samtools.util.CoordMath;
+import htsjdk.samtools.util.RuntimeIOException;
+import htsjdk.samtools.util.SequenceUtil;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Random;
+import java.util.TreeSet;
+
+/**
+ * Factory class for creating SAMRecords for testing purposes. Various methods can be called
+ * to add new SAM records (or pairs of records) to a list which can then be returned at
+ * any point. The records must reference human chromosomes (excluding randoms etc.).
+ * <p/>
+ * Although this is a class for testing, it is in the src tree because it is included in the sam jarfile.
+ *
+ * @author Tim Fennell
+ */
+public class SAMRecordSetBuilder implements Iterable<SAMRecord> {
+    private static final String[] chroms = {
+            "chr1", "chr2", "chr3", "chr4", "chr5", "chr6", "chr7", "chr8", "chr9", "chr10",
+            "chr11", "chr12", "chr13", "chr14", "chr15", "chr16", "chr17", "chr18", "chr19", "chr20",
+            "chr21", "chr22", "chrX", "chrY", "chrM"
+    };
+    private static final byte[] BASES = {'A', 'C', 'G', 'T'};
+    private static final String READ_GROUP_ID = "1";
+    private static final String SAMPLE = "FREE_SAMPLE";
+    private final Random random = new Random();
+
+    private SAMFileHeader header;
+    private final Collection<SAMRecord> records;
+
+    private int readLength = 36;
+
+    private SAMProgramRecord programRecord = null;
+    private SAMReadGroupRecord readGroup = null;
+    private boolean useNmFlag = false;
+
+    private boolean unmappedHasBasesAndQualities = true;
+    
+    public static final int DEFAULT_CHROMOSOME_LENGTH = 200000000;
+
+    public static final ScoringStrategy DEFAULT_DUPLICATE_SCORING_STRATEGY = ScoringStrategy.TOTAL_MAPPED_REFERENCE_LENGTH;
+
+    /**
+     * Constructs a new SAMRecordSetBuilder with all the data needed to keep the records
+     * sorted in coordinate order.
+     */
+    public SAMRecordSetBuilder() {
+        this(true, SAMFileHeader.SortOrder.coordinate);
+    }
+
+    /**
+     * Construct a new SAMRecordSetBuilder.
+     *
+     * @param sortForMe If true, keep the records created in sorted order.
+     * @param sortOrder If sortForMe is true, defines the sort order.
+     */
+    public SAMRecordSetBuilder(final boolean sortForMe, final SAMFileHeader.SortOrder sortOrder) {
+        this(sortForMe, sortOrder, true);
+    }
+
+    public SAMRecordSetBuilder(final boolean sortForMe, final SAMFileHeader.SortOrder sortOrder, final boolean addReadGroup) {
+        this(sortForMe, sortOrder, addReadGroup, DEFAULT_CHROMOSOME_LENGTH);
+    }
+
+    public SAMRecordSetBuilder(final boolean sortForMe, final SAMFileHeader.SortOrder sortOrder, final boolean addReadGroup, final int defaultChromosomeLength) {
+        this(sortForMe, sortOrder, addReadGroup, defaultChromosomeLength, DEFAULT_DUPLICATE_SCORING_STRATEGY);
+    }
+
+    public SAMRecordSetBuilder(final boolean sortForMe, final SAMFileHeader.SortOrder sortOrder, final boolean addReadGroup,
+                               final int defaultChromosomeLength, final ScoringStrategy duplicateScoringStrategy) {
+        final List<SAMSequenceRecord> sequences = new ArrayList<SAMSequenceRecord>();
+        for (final String chrom : chroms) {
+            final SAMSequenceRecord sequenceRecord = new SAMSequenceRecord(chrom, defaultChromosomeLength);
+            sequences.add(sequenceRecord);
+        }
+
+        this.header = new SAMFileHeader();
+        this.header.setSequenceDictionary(new SAMSequenceDictionary(sequences));
+        this.header.setSortOrder(sortOrder);
+        if (sortForMe) {
+            final SAMRecordComparator comparator;
+            if (sortOrder == SAMFileHeader.SortOrder.queryname) {
+                comparator = new SAMRecordQueryNameComparator();
+            } else {
+                comparator = new SAMRecordCoordinateComparator();
+            }
+            this.records = new TreeSet<SAMRecord>(comparator);
+        } else {
+            this.records = new ArrayList<SAMRecord>();
+        }
+
+        if (addReadGroup) {
+            final SAMReadGroupRecord readGroupRecord = new SAMReadGroupRecord(READ_GROUP_ID);
+            readGroupRecord.setSample(SAMPLE);
+            readGroupRecord.setPlatform("ILLUMINA");
+            final List<SAMReadGroupRecord> readGroups = new ArrayList<SAMReadGroupRecord>();
+            readGroups.add(readGroupRecord);
+            this.header.setReadGroups(readGroups);
+        }
+    }
+
+    public void setUnmappedHasBasesAndQualities(final boolean value) {
+        this.unmappedHasBasesAndQualities = value;
+    }
+
+    public int size() {
+        return this.records.size();
+    }
+
+    /**
+     * Set the seed of the random number generator for cases in which a repeatable result is desired.
+     *
+     * @param seed the seed for the random number generator
+     */
+    public void setRandomSeed(final long seed) {
+        random.setSeed(seed);
+    }
+
+    /**
+     * Adds the given program record to the header, and assigns the PG tag to any SAMRecords
+     * created after it has been added. May be called multiple times in order to assign different
+     * PG IDs to different SAMRecords.  programRecord may be null to stop assignment of PG tag.
+     * It is up to the caller to ensure that program record IDs do not collide.
+     */
+    public void setProgramRecord(final SAMProgramRecord programRecord) {
+        this.programRecord = programRecord;
+        if (programRecord != null) {
+            this.header.addProgramRecord(programRecord);
+        }
+    }
+
+    public void setUseNmFlag(final boolean useNmFlag) {
+        this.useNmFlag = useNmFlag;
+    }
+
+    public void setReadGroup(final SAMReadGroupRecord readGroup) {
+        this.readGroup = readGroup;
+        if (readGroup != null) {
+            this.header.addReadGroup(readGroup);
+        }
+    }
+
+    /** Returns the accumulated list of sam records. */
+    public Collection<SAMRecord> getRecords() { return this.records; }
+
+    public void setHeader(final SAMFileHeader header) {
+        this.header = header.clone();
+    }
+
+
+    /** The record should already have the DS and MC tags computed */
+    public void addRecord(final SAMRecord record) {
+        if (record.getReadPairedFlag() && !record.getMateUnmappedFlag() &&
+                null == record.getAttribute(SAMTagUtil.getSingleton().MC)) {
+            throw new SAMException("Mate Cigar tag (MC) not found in: " + record.getReadName());
+        }
+        this.records.add(record);
+    }
+
+    /** Returns a CloseableIterator over the collection of SAMRecords. */
+    public CloseableIterator<SAMRecord> iterator() {
+        return new CloseableIterator<SAMRecord>() {
+            private final Iterator<SAMRecord> iterator = records.iterator();
+
+            public void close() { /* Do nothing. */ }
+
+            public boolean hasNext() { return this.iterator.hasNext(); }
+
+            public SAMRecord next() { return this.iterator.next(); }
+
+            public void remove() { this.iterator.remove(); }
+        };
+    }
+
+    /**
+     * Creates a fragment record (mapped or unmapped) using the provided contig, start and optionally the strand,
+     * cigar string, quality string or default quality score.  This does not modify the flag field, which should be updated
+     * if desired before adding the returned record to the list of records.
+     */
+    private SAMRecord createReadNoFlag(final String name, final int contig, final int start, final boolean negativeStrand,
+                                       final boolean recordUnmapped, final String cigar, final String qualityString,
+                                       final int defaultQuality) throws SAMException {
+        final SAMRecord rec = new SAMRecord(this.header);
+        rec.setReadName(name);
+        if (header.getSequenceDictionary().size() <= contig) {
+            throw new SAMException("Contig too big [" + header.getSequenceDictionary().size() + " < " + contig);
+        }
+        if (0 <= contig) {
+            rec.setReferenceIndex(contig);
+            rec.setAlignmentStart(start);
+        }
+        if (!recordUnmapped) {
+            rec.setReadNegativeStrandFlag(negativeStrand);
+            if (null != cigar) {
+                rec.setCigarString(cigar);
+            } else if (!rec.getReadUnmappedFlag()) {
+                rec.setCigarString(readLength + "M");
+            }
+            rec.setMappingQuality(255);
+        } else {
+            rec.setReadUnmappedFlag(true);
+        }
+        rec.setAttribute(SAMTag.RG.name(), READ_GROUP_ID);
+
+        if(useNmFlag){
+            rec.setAttribute(SAMTag.NM.name(), SequenceUtil.calculateSamNmTagFromCigar(rec));
+        }
+
+        if (programRecord != null) {
+            rec.setAttribute(SAMTag.PG.name(), programRecord.getProgramGroupId());
+        }
+
+        if (readGroup != null) {
+            rec.setAttribute(SAMTag.RG.name(), readGroup.getReadGroupId());
+        }
+
+        if (!recordUnmapped || this.unmappedHasBasesAndQualities) {
+            fillInBasesAndQualities(rec, qualityString, defaultQuality);
+        }
+
+        return rec;
+    }
+
+    /**
+     * Adds a skeletal fragment (non-PE) record to the set using the provided
+     * contig start and strand information.
+     */
+    public SAMRecord addFrag(final String name, final int contig, final int start, final boolean negativeStrand) {
+        return addFrag(name, contig, start, negativeStrand, false, null, null, -1);
+    }
+
+    /**
+     * Adds a fragment record (mapped or unmapped) to the set using the provided contig start and optionally the strand,
+     * cigar string, quality string or default quality score.
+     */
+    public SAMRecord addFrag(final String name, final int contig, final int start, final boolean negativeStrand,
+                             final boolean recordUnmapped, final String cigar, final String qualityString,
+                             final int defaultQuality) throws SAMException {
+        return addFrag(name, contig, start, negativeStrand, recordUnmapped, cigar, qualityString, defaultQuality, false);
+    }
+
+    /**
+     * Adds a fragment record (mapped or unmapped) to the set using the provided contig start and optionally the strand,
+     * cigar string, quality string or default quality score.
+     */
+    public SAMRecord addFrag(final String name, final int contig, final int start, final boolean negativeStrand,
+                             final boolean recordUnmapped, final String cigar, final String qualityString,
+                             final int defaultQuality, final boolean isSecondary) throws SAMException {
+        final htsjdk.samtools.SAMRecord rec = createReadNoFlag(name, contig, start, negativeStrand, recordUnmapped, cigar, qualityString, defaultQuality);
+        if (isSecondary) rec.setNotPrimaryAlignmentFlag(true);
+        this.records.add(rec);
+        return rec;
+    }
+
+    /**
+     * Adds a fragment record (mapped or unmapped) to the set using the provided contig start and optionally the strand,
+     * cigar string, quality string or default quality score.
+     */
+    public SAMRecord addFrag(final String name, final int contig, final int start, final boolean negativeStrand,
+                             final boolean recordUnmapped, final String cigar, final String qualityString,
+                             final int defaultQuality, final boolean isSecondary, final boolean isSupplementary) throws SAMException {
+        final htsjdk.samtools.SAMRecord rec = createReadNoFlag(name, contig, start, negativeStrand, recordUnmapped, cigar, qualityString, defaultQuality);
+        if (isSecondary) rec.setNotPrimaryAlignmentFlag(true);
+        if (isSupplementary) rec.setSupplementaryAlignmentFlag(true);
+        this.records.add(rec);
+        return rec;
+    }
+
+
+    /**
+     * Fills in the bases and qualities for the given record. Quality data is randomly generated if the defaultQuality
+     * is set to -1. Otherwise all qualities will be set to defaultQuality. If a quality string is provided, that string
+     * will be used instead of the defaultQuality.
+     */
+    private void fillInBasesAndQualities(final SAMRecord rec, final String qualityString, final int defaultQuality) {
+
+        if (null == qualityString) {
+            fillInBasesAndQualities(rec, defaultQuality);
+        } else {
+            fillInBases(rec);
+            rec.setBaseQualityString(qualityString);
+        }
+    }
+
+    /**
+     * Randomly fills in the bases for the given record.
+     */
+    private void fillInBases(final SAMRecord rec) {
+        final int length = this.readLength;
+        final byte[] bases = new byte[length];
+
+        for (int i = 0; i < length; ++i) {
+            bases[i] = BASES[this.random.nextInt(BASES.length)];
+        }
+
+        rec.setReadBases(bases);
+    }
+
+    /**
+     * Adds an unmapped fragment read to the builder.
+     */
+    public void addUnmappedFragment(final String name) {
+        addFrag(name, -1, -1, false, true, null, null, -1, false);
+    }
+
+
+    /**
+     * Adds a skeletal pair of records to the set using the provided
+     * contig starts.  The pair is assumed to be a well
+     * formed pair sitting on a single contig.
+     */
+    public void addPair(final String name, final int contig, final int start1, final int start2) {
+        final SAMRecord end1 = new SAMRecord(this.header);
+        final SAMRecord end2 = new SAMRecord(this.header);
+        final boolean end1IsFirstOfPair = this.random.nextBoolean();
+
+        end1.setReadName(name);
+        end1.setReferenceIndex(contig);
+        end1.setAlignmentStart(start1);
+        end1.setReadNegativeStrandFlag(false);
+        end1.setCigarString(readLength + "M");
+        if(useNmFlag) end1.setAttribute(ReservedTagConstants.NM, 0);
+        end1.setMappingQuality(255);
+        end1.setReadPairedFlag(true);
+        end1.setProperPairFlag(true);
+        end1.setMateReferenceIndex(contig);
+        end1.setAttribute(SAMTag.MC.name(), readLength + "M");
+        end1.setMateAlignmentStart(start2);
+        end1.setMateNegativeStrandFlag(true);
+        end1.setFirstOfPairFlag(end1IsFirstOfPair);
+        end1.setSecondOfPairFlag(!end1IsFirstOfPair);
+        end1.setInferredInsertSize((int) CoordMath.getLength(start1, CoordMath.getEnd(start2, this.readLength)));
+        end1.setAttribute(SAMTag.RG.name(), READ_GROUP_ID);
+        if (programRecord != null) {
+            end1.setAttribute(SAMTag.PG.name(), programRecord.getProgramGroupId());
+        }
+        if (readGroup != null) {
+            end1.setAttribute(SAMTag.RG.name(), readGroup.getReadGroupId());
+        }
+        fillInBasesAndQualities(end1);
+
+        end2.setReadName(name);
+        end2.setReferenceIndex(contig);
+        end2.setAlignmentStart(start2);
+        end2.setReadNegativeStrandFlag(true);
+        end2.setCigarString(readLength + "M");
+        if(useNmFlag) end2.setAttribute(ReservedTagConstants.NM,0);
+        end2.setMappingQuality(255);
+        end2.setReadPairedFlag(true);
+        end2.setProperPairFlag(true);
+        end2.setMateReferenceIndex(contig);
+        end2.setAttribute(SAMTag.MC.name(), readLength + "M");
+        end2.setMateAlignmentStart(start1);
+        end2.setMateNegativeStrandFlag(false);
+        end2.setFirstOfPairFlag(!end1IsFirstOfPair);
+        end2.setSecondOfPairFlag(end1IsFirstOfPair);
+        end2.setInferredInsertSize(end1.getInferredInsertSize());
+        end2.setAttribute(SAMTag.RG.name(), READ_GROUP_ID);
+        if (programRecord != null) {
+            end2.setAttribute(SAMTag.PG.name(), programRecord.getProgramGroupId());
+        }
+        if (readGroup != null) {
+            end2.setAttribute(SAMTag.RG.name(), readGroup.getReadGroupId());
+        }
+        fillInBasesAndQualities(end2);
+
+        this.records.add(end1);
+        this.records.add(end2);
+    }
+
+    /**
+     * Adds a pair of records (mapped or unmapped) to the set using the provided contig starts.
+     * The pair is assumed to be a well formed pair sitting on a single contig.
+     */
+    public List<SAMRecord> addPair(final String name, final int contig, final int start1, final int start2,
+                                   final boolean record1Unmapped, final boolean record2Unmapped, final String cigar1,
+                                   final String cigar2, final boolean strand1, final boolean strand2, final int defaultQuality) {
+        return this.addPair(name, contig, contig, start1, start2, record1Unmapped, record2Unmapped, cigar1, cigar2, strand1, strand2, false, false, defaultQuality);
+    }
+
+    /**
+     * Adds a pair of records (mapped or unmapped) to the set using the provided contig starts.
+     * The pair is assumed to be a well formed pair sitting on a single contig.
+     */
+    public List<SAMRecord> addPair(final String name, final int contig1, final int contig2, final int start1, final int start2,
+                                   final boolean record1Unmapped, final boolean record2Unmapped, final String cigar1,
+                                   final String cigar2, final boolean strand1, final boolean strand2, final boolean record1NonPrimary,
+                                   final boolean record2NonPrimary, final int defaultQuality) {
+        final List<SAMRecord> recordsList = new LinkedList<SAMRecord>();
+
+        final SAMRecord end1 = createReadNoFlag(name, contig1, start1, strand1, record1Unmapped, cigar1, null, defaultQuality);
+        final SAMRecord end2 = createReadNoFlag(name, contig2, start2, strand2, record2Unmapped, cigar2, null, defaultQuality);
+
+        end1.setReadPairedFlag(true);
+        end1.setFirstOfPairFlag(true);
+
+        if (!record1Unmapped && !record2Unmapped) {
+            end1.setProperPairFlag(true);
+            end2.setProperPairFlag(true);
+        }
+        end2.setReadPairedFlag(true);
+        end2.setSecondOfPairFlag(true);
+
+        if (record1NonPrimary) end1.setNotPrimaryAlignmentFlag(true);
+        if (record2NonPrimary) end2.setNotPrimaryAlignmentFlag(true);
+
+        // set mate info
+        SamPairUtil.setMateInfo(end1, end2, true);
+
+        recordsList.add(end1);
+        recordsList.add(end2);
+
+        records.add(end1);
+        records.add(end2);
+
+        return recordsList;
+    }
+
+    /**
+     * Adds a pair of records (mapped or unmapped) to the set using the provided contig starts.
+     * The pair is assumed to be a well formed pair sitting on a single contig.
+     */
+    public List<SAMRecord> addPair(final String name, final int contig, final int start1, final int start2,
+                                   final boolean record1Unmapped, final boolean record2Unmapped, final String cigar1,
+                                   final String cigar2, final boolean strand1, final boolean strand2, final boolean record1NonPrimary,
+                                   final boolean record2NonPrimary, final int defaultQuality) {
+        return addPair(name, contig, contig, start1, start2, record1Unmapped, record2Unmapped, cigar1, cigar2, strand1, strand2,
+                record1NonPrimary, record2NonPrimary, defaultQuality);
+    }
+
+    /**
+     * Adds a pair with both ends unmapped to the builder.
+     */
+    public void addUnmappedPair(final String name) {
+        final SAMRecord end1 = new SAMRecord(this.header);
+        final SAMRecord end2 = new SAMRecord(this.header);
+        final boolean end1IsFirstOfPair = this.random.nextBoolean();
+
+        end1.setReadName(name);
+        end1.setReadPairedFlag(true);
+        end1.setReadUnmappedFlag(true);
+        end1.setAttribute(SAMTag.MC.name(), null);
+        end1.setProperPairFlag(false);
+        end1.setFirstOfPairFlag(end1IsFirstOfPair);
+        end1.setSecondOfPairFlag(!end1IsFirstOfPair);
+        end1.setMateUnmappedFlag(true);
+        end1.setAttribute(SAMTag.RG.name(), READ_GROUP_ID);
+        if (programRecord != null) {
+            end1.setAttribute(SAMTag.PG.name(), programRecord.getProgramGroupId());
+        }
+        if (this.unmappedHasBasesAndQualities) {
+            fillInBasesAndQualities(end1);
+        }
+
+        end2.setReadName(name);
+        end2.setReadPairedFlag(true);
+        end2.setReadUnmappedFlag(true);
+        end2.setAttribute(SAMTag.MC.name(), null);
+        end2.setProperPairFlag(false);
+        end2.setFirstOfPairFlag(!end1IsFirstOfPair);
+        end2.setSecondOfPairFlag(end1IsFirstOfPair);
+        end2.setMateUnmappedFlag(true);
+        end2.setAttribute(SAMTag.RG.name(), READ_GROUP_ID);
+        if (programRecord != null) {
+            end2.setAttribute(SAMTag.PG.name(), programRecord.getProgramGroupId());
+        }
+        if (this.unmappedHasBasesAndQualities) {
+            fillInBasesAndQualities(end2);
+        }
+
+        this.records.add(end1);
+        this.records.add(end2);
+    }
+
+    /**
+     * Fills in bases and qualities with randomly generated data.
+     * Uses the builder's configured read length.
+     */
+    private void fillInBasesAndQualities(final SAMRecord rec) {
+        fillInBasesAndQualities(rec, -1);
+    }
+
+    /**
+     * Fills in bases and qualities with a set default quality. If the defaultQuality is set to -1, quality scores will
+     * be randomly generated.
+     * Uses the builder's configured read length.
+     */
+    private void fillInBasesAndQualities(final SAMRecord rec, final int defaultQuality) {
+        final int length = this.readLength;
+        final byte[] quals = new byte[length];
+
+        if (-1 != defaultQuality) {
+            Arrays.fill(quals, (byte) defaultQuality);
+        } else {
+            for (int i = 0; i < length; ++i) {
+                quals[i] = (byte) this.random.nextInt(50);
+            }
+        }
+        rec.setBaseQualities(quals);
+        fillInBases(rec);
+    }
+
+    /**
+     * Creates a SamReader over the records accumulated in this builder, backed by a temporary BAM file.
+     *
+     * @return SamReader
+     */
+    public SamReader getSamReader() {
+
+        final File tempFile;
+
+        try {
+            tempFile = File.createTempFile("temp", ".sam");
+        } catch (final IOException e) {
+            throw new RuntimeIOException("problems creating tempfile", e);
+        }
+
+        this.header.setAttribute("VN", "1.0");
+        final SAMFileWriter w = new SAMFileWriterFactory().makeBAMWriter(this.header, true, tempFile);
+        for (final SAMRecord r : this.getRecords()) {
+            w.addAlignment(r);
+        }
+
+        w.close();
+
+        final SamReader reader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT).open(tempFile);
+        tempFile.deleteOnExit();
+
+        return reader;
+    }
+
+    public SAMFileHeader getHeader() {
+        return header;
+    }
+
+    public void setReadLength(final int readLength) { this.readLength = readLength; }
+
+}
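
A usage sketch (not part of the patch) of building a small, coordinate-sorted record set for a test; read names and coordinates are arbitrary:

    import htsjdk.samtools.SAMRecord;
    import htsjdk.samtools.SAMRecordSetBuilder;

    public class SAMRecordSetBuilderDemo {
        public static void main(String[] args) {
            final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
            builder.setRandomSeed(42);                  // repeatable bases and qualities
            builder.addFrag("frag1", 0, 1000, false);   // single-end read on the first contig
            builder.addPair("pair1", 1, 2000, 2300);    // well formed pair on one contig
            builder.addUnmappedFragment("unmapped1");

            // The default constructor keeps records coordinate-sorted.
            for (final SAMRecord rec : builder) {
                System.out.println(rec.getSAMString().trim());
            }
            // builder.getSamReader() would instead round-trip the records through a temporary BAM file.
        }
    }
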
diff --git a/src/java/htsjdk/samtools/SAMRecordUtil.java b/src/main/java/htsjdk/samtools/SAMRecordUtil.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMRecordUtil.java
rename to src/main/java/htsjdk/samtools/SAMRecordUtil.java
diff --git a/src/java/htsjdk/samtools/SAMSequenceDictionary.java b/src/main/java/htsjdk/samtools/SAMSequenceDictionary.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMSequenceDictionary.java
rename to src/main/java/htsjdk/samtools/SAMSequenceDictionary.java
diff --git a/src/java/htsjdk/samtools/SAMSequenceRecord.java b/src/main/java/htsjdk/samtools/SAMSequenceRecord.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMSequenceRecord.java
rename to src/main/java/htsjdk/samtools/SAMSequenceRecord.java
diff --git a/src/java/htsjdk/samtools/SAMSortOrderChecker.java b/src/main/java/htsjdk/samtools/SAMSortOrderChecker.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMSortOrderChecker.java
rename to src/main/java/htsjdk/samtools/SAMSortOrderChecker.java
diff --git a/src/java/htsjdk/samtools/SAMTag.java b/src/main/java/htsjdk/samtools/SAMTag.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMTag.java
rename to src/main/java/htsjdk/samtools/SAMTag.java
diff --git a/src/java/htsjdk/samtools/SAMTagUtil.java b/src/main/java/htsjdk/samtools/SAMTagUtil.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMTagUtil.java
rename to src/main/java/htsjdk/samtools/SAMTagUtil.java
diff --git a/src/java/htsjdk/samtools/SAMTestUtil.java b/src/main/java/htsjdk/samtools/SAMTestUtil.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMTestUtil.java
rename to src/main/java/htsjdk/samtools/SAMTestUtil.java
diff --git a/src/java/htsjdk/samtools/SAMTextHeaderCodec.java b/src/main/java/htsjdk/samtools/SAMTextHeaderCodec.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMTextHeaderCodec.java
rename to src/main/java/htsjdk/samtools/SAMTextHeaderCodec.java
diff --git a/src/java/htsjdk/samtools/SAMTextReader.java b/src/main/java/htsjdk/samtools/SAMTextReader.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMTextReader.java
rename to src/main/java/htsjdk/samtools/SAMTextReader.java
diff --git a/src/main/java/htsjdk/samtools/SAMTextWriter.java b/src/main/java/htsjdk/samtools/SAMTextWriter.java
new file mode 100644
index 0000000..0786d67
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/SAMTextWriter.java
@@ -0,0 +1,221 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools;
+
+import htsjdk.samtools.util.AsciiWriter;
+import htsjdk.samtools.util.RuntimeIOException;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.StringWriter;
+import java.io.Writer;
+
+/**
+ * Writer for text-format SAM files.
+ */
+public class SAMTextWriter extends SAMFileWriterImpl {
+    private static final String FIELD_SEPARATOR = "\t";
+
+    private final Writer out;
+    // For error reporting only.
+    private final File file;
+    private final TextTagCodec tagCodec = new TextTagCodec();
+    private final SAMTagUtil tagUtil = new SAMTagUtil();
+
+    private final SamFlagField samFlagFieldOutput;
+    
+    /**
+     * Constructs a SAMTextWriter that outputs to a Writer.
+     * @param out Writer.
+     */
+    public SAMTextWriter(final Writer out) {
+        this(out, SamFlagField.DECIMAL);
+    }
+
+    /**
+     * Constructs a SAMTextWriter that writes to a File.
+     * @param file Where to write the output.
+     */
+    public SAMTextWriter(final File file) {
+        this(file, SamFlagField.DECIMAL);
+    }
+
+    /**
+     * Returns the Writer used by this instance.  Useful for flushing the output.
+     */
+    public Writer getWriter() {
+        return out;
+    }
+
+    /**
+     * Constructs a SAMTextWriter that writes to an OutputStream.  The OutputStream
+     * is wrapped in an AsciiWriter, which can be retrieved with getWriter().
+     * @param stream Need not be buffered because this class provides buffering. 
+     */
+    public SAMTextWriter(final OutputStream stream) {
+        this(stream, SamFlagField.DECIMAL);
+    }
+
+    /**
+     * Constructs a SAMTextWriter that outputs to a Writer.
+     * @param out Writer.
+     */
+    public SAMTextWriter(final Writer out, final SamFlagField samFlagFieldOutput) {
+        if (samFlagFieldOutput == null) throw new IllegalArgumentException("Sam flag field was null");
+        this.out = out;
+        this.file = null;
+        this.samFlagFieldOutput = samFlagFieldOutput;
+    }
+
+    /**
+     * Constructs a SAMTextWriter that writes to a File.
+     * @param file Where to write the output.
+     */
+    public SAMTextWriter(final File file, final SamFlagField samFlagFieldOutput) {
+        if (samFlagFieldOutput == null) throw new IllegalArgumentException("Sam flag field was null");
+        try {
+            this.file = file;
+            this.out = new AsciiWriter(new FileOutputStream(file));
+        } catch (final IOException e) {
+            throw new RuntimeIOException(e);
+        }
+        this.samFlagFieldOutput = samFlagFieldOutput;
+    }
+
+    /**
+     * Constructs a SAMTextWriter that writes to an OutputStream.  The OutputStream
+     * is wrapped in an AsciiWriter, which can be retrieved with getWriter().
+     * @param stream Need not be buffered because this class provides buffering.
+     */
+    public SAMTextWriter(final OutputStream stream, final SamFlagField samFlagFieldOutput) {
+        if (samFlagFieldOutput == null) throw new IllegalArgumentException("Sam flag field was null");
+        this.file = null;
+        this.out = new AsciiWriter(stream);
+        this.samFlagFieldOutput = samFlagFieldOutput;
+    }
+
+    /**
+     * Write the record.
+     *
+     * @param alignment SAMRecord.
+     */
+    public void writeAlignment(final SAMRecord alignment) {
+        try {
+            out.write(alignment.getReadName());
+            out.write(FIELD_SEPARATOR);
+            out.write(this.samFlagFieldOutput.format(alignment.getFlags()));
+            out.write(FIELD_SEPARATOR);
+            out.write(alignment.getReferenceName());
+            out.write(FIELD_SEPARATOR);
+            out.write(Integer.toString(alignment.getAlignmentStart()));
+            out.write(FIELD_SEPARATOR);
+            out.write(Integer.toString(alignment.getMappingQuality()));
+            out.write(FIELD_SEPARATOR);
+            out.write(alignment.getCigarString());
+            out.write(FIELD_SEPARATOR);
+
+            //  == is OK here because these strings are interned
+            if (alignment.getReferenceName() == alignment.getMateReferenceName() &&
+                    SAMRecord.NO_ALIGNMENT_REFERENCE_NAME != alignment.getReferenceName()) {
+                out.write("=");
+            } else {
+                out.write(alignment.getMateReferenceName());
+            }
+            out.write(FIELD_SEPARATOR);
+            out.write(Integer.toString(alignment.getMateAlignmentStart()));
+            out.write(FIELD_SEPARATOR);
+            out.write(Integer.toString(alignment.getInferredInsertSize()));
+            out.write(FIELD_SEPARATOR);
+            out.write(alignment.getReadString());
+            out.write(FIELD_SEPARATOR);
+            out.write(alignment.getBaseQualityString());
+            SAMBinaryTagAndValue attribute = alignment.getBinaryAttributes();
+            while (attribute != null) {
+                out.write(FIELD_SEPARATOR);
+                final String encodedTag;
+                if (attribute.isUnsignedArray()) {
+                    encodedTag = tagCodec.encodeUnsignedArray(tagUtil.makeStringTag(attribute.tag), attribute.value);
+                } else {
+                    encodedTag = tagCodec.encode(tagUtil.makeStringTag(attribute.tag), attribute.value);
+                }
+                out.write(encodedTag);
+                attribute = attribute.getNext();
+            }
+            out.write("\n");
+
+        } catch (final IOException e) {
+            throw new RuntimeIOException(e);
+        }
+    }
+
+    /* This method is called by SAMRecord.getSAMString(). */
+    private static SAMTextWriter textWriter = null;
+    private static StringWriter stringWriter = null;
+    static synchronized String getSAMString(final SAMRecord alignment) {
+        if (stringWriter == null) stringWriter = new StringWriter();
+        if (textWriter == null) textWriter = new SAMTextWriter(stringWriter);
+        stringWriter.getBuffer().setLength(0);
+        textWriter.writeAlignment(alignment);
+        return stringWriter.toString();
+    }
+
+    /**
+     * Write the header text.  This method can also be used to write
+     * an arbitrary String, not necessarily the header.
+     *
+     * @param textHeader String containing the text to write.
+     */
+    public void writeHeader(final String textHeader) {
+        try {
+            out.write(textHeader);
+        } catch (final IOException e) {
+            throw new RuntimeIOException(e);
+        }
+    }
+
+    /**
+     * Do any required flushing here.
+     */
+    public void finish() {
+        try {
+            out.close();
+        } catch (final IOException e) {
+            throw new RuntimeIOException(e);
+        }
+    }
+
+    /**
+     * For producing error messages.
+     *
+     * @return Output filename, or null if there isn't one.
+     */
+    public String getFilename() {
+        if (file == null) {
+            return null;
+        }
+        return file.getAbsolutePath();
+    }
+}
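
A sketch (not part of the patch) of producing SAM text output through SAMFileWriterFactory.makeSAMWriter, the usual entry point, which is assumed here to delegate to this writer; it reuses SAMRecordSetBuilder from above to supply a header and a record:

    import htsjdk.samtools.SAMFileWriter;
    import htsjdk.samtools.SAMFileWriterFactory;
    import htsjdk.samtools.SAMRecord;
    import htsjdk.samtools.SAMRecordSetBuilder;

    public class SamTextOutputDemo {
        public static void main(String[] args) {
            final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
            builder.addFrag("frag1", 0, 1000, false);

            // presorted = true because the builder keeps its records coordinate-sorted.
            final SAMFileWriter writer = new SAMFileWriterFactory()
                    .makeSAMWriter(builder.getHeader(), true, System.out);
            for (final SAMRecord rec : builder.getRecords()) {
                writer.addAlignment(rec);
            }
            writer.close(); // flushes the text output (and closes the stream)
        }
    }
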
diff --git a/src/java/htsjdk/samtools/SAMTools.java b/src/main/java/htsjdk/samtools/SAMTools.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMTools.java
rename to src/main/java/htsjdk/samtools/SAMTools.java
diff --git a/src/java/htsjdk/samtools/SAMUtils.java b/src/main/java/htsjdk/samtools/SAMUtils.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMUtils.java
rename to src/main/java/htsjdk/samtools/SAMUtils.java
diff --git a/src/java/htsjdk/samtools/SAMValidationError.java b/src/main/java/htsjdk/samtools/SAMValidationError.java
similarity index 100%
rename from src/java/htsjdk/samtools/SAMValidationError.java
rename to src/main/java/htsjdk/samtools/SAMValidationError.java
diff --git a/src/java/htsjdk/samtools/SQTagUtil.java b/src/main/java/htsjdk/samtools/SQTagUtil.java
similarity index 100%
rename from src/java/htsjdk/samtools/SQTagUtil.java
rename to src/main/java/htsjdk/samtools/SQTagUtil.java
diff --git a/src/main/java/htsjdk/samtools/SRAFileReader.java b/src/main/java/htsjdk/samtools/SRAFileReader.java
new file mode 100644
index 0000000..6925ffc
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/SRAFileReader.java
@@ -0,0 +1,308 @@
+/*===========================================================================
+*
+*                            PUBLIC DOMAIN NOTICE
+*               National Center for Biotechnology Information
+*
+*  This software/database is a "United States Government Work" under the
+*  terms of the United States Copyright Act.  It was written as part of
+*  the author's official duties as a United States Government employee and
+*  thus cannot be copyrighted.  This software/database is freely available
+*  to the public for use. The National Library of Medicine and the U.S.
+*  Government have not placed any restriction on its use or reproduction.
+*
+*  Although all reasonable efforts have been taken to ensure the accuracy
+*  and reliability of the software and data, the NLM and the U.S.
+*  Government do not and cannot warrant the performance or results that
+*  may be obtained by using this software or data. The NLM and the U.S.
+*  Government disclaim all warranties, express or implied, including
+*  warranties of performance, merchantability or fitness for any particular
+*  purpose.
+*
+*  Please cite the author in any work or product based on this material.
+*
+* ===========================================================================
+*
+*/
+
+/**
+ * Created by andrii.nikitiuk on 8/11/15.
+ */
+
+package htsjdk.samtools;
+
+import htsjdk.samtools.sra.ReferenceCache;
+import htsjdk.samtools.sra.SRAAccession;
+import htsjdk.samtools.util.CloseableIterator;
+
+import htsjdk.samtools.SamReader.Type;
+
+import htsjdk.samtools.util.Log;
+import ngs.ErrorMsg;
+import ngs.ReadCollection;
+import ngs.ReadGroupIterator;
+import ngs.ReferenceIterator;
+import ngs.Reference;
+
+import java.util.ArrayList;
+import java.util.List;
+
+
+public class SRAFileReader extends SamReader.ReaderImplementation implements SamReader.Indexing {
+    private static final Log log = Log.getInstance(SRAFileReader.class);
+    private SRAAccession acc;
+    private SAMFileHeader virtualHeader;
+    private ReadCollection run;
+    private ValidationStringency validationStringency;
+    private SRAIterator.RecordRangeInfo recordRangeInfo;
+    private SRAIndex index;
+    private ReferenceCache cachedReferences;
+
+    public SRAFileReader(final SRAAccession acc) {
+        this.acc = acc;
+
+        if (!acc.isValid()) {
+            throw new IllegalArgumentException("Invalid SRA accession was passed to SRA reader: " + acc);
+        }
+
+        try {
+            run = gov.nih.nlm.ncbi.ngs.NGS.openReadCollection(acc.toString());
+            virtualHeader = loadSamHeader();
+        } catch (final Exception e) {
+            throw new RuntimeException(e);
+        }
+
+        cachedReferences = new ReferenceCache(run, virtualHeader);
+        recordRangeInfo = SRAIterator.getRecordsRangeInfo(run);
+        index = new SRAIndex(virtualHeader, recordRangeInfo);
+    }
+
+    @Override
+    public Type type() {
+        return Type.SRA_TYPE;
+    }
+
+    @Override
+    public boolean hasIndex() {
+        return true;
+    }
+
+    @Override
+    public BAMIndex getIndex() {
+        return index;
+    }
+
+    @Override
+    public SAMFileHeader getFileHeader() {
+        return virtualHeader;
+    }
+
+    @Override
+    public CloseableIterator<SAMRecord> getIterator() {
+        return getIterator(getFilePointerSpanningReads());
+    }
+
+    @Override
+    public CloseableIterator<SAMRecord> getIterator(SAMFileSpan chunks) {
+        if (run == null) {
+            throw new RuntimeException("Cannot create iterator - SRA run is uninitialized");
+        }
+
+        if (virtualHeader == null) {
+            throw new RuntimeException("Cannot create iterator - SAM file header is uninitialized");
+        }
+
+        List<Chunk> chunkList = ((BAMFileSpan) chunks).getChunks();
+
+        final SRAIterator newIterator = new SRAIterator(acc, run, virtualHeader, cachedReferences, recordRangeInfo, chunkList);
+        if (validationStringency != null) {
+            newIterator.setValidationStringency(validationStringency);
+        }
+
+        return newIterator;
+    }
+
+    @Override
+    public SAMFileSpan getFilePointerSpanningReads() {
+        if (recordRangeInfo.getTotalRecordRangeLength() <= 0) {
+            throw new RuntimeException("Cannot create file span - SRA file is empty");
+        }
+
+        return new BAMFileSpan(new Chunk(0, recordRangeInfo.getTotalRecordRangeLength()));
+    }
+
+    @Override
+    public CloseableIterator<SAMRecord> query(QueryInterval[] intervals, boolean contained) {
+        BAMFileSpan span = new BAMFileSpan();
+        BrowseableBAMIndex index = getBrowseableIndex();
+
+        for (QueryInterval interval : intervals) {
+            BAMFileSpan intervalSpan;
+            if (!contained) {
+                intervalSpan = index.getSpanOverlapping(interval.referenceIndex, interval.start, interval.end);
+
+            } else {
+                intervalSpan = getSpanContained(interval.referenceIndex, interval.start, interval.end);
+            }
+            span.add(intervalSpan);
+        }
+
+        return getIterator(span);
+    }
+
+    @Override
+    public CloseableIterator<SAMRecord> queryAlignmentStart(String sequence, int start) {
+        int sequenceIndex = virtualHeader.getSequenceIndex(sequence);
+        if (sequenceIndex == -1) {
+            throw new IllegalArgumentException("Unknown sequence '" + sequence + "' was passed to SRAFileReader");
+        }
+
+        return getIterator(getSpanContained(sequenceIndex, start, -1));
+    }
+
+    @Override
+    public CloseableIterator<SAMRecord> queryUnmapped() {
+        if (recordRangeInfo.getTotalRecordRangeLength() <= 0) {
+            throw new RuntimeException("Cannot create file span - SRA file is empty");
+        }
+
+        SAMFileSpan span = new BAMFileSpan(new Chunk(recordRangeInfo.getTotalReferencesLength(), recordRangeInfo.getTotalRecordRangeLength()));
+        return getIterator(span);
+    }
+
+    @Override
+    public void close() {
+        run = null;
+    }
+
+    @Override
+    public ValidationStringency getValidationStringency() {
+        return validationStringency;
+    }
+
+
+    /** INDEXING */
+
+
+    /**
+     * Returns true if the supported index is browseable, meaning the bins in it can be traversed
+     * and chunk data inspected and retrieved.
+     *
+     * @return True if the index supports the BrowseableBAMIndex interface.  False otherwise.
+     */
+    @Override
+    public boolean hasBrowseableIndex() {
+        return true;
+    }
+
+    /**
+     * Gets an index tagged with the BrowseableBAMIndex interface.  Throws an exception if no such
+     * index is available.
+     *
+     * @return An index with a browseable interface, if possible.
+     * @throws SAMException if no such index is available.
+     */
+    @Override
+    public BrowseableBAMIndex getBrowseableIndex() {
+        return index;
+    }
+
+    /**
+     * Iterate through the given chunks in the file.
+     *
+     * @param chunks List of chunks for which to retrieve data.
+     * @return An iterator over the given chunks.
+     */
+    @Override
+    public SAMRecordIterator iterator(final SAMFileSpan chunks) {
+        CloseableIterator<SAMRecord> it = getIterator(chunks);
+        if (it == null) {
+            return null;
+        }
+        return (SAMRecordIterator) it;
+    }
+
+    /** ReaderImplementation */
+    @Override
+    void enableFileSource(final SamReader reader, final boolean enabled) {
+        log.info("enableFileSource is not supported");
+    }
+
+    @Override
+    void enableIndexCaching(final boolean enabled) {
+        log.info("enableIndexCaching is not supported");
+    }
+
+    @Override
+    void enableIndexMemoryMapping(final boolean enabled) {
+        log.info("enableIndexMemoryMapping is not supported");
+    }
+
+    @Override
+    void enableCrcChecking(final boolean enabled) {
+        log.info("enableCrcChecking is not supported");
+    }
+
+    @Override
+    void setSAMRecordFactory(final SAMRecordFactory factory) {
+        log.info("setSAMRecordFactory is not supported");
+    }
+
+    @Override
+    void setValidationStringency(final ValidationStringency validationStringency) {
+        this.validationStringency = validationStringency;
+    }
+
+    protected SRAIterator.RecordRangeInfo getRecordsRangeInfo() {
+        return recordRangeInfo;
+    }
+
+    private SAMFileHeader loadSamHeader() throws ErrorMsg {
+        if (run == null) {
+            throw new RuntimeException("Cannot load SAMFileHeader - SRA run is uninitialized");
+        }
+
+        String runName = run.getName();
+
+        SAMFileHeader header = new SAMFileHeader();
+        header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
+
+        ReadGroupIterator itRg = run.getReadGroups();
+        while (itRg.nextReadGroup()) {
+            String rgName = itRg.getName();
+            if (rgName.isEmpty())
+                rgName = runName;
+            SAMReadGroupRecord rg = new SAMReadGroupRecord(rgName);
+            rg.setSample(runName);
+            header.addReadGroup(rg);
+        }
+
+        ReferenceIterator itRef = run.getReferences();
+        while (itRef.nextReference()) {
+            header.addSequence(new SAMSequenceRecord(itRef.getCanonicalName(), (int) itRef.getLength()));
+        }
+
+        return header;
+    }
+
+    private BAMFileSpan getSpanContained(int sequenceIndex, long start, long end) {
+        if (recordRangeInfo.getTotalRecordRangeLength() <= 0) {
+            throw new RuntimeException("Cannot create file span - SRA file is empty");
+        }
+
+        long sequenceOffset = recordRangeInfo.getReferenceOffsets().get(sequenceIndex);
+        long sequenceLength = recordRangeInfo.getReferenceLengthsAligned().get(sequenceIndex);
+        if (end == -1) {
+            end = sequenceLength;
+        }
+
+        if (start > sequenceLength) {
+            throw new IllegalArgumentException("Sequence start position is larger than its length");
+        }
+
+        if (end > sequenceLength) {
+            throw new IllegalArgumentException("Sequence end position is larger than its length");
+        }
+
+        return new BAMFileSpan(new Chunk(sequenceOffset + start, sequenceOffset + end));
+    }
+}
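
A rough usage sketch for the reader above. The accession string is a placeholder, and this assumes that SRAAccession can be constructed directly from an accession string and that the NGS/SRA native libraries are available at runtime:

    import htsjdk.samtools.SAMFileHeader;
    import htsjdk.samtools.SAMRecord;
    import htsjdk.samtools.SRAFileReader;
    import htsjdk.samtools.sra.SRAAccession;
    import htsjdk.samtools.util.CloseableIterator;

    public class SraReadDemo {
        public static void main(final String[] args) {
            final SRAAccession acc = new SRAAccession("SRR000123"); // placeholder accession
            if (!acc.isValid()) {
                throw new IllegalArgumentException("Not a valid SRA accession: " + acc);
            }
            final SRAFileReader reader = new SRAFileReader(acc);
            final SAMFileHeader header = reader.getFileHeader();
            System.out.println("References in header: " + header.getSequenceDictionary().size());
            // Iterate every record: aligned reference ranges first, then the unaligned read space.
            try (final CloseableIterator<SAMRecord> it = reader.getIterator()) {
                while (it.hasNext()) {
                    final SAMRecord record = it.next();
                    // ... process record ...
                }
            }
            reader.close();
        }
    }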
diff --git a/src/java/htsjdk/samtools/SRAIndex.java b/src/main/java/htsjdk/samtools/SRAIndex.java
similarity index 100%
rename from src/java/htsjdk/samtools/SRAIndex.java
rename to src/main/java/htsjdk/samtools/SRAIndex.java
diff --git a/src/main/java/htsjdk/samtools/SRAIterator.java b/src/main/java/htsjdk/samtools/SRAIterator.java
new file mode 100644
index 0000000..9fbbc97
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/SRAIterator.java
@@ -0,0 +1,256 @@
+/*===========================================================================
+*
+*                            PUBLIC DOMAIN NOTICE
+*               National Center for Biotechnology Information
+*
+*  This software/database is a "United States Government Work" under the
+*  terms of the United States Copyright Act.  It was written as part of
+*  the author's official duties as a United States Government employee and
+*  thus cannot be copyrighted.  This software/database is freely available
+*  to the public for use. The National Library of Medicine and the U.S.
+*  Government have not placed any restriction on its use or reproduction.
+*
+*  Although all reasonable efforts have been taken to ensure the accuracy
+*  and reliability of the software and data, the NLM and the U.S.
+*  Government do not and cannot warrant the performance or results that
+*  may be obtained by using this software or data. The NLM and the U.S.
+*  Government disclaim all warranties, express or implied, including
+*  warranties of performance, merchantability or fitness for any particular
+*  purpose.
+*
+*  Please cite the author in any work or product based on this material.
+*
+* ===========================================================================
+*
+*/
+
+/**
+ * Created by andrii.nikitiuk on 8/11/15.
+ */
+
+package htsjdk.samtools;
+
+import htsjdk.samtools.SAMFileHeader.SortOrder;
+
+import htsjdk.samtools.sra.ReferenceCache;
+import htsjdk.samtools.sra.SRAAccession;
+import htsjdk.samtools.sra.SRAAlignmentIterator;
+import htsjdk.samtools.sra.SRAUnalignmentIterator;
+import htsjdk.samtools.sra.SRAUtils;
+import ngs.ErrorMsg;
+import ngs.ReadCollection;
+import ngs.Reference;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.NoSuchElementException;
+
+/**
+ * SRA iterator which returns SAMRecords for requested list of chunks
+ */
+public class SRAIterator implements SAMRecordIterator {
+    private ValidationStringency validationStringency;
+
+    private SRAAccession accession;
+    private ReadCollection run;
+    private SAMFileHeader header;
+    private ReferenceCache cachedReferences;
+    private RecordRangeInfo recordRangeInfo;
+    private Iterator<Chunk> chunksIterator;
+    private Chunk currentChunk;
+
+    private SRAAlignmentIterator alignmentIterator;
+    private SRAUnalignmentIterator unalignmentIterator;
+
+    /**
+     * Describes record ranges info needed for emulating BAM index
+     */
+    public static class RecordRangeInfo {
+        private List<Long> referenceOffsets;
+        private List<Long> referenceLengthsAligned;
+        private long totalReferencesLength;
+        private long numberOfReads; // is used for unaligned read space
+        private long totalRecordRangeLength;
+
+        /**
+         * @param referenceLengthsAligned a list with lengths of each reference
+         * @param numberOfReads total number of reads within SRA archive
+         */
+        public RecordRangeInfo(List<Long> referenceLengthsAligned, long numberOfReads) {
+            this.numberOfReads = numberOfReads;
+            this.referenceLengthsAligned = referenceLengthsAligned;
+
+            referenceOffsets = new ArrayList<Long>();
+
+            totalReferencesLength = 0;
+            for (Long refLen : referenceLengthsAligned) {
+                referenceOffsets.add(totalReferencesLength);
+                totalReferencesLength += refLen;
+            }
+
+            totalRecordRangeLength = totalReferencesLength + this.numberOfReads;
+        }
+
+        public long getNumberOfReads() {
+            return numberOfReads;
+        }
+
+        public long getTotalReferencesLength() {
+            return totalReferencesLength;
+        }
+
+        public long getTotalRecordRangeLength() {
+            return totalRecordRangeLength;
+        }
+
+        public final List<Long> getReferenceOffsets() {
+            return Collections.unmodifiableList(referenceOffsets);
+        }
+
+        public final List<Long> getReferenceLengthsAligned() {
+            return Collections.unmodifiableList(referenceLengthsAligned);
+        }
+    }
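+
+    // Worked example with illustrative numbers: given aligned reference lengths
+    // [1000, 2000] and 500 total reads, the constructor above yields
+    //   referenceOffsets       = [0, 1000]
+    //   totalReferencesLength  = 3000
+    //   totalRecordRangeLength = 3000 + 500 = 3500
+    // so chunk coordinates below 3000 address the aligned reference space and
+    // coordinates in [3000, 3500) address the unaligned read space.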
+
+    /**
+     * Loads record ranges needed for emulating BAM index
+     * @param run read collection
+     * @return record ranges
+     */
+    public static RecordRangeInfo getRecordsRangeInfo(ReadCollection run) {
+        try {
+            return new RecordRangeInfo(SRAUtils.getReferencesLengthsAligned(run), SRAUtils.getNumberOfReads(run));
+        } catch (ErrorMsg e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    /**
+     * @param accession SRA accession
+     * @param run opened read collection
+     * @param header sam header
+     * @param cachedReferences list of cached references shared among all iterators from a single SRAFileReader
+     * @param recordRangeInfo info about record ranges within the SRA archive
+     * @param chunks used to determine which records the iterator should return
+     */
+    public SRAIterator(SRAAccession accession, final ReadCollection run, final SAMFileHeader header, ReferenceCache cachedReferences,
+                       final RecordRangeInfo recordRangeInfo, final List<Chunk> chunks) {
+        this.accession = accession;
+        this.run = run;
+        this.header = header;
+        this.cachedReferences = cachedReferences;
+        this.recordRangeInfo = recordRangeInfo;
+        chunksIterator = chunks.iterator();
+        if (chunksIterator.hasNext()) {
+            currentChunk = chunksIterator.next();
+        }
+
+        hasNext();
+    }
+
+    /**
+     * NGS iterators implement a single method "nextObject", which returns true if the operation was successful or
+     * false if there are no more objects available.
+     * That means that there is no way to check "hasNext" without actually moving the iterator forward.
+     * Because of that, all the logic for moving the iterator forward actually happens in "hasNext".
+     *
+     * Here is how it works:
+     *  The iterator holds a list of chunks of requested records, and chunksIterator walks through that list.
+     *  If the current chunk can represent aligned fragments, we create an SRAAlignmentIterator, pass the chunk
+     *  into it and ask whether it can find any record. If a record was found, we report that there is a next
+     *  element; otherwise we check whether the chunk can represent unaligned fragments and, if so, create an
+     *  SRAUnalignmentIterator and repeat the same steps as with the alignment iterator.
+     *
+     *  If no record was found by either SRAAlignmentIterator or SRAUnalignmentIterator (it is possible that a
+     *  reference range has no alignments, or that a read range contains only aligned fragments), we try the next chunk.
+     *
+     *  When there are no more chunks and both iterators have no more records we return false.
+     *
+     * @return true if there are more records available
+     */
+    @Override
+    public boolean hasNext() {
+        while (currentChunk != null) {
+            if (alignmentIterator == null) {
+                if (currentChunk.getChunkStart() < recordRangeInfo.getTotalReferencesLength()) {
+                    alignmentIterator = new SRAAlignmentIterator(accession, run, header, cachedReferences, recordRangeInfo, currentChunk);
+                    if (validationStringency != null) {
+                        alignmentIterator.setValidationStringency(validationStringency);
+                    }
+                }
+            }
+
+            if (alignmentIterator != null && alignmentIterator.hasNext()) {
+                return true;
+            }
+
+            if (unalignmentIterator == null) {
+                if (currentChunk.getChunkEnd() > recordRangeInfo.getTotalReferencesLength()) {
+                    unalignmentIterator = new SRAUnalignmentIterator(accession, run, header, recordRangeInfo, currentChunk);
+                    if (validationStringency != null) {
+                        unalignmentIterator.setValidationStringency(validationStringency);
+                    }
+                }
+            }
+            if (unalignmentIterator != null && unalignmentIterator.hasNext()) {
+                return true;
+            }
+
+            if (alignmentIterator != null) {
+                alignmentIterator.close();
+            }
+            alignmentIterator = null;
+            unalignmentIterator = null;
+            if (chunksIterator.hasNext()) {
+                currentChunk = chunksIterator.next();
+            } else {
+                currentChunk = null;
+            }
+        }
+        return false;
+    }
+
+    /**
+     * Calls hasNext to make sure that one of the inner iterators points to the next record, then retrieves the
+     * record from it.
+     * @return lazy SRA record
+     */
+    @Override
+    public SAMRecord next() {
+        if (!hasNext()) {
+            throw new NoSuchElementException("No more records are available in SRAIterator");
+        }
+
+        if (alignmentIterator != null && alignmentIterator.hasNext()) {
+            return alignmentIterator.next();
+        }
+
+        return unalignmentIterator.next();
+    }
+
+    @Override
+    public void remove() { throw new UnsupportedOperationException("Removal of records not implemented."); }
+
+    @Override
+    public void close() {
+        if (alignmentIterator != null) {
+            alignmentIterator.close();
+            alignmentIterator = null;
+        }
+    }
+
+    @Override
+    public SAMRecordIterator assertSorted(final SortOrder sortOrder) { throw new UnsupportedOperationException("assertSorted is not implemented."); }
+
+    public void setValidationStringency(ValidationStringency validationStringency) {
+        this.validationStringency = validationStringency;
+
+        if (alignmentIterator != null) {
+            alignmentIterator.setValidationStringency(validationStringency);
+        }
+        if (unalignmentIterator != null) {
+            unalignmentIterator.setValidationStringency(validationStringency);
+        }
+    }
+}
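
For completeness, a fragment showing how the iterator above can be driven directly over a single chunk spanning the whole record range; in normal use SRAFileReader constructs it for you. The variables acc, run, header and cachedReferences are assumed to have been obtained as in SRAFileReader:

    // Assumes: SRAAccession acc; ReadCollection run; SAMFileHeader header; ReferenceCache cachedReferences
    final SRAIterator.RecordRangeInfo info = SRAIterator.getRecordsRangeInfo(run);
    final List<Chunk> chunks =
            Collections.singletonList(new Chunk(0, info.getTotalRecordRangeLength()));
    final SRAIterator it = new SRAIterator(acc, run, header, cachedReferences, info, chunks);
    it.setValidationStringency(ValidationStringency.SILENT);
    try {
        while (it.hasNext()) {
            final SAMRecord record = it.next();
            // ... process record ...
        }
    } finally {
        it.close();
    }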
diff --git a/src/java/htsjdk/samtools/SamFileHeaderMerger.java b/src/main/java/htsjdk/samtools/SamFileHeaderMerger.java
similarity index 100%
rename from src/java/htsjdk/samtools/SamFileHeaderMerger.java
rename to src/main/java/htsjdk/samtools/SamFileHeaderMerger.java
diff --git a/src/main/java/htsjdk/samtools/SamFileValidator.java b/src/main/java/htsjdk/samtools/SamFileValidator.java
new file mode 100644
index 0000000..3a6deb0
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/SamFileValidator.java
@@ -0,0 +1,856 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+
+package htsjdk.samtools;
+
+import htsjdk.samtools.SAMValidationError.Type;
+import htsjdk.samtools.BamIndexValidator.IndexValidationStringency;
+import htsjdk.samtools.metrics.MetricBase;
+import htsjdk.samtools.metrics.MetricsFile;
+import htsjdk.samtools.reference.ReferenceSequence;
+import htsjdk.samtools.reference.ReferenceSequenceFile;
+import htsjdk.samtools.reference.ReferenceSequenceFileWalker;
+import htsjdk.samtools.util.BlockCompressedInputStream;
+import htsjdk.samtools.util.CloseableIterator;
+import htsjdk.samtools.util.CloserUtil;
+import htsjdk.samtools.util.FastqQualityFormat;
+import htsjdk.samtools.util.Histogram;
+import htsjdk.samtools.util.IOUtil;
+import htsjdk.samtools.util.Log;
+import htsjdk.samtools.util.ProgressLogger;
+import htsjdk.samtools.util.QualityEncodingDetector;
+import htsjdk.samtools.util.SequenceUtil;
+import htsjdk.samtools.util.StringUtil;
+
+import java.io.BufferedInputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.PrintWriter;
+import java.util.AbstractMap;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Validates SAM files as follows:
+ * <ul>
+ * <li>checks the SAM file header for a sequence dictionary</li>
+ * <li>checks the SAM file header for read groups</li>
+ * <li>for each SAM record
+ * <ul>
+ * <li>reports error detected by SAMRecord.isValid()</li>
+ * <li>validates NM (nucleotide differences) exists and matches reality</li>
+ * <li>validates mate fields agree with data in the mate record</li>
+ * </ul>
+ * </li>
+ * </ul>
+ *
+ * @author Doug Voet
+ * @see SAMRecord#isValid()
+ */
+public class SamFileValidator {
+    private Histogram<Type> errorsByType = new Histogram<Type>();
+    private final PrintWriter out;
+    private PairEndInfoMap pairEndInfoByName;
+    private ReferenceSequenceFileWalker refFileWalker = null;
+    private boolean verbose = false;
+    private int maxVerboseOutput = 100;
+    private SAMSortOrderChecker orderChecker;
+    private Set<Type> errorsToIgnore = EnumSet.noneOf(Type.class);
+    private boolean ignoreWarnings = false;
+    private boolean bisulfiteSequenced = false;
+    private IndexValidationStringency indexValidationStringency = IndexValidationStringency.NONE;
+    private boolean sequenceDictionaryEmptyAndNoWarningEmitted = false;
+    private final int maxTempFiles;
+
+    private final static Log log = Log.getInstance(SamFileValidator.class);
+
+    public SamFileValidator(final PrintWriter out, final int maxTempFiles) {
+        this.out = out;
+        this.maxTempFiles = maxTempFiles;
+    }
+
+    /**
+     * Sets one or more error types that should not be reported on.
+     */
+    public void setErrorsToIgnore(final Collection<Type> types) {
+        if (!types.isEmpty()) {
+            this.errorsToIgnore = EnumSet.copyOf(types);
+        }
+    }
+
+    public void setIgnoreWarnings(final boolean ignoreWarnings) {
+        this.ignoreWarnings = ignoreWarnings;
+    }
+
+    /**
+     * Outputs validation summary report to out.
+     *
+     * @param samReader records to validate
+     * @param reference if null, NM tag validation is skipped
+     * @return boolean  true if there are no validation errors, otherwise false
+     */
+    public boolean validateSamFileSummary(final SamReader samReader, final ReferenceSequenceFile reference) {
+        init(reference, samReader.getFileHeader());
+
+        validateSamFile(samReader, out);
+
+        boolean result = errorsByType.isEmpty();
+
+        if (errorsByType.getCount() > 0) {
+            // Convert to a histogram with String IDs so that WARNING: or ERROR: can be prepended to the error type.
+            final Histogram<String> errorsAndWarningsByType = new Histogram<>("Error Type", "Count");
+            for (final Histogram.Bin<Type> bin : errorsByType.values()) {
+                errorsAndWarningsByType.increment(bin.getId().getHistogramString(), bin.getValue());
+            }
+            final MetricsFile<ValidationMetrics, String> metricsFile = new MetricsFile<ValidationMetrics, String>();
+            errorsByType.setBinLabel("Error Type");
+            errorsByType.setValueLabel("Count");
+            metricsFile.setHistogram(errorsAndWarningsByType);
+            metricsFile.write(out);
+        }
+        cleanup();
+        return result;
+    }
+
+    /**
+     * Outputs validation error details to out.  When verbose output is enabled, processing stops after
+     * maxVerboseOutput errors have been reported.
+     *
+     * @param samReader records to validate
+     * @param reference if null, NM tag validation is skipped
+     * @return boolean  true if there are no validation errors, otherwise false
+     */
+    public boolean validateSamFileVerbose(final SamReader samReader, final ReferenceSequenceFile reference) {
+        init(reference, samReader.getFileHeader());
+
+        try {
+            validateSamFile(samReader, out);
+        } catch (MaxOutputExceededException e) {
+            out.println("Maximum output of [" + maxVerboseOutput + "] errors reached.");
+        }
+        boolean result = errorsByType.isEmpty();
+        cleanup();
+        return result;
+    }
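+
+    // A typical invocation of the two entry points above, sketched with illustrative
+    // arguments (bamFile here stands for a hypothetical java.io.File):
+    //   final SamFileValidator validator = new SamFileValidator(new PrintWriter(System.out), 100);
+    //   validator.setVerbose(true, 100);
+    //   validator.validateSamFileVerbose(SamReaderFactory.makeDefault().open(bamFile), null);
+    // Passing a null ReferenceSequenceFile skips NM-tag validation, as documented above.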
+
+    public void validateBamFileTermination(final File inputFile) {
+        BufferedInputStream inputStream = null;
+        try {
+            inputStream = IOUtil.toBufferedStream(new FileInputStream(inputFile));
+            if (!BlockCompressedInputStream.isValidFile(inputStream)) {
+                return;
+            }
+            final BlockCompressedInputStream.FileTermination terminationState =
+                    BlockCompressedInputStream.checkTermination(inputFile);
+            if (terminationState.equals(BlockCompressedInputStream.FileTermination.DEFECTIVE)) {
+                addError(new SAMValidationError(Type.TRUNCATED_FILE, "BAM file has defective last gzip block",
+                        inputFile.getPath()));
+            } else if (terminationState.equals(BlockCompressedInputStream.FileTermination.HAS_HEALTHY_LAST_BLOCK)) {
+                addError(new SAMValidationError(Type.BAM_FILE_MISSING_TERMINATOR_BLOCK,
+                        "Older BAM file -- does not have terminator block",
+                        inputFile.getPath()));
+
+            }
+        } catch (IOException e) {
+            throw new SAMException("IOException", e);
+        } finally {
+            if (inputStream != null) {
+                CloserUtil.close(inputStream);
+            }
+        }
+    }
+
+    private void validateSamFile(final SamReader samReader, final PrintWriter out) {
+        try {
+            validateHeader(samReader.getFileHeader());
+            orderChecker = new SAMSortOrderChecker(samReader.getFileHeader().getSortOrder());
+            validateSamRecordsAndQualityFormat(samReader, samReader.getFileHeader());
+            validateUnmatchedPairs();
+            if (indexValidationStringency != IndexValidationStringency.NONE) {
+                try {
+                    if (indexValidationStringency == IndexValidationStringency.LESS_EXHAUSTIVE) {
+                        BamIndexValidator.lessExhaustivelyTestIndex(samReader);
+                    }
+                    else {
+                        BamIndexValidator.exhaustivelyTestIndex(samReader);
+                    }
+                } catch (Exception e) {
+                    addError(new SAMValidationError(Type.INVALID_INDEX_FILE_POINTER, e.getMessage(), null));
+                }
+            }
+
+            if (errorsByType.isEmpty()) {
+                out.println("No errors found");
+            }
+        } finally {
+            out.flush();
+        }
+    }
+
+
+    /**
+     * Report on reads marked as paired, for which the mate was not found.
+     */
+    private void validateUnmatchedPairs() {
+        final InMemoryPairEndInfoMap inMemoryPairMap;
+        if (pairEndInfoByName instanceof CoordinateSortedPairEndInfoMap) {
+            // For the coordinate-sorted map, we need to detect mate pairs in which the mateReferenceIndex on one end
+            // does not match the readReferenceIndex on the other end, so the pairs were never united and validated.
+            inMemoryPairMap = new InMemoryPairEndInfoMap();
+            CloseableIterator<Map.Entry<String, PairEndInfo>> it = ((CoordinateSortedPairEndInfoMap) pairEndInfoByName).iterator();
+            while (it.hasNext()) {
+                Map.Entry<String, PairEndInfo> entry = it.next();
+                PairEndInfo pei = inMemoryPairMap.remove(entry.getValue().readReferenceIndex, entry.getKey());
+                if (pei != null) {
+                    // Found a mismatch btw read.mateReferenceIndex and mate.readReferenceIndex
+                    List<SAMValidationError> errors = pei.validateMates(entry.getValue(), entry.getKey());
+                    for (final SAMValidationError error : errors) {
+                        addError(error);
+                    }
+                } else {
+                    // Mate not found.
+                    inMemoryPairMap.put(entry.getValue().mateReferenceIndex, entry.getKey(), entry.getValue());
+                }
+            }
+            it.close();
+        } else {
+            inMemoryPairMap = (InMemoryPairEndInfoMap) pairEndInfoByName;
+        }
+        // At this point, everything in InMemoryMap is a read marked as a pair, for which a mate was not found.
+        for (final Map.Entry<String, PairEndInfo> entry : inMemoryPairMap) {
+            addError(new SAMValidationError(Type.MATE_NOT_FOUND, "Mate not found for paired read", entry.getKey()));
+        }
+    }
+
+    /**
+     * SAM record and quality format validations are combined into a single method because validation must be completed
+     * in only a single pass of the SamRecords (because a SamReader's iterator() method may not return the same
+     * records on a subsequent call).
+     */
+    private void validateSamRecordsAndQualityFormat(final Iterable<SAMRecord> samRecords, final SAMFileHeader header) {
+        final SAMRecordIterator iter = (SAMRecordIterator) samRecords.iterator();
+        final ProgressLogger progress = new ProgressLogger(log, 10000000, "Validated Read");
+        final QualityEncodingDetector qualityDetector = new QualityEncodingDetector();
+        try {
+            while (iter.hasNext()) {
+                final SAMRecord record = iter.next();
+
+                qualityDetector.add(record);
+
+                final long recordNumber = progress.getCount() + 1;
+                final Collection<SAMValidationError> errors = record.isValid();
+                if (errors != null) {
+                    for (final SAMValidationError error : errors) {
+                        error.setRecordNumber(recordNumber);
+                        addError(error);
+                    }
+                }
+
+                validateMateFields(record, recordNumber);
+                final boolean hasValidSortOrder = validateSortOrder(record, recordNumber);
+                validateReadGroup(record, header);
+                final boolean cigarIsValid = validateCigar(record, recordNumber);
+                if (cigarIsValid) {
+                    try {
+                        validateNmTag(record, recordNumber);
+                    }
+                    catch (SAMException e) {
+                        if (hasValidSortOrder) {
+                            // If a CRAM file has an invalid sort order, the ReferenceFileWalker will throw a
+                            // SAMException due to an out of order request when retrieving reference bases during NM
+                            // tag validation; rethrow the exception only if the sort order is valid, otherwise
+                            // swallow the exception and carry on validating
+                            throw e;
+                        }
+                    }
+                }
+                validateSecondaryBaseCalls(record, recordNumber);
+                validateTags(record, recordNumber);
+                if (sequenceDictionaryEmptyAndNoWarningEmitted && !record.getReadUnmappedFlag()) {
+                    addError(new SAMValidationError(Type.MISSING_SEQUENCE_DICTIONARY, "Sequence dictionary is empty", null));
+                    sequenceDictionaryEmptyAndNoWarningEmitted = false;
+
+                }
+                progress.record(record);
+            }
+
+            try {
+                if (progress.getCount() > 0) { // Avoid exception being thrown as a result of no qualities being read
+                    final FastqQualityFormat format = qualityDetector.generateBestGuess(QualityEncodingDetector.FileContext.SAM, FastqQualityFormat.Standard);
+                    if (format != FastqQualityFormat.Standard) {
+                        addError(new SAMValidationError(Type.INVALID_QUALITY_FORMAT, String.format("Detected %s quality score encoding, but expected %s.", format, FastqQualityFormat.Standard), null));
+                    }
+                }
+            } catch (SAMException e) {
+                addError(new SAMValidationError(Type.INVALID_QUALITY_FORMAT, e.getMessage(), null));
+            }
+        } catch (SAMFormatException e) {
+            // increment record number because the iterator behind the SAMFileReader
+            // reads one record ahead so we will get this failure one record ahead
+            final String msg = "SAMFormatException on record " + (progress.getCount() + 1);
+            out.println(msg);
+            throw new SAMException(msg, e);
+        } catch (FileTruncatedException e) {
+            addError(new SAMValidationError(Type.TRUNCATED_FILE, "File is truncated", null));
+        } finally {
+            iter.close();
+        }
+    }
+
+    private void validateReadGroup(final SAMRecord record, final SAMFileHeader header) {
+        final SAMReadGroupRecord rg = record.getReadGroup();
+        if (rg == null) {
+            addError(new SAMValidationError(Type.RECORD_MISSING_READ_GROUP,
+                    "A record is missing a read group", record.getReadName()));
+        } else if (header.getReadGroup(rg.getId()) == null) {
+            addError(new SAMValidationError(Type.READ_GROUP_NOT_FOUND,
+                    "A record has a read group not found in the header: ",
+                    record.getReadName() + ", " + rg.getReadGroupId()));
+        }
+    }
+
+    /**
+     * Report error if a tag value is a Long.
+     */
+    private void validateTags(final SAMRecord record, final long recordNumber) {
+        for (final SAMRecord.SAMTagAndValue tagAndValue : record.getAttributes()) {
+            if (tagAndValue.value instanceof Long) {
+                addError(new SAMValidationError(Type.TAG_VALUE_TOO_LARGE,
+                        "Numeric value too large for tag " + tagAndValue.tag,
+                        record.getReadName(), recordNumber));
+            }
+        }
+    }
+
+    private void validateSecondaryBaseCalls(final SAMRecord record, final long recordNumber) {
+        final String e2 = (String) record.getAttribute(SAMTag.E2.name());
+        if (e2 != null) {
+            if (e2.length() != record.getReadLength()) {
+                addError(new SAMValidationError(Type.MISMATCH_READ_LENGTH_AND_E2_LENGTH,
+                        String.format("E2 tag length (%d) != read length (%d)", e2.length(), record.getReadLength()),
+                        record.getReadName(), recordNumber));
+            }
+            final byte[] bases = record.getReadBases();
+            final byte[] secondaryBases = StringUtil.stringToBytes(e2);
+            for (int i = 0; i < Math.min(bases.length, secondaryBases.length); ++i) {
+                if (SequenceUtil.isNoCall(bases[i]) || SequenceUtil.isNoCall(secondaryBases[i])) {
+                    continue;
+                }
+                if (SequenceUtil.basesEqual(bases[i], secondaryBases[i])) {
+                    addError(new SAMValidationError(Type.E2_BASE_EQUALS_PRIMARY_BASE,
+                            String.format("Secondary base call  (%c) == primary base call (%c)",
+                                    (char) secondaryBases[i], (char) bases[i]),
+                            record.getReadName(), recordNumber));
+                    break;
+                }
+            }
+        }
+        final String u2 = (String) record.getAttribute(SAMTag.U2.name());
+        if (u2 != null && u2.length() != record.getReadLength()) {
+            addError(new SAMValidationError(Type.MISMATCH_READ_LENGTH_AND_U2_LENGTH,
+                    String.format("U2 tag length (%d) != read length (%d)", u2.length(), record.getReadLength()),
+                    record.getReadName(), recordNumber));
+        }
+    }
+
+    private boolean validateCigar(final SAMRecord record, final long recordNumber) {
+        if (record.getReadUnmappedFlag()) {
+            return true;
+        }
+        return validateCigar(record, recordNumber, true);
+    }
+
+    private boolean validateMateCigar(final SAMRecord record, final long recordNumber) {
+        return validateCigar(record, recordNumber, false);
+    }
+
+    private boolean validateCigar(final SAMRecord record, final long recordNumber, final boolean isReadCigar) {
+        final ValidationStringency savedStringency = record.getValidationStringency();
+        record.setValidationStringency(ValidationStringency.LENIENT);
+        final List<SAMValidationError> errors = isReadCigar ? record.validateCigar(recordNumber) : SAMUtils.validateMateCigar(record, recordNumber);
+        record.setValidationStringency(savedStringency);
+        if (errors == null) {
+            return true;
+        }
+        boolean valid = true;
+        for (final SAMValidationError error : errors) {
+            addError(error);
+            valid = false;
+        }
+        return valid;
+    }
+
+
+    private boolean validateSortOrder(final SAMRecord record, final long recordNumber) {
+        final SAMRecord prev = orderChecker.getPreviousRecord();
+        boolean isValidSortOrder = orderChecker.isSorted(record);
+        if (!isValidSortOrder) {
+            addError(new SAMValidationError(
+                    Type.RECORD_OUT_OF_ORDER,
+                    String.format(
+                            "The record is out of [%s] order, prior read name [%s], prior coodinates [%d:%d]",
+                            record.getHeader().getSortOrder().name(),
+                            prev.getReadName(),
+                            prev.getReferenceIndex(),
+                            prev.getAlignmentStart()),
+                    record.getReadName(),
+                    recordNumber));
+        }
+        return isValidSortOrder;
+    }
+
+    private void init(final ReferenceSequenceFile reference, final SAMFileHeader header) {
+        if (header.getSortOrder() == SAMFileHeader.SortOrder.coordinate) {
+            this.pairEndInfoByName = new CoordinateSortedPairEndInfoMap();
+        } else {
+            this.pairEndInfoByName = new InMemoryPairEndInfoMap();
+        }
+        if (reference != null) {
+            this.refFileWalker = new ReferenceSequenceFileWalker(reference);
+        }
+    }
+
+    private void cleanup() {
+        this.errorsByType = null;
+        this.pairEndInfoByName = null;
+        this.refFileWalker = null;
+    }
+
+    private void validateNmTag(final SAMRecord record, final long recordNumber) {
+        if (!record.getReadUnmappedFlag()) {
+            final Integer tagNucleotideDiffs = record.getIntegerAttribute(ReservedTagConstants.NM);
+            if (tagNucleotideDiffs == null) {
+                addError(new SAMValidationError(
+                        Type.MISSING_TAG_NM,
+                        "NM tag (nucleotide differences) is missing",
+                        record.getReadName(),
+                        recordNumber));
+            } else if (refFileWalker != null) {
+                final ReferenceSequence refSequence = refFileWalker.get(record.getReferenceIndex());
+                final int actualNucleotideDiffs = SequenceUtil.calculateSamNmTag(record, refSequence.getBases(),
+                        0, isBisulfiteSequenced());
+
+                if (!tagNucleotideDiffs.equals(actualNucleotideDiffs)) {
+                    addError(new SAMValidationError(
+                            Type.INVALID_TAG_NM,
+                            "NM tag (nucleotide differences) in file [" + tagNucleotideDiffs +
+                                    "] does not match reality [" + actualNucleotideDiffs + "]",
+                            record.getReadName(),
+                            recordNumber));
+                }
+            }
+        }
+    }
+
+    private void validateMateFields(final SAMRecord record, final long recordNumber) {
+        if (!record.getReadPairedFlag() || record.isSecondaryOrSupplementary()) {
+            return;
+        }
+        validateMateCigar(record, recordNumber);
+
+        final PairEndInfo pairEndInfo = pairEndInfoByName.remove(record.getReferenceIndex(), record.getReadName());
+        if (pairEndInfo == null) {
+            pairEndInfoByName.put(record.getMateReferenceIndex(), record.getReadName(), new PairEndInfo(record, recordNumber));
+        } else {
+            final List<SAMValidationError> errors =
+                    pairEndInfo.validateMates(new PairEndInfo(record, recordNumber), record.getReadName());
+            for (final SAMValidationError error : errors) {
+                addError(error);
+            }
+        }
+    }
+
+    private void validateHeader(final SAMFileHeader fileHeader) {
+        for (final SAMValidationError error : fileHeader.getValidationErrors()) {
+            addError(error);
+        }
+        if (fileHeader.getVersion() == null) {
+            addError(new SAMValidationError(Type.MISSING_VERSION_NUMBER, "Header has no version number", null));
+        } else if (!SAMFileHeader.ACCEPTABLE_VERSIONS.contains(fileHeader.getVersion())) {
+            addError(new SAMValidationError(Type.INVALID_VERSION_NUMBER, "Header version: " +
+                    fileHeader.getVersion() + " does not match any of the acceptable versions: " +
+                    StringUtil.join(", ", SAMFileHeader.ACCEPTABLE_VERSIONS.toArray(new String[0])),
+                    null));
+        }
+        if (fileHeader.getSequenceDictionary().isEmpty()) {
+            sequenceDictionaryEmptyAndNoWarningEmitted = true;
+        }
+        if (fileHeader.getReadGroups().isEmpty()) {
+            addError(new SAMValidationError(Type.MISSING_READ_GROUP, "Read groups is empty", null));
+        }
+        final List<SAMProgramRecord> pgs = fileHeader.getProgramRecords();
+        for (int i = 0; i < pgs.size() - 1; i++) {
+            for (int j = i + 1; j < pgs.size(); j++) {
+                if (pgs.get(i).getProgramGroupId().equals(pgs.get(j).getProgramGroupId())) {
+                    addError(new SAMValidationError(Type.DUPLICATE_PROGRAM_GROUP_ID, "Duplicate " +
+                            "program group id: " + pgs.get(i).getProgramGroupId(), null));
+                }
+            }
+        }
+
+        final List<SAMReadGroupRecord> rgs = fileHeader.getReadGroups();
+        final Set<String> readGroupIDs = new HashSet<String>();
+
+        for (final SAMReadGroupRecord record : rgs) {
+            final String readGroupID = record.getReadGroupId();
+            if (readGroupIDs.contains(readGroupID)) {
+                addError(new SAMValidationError(Type.DUPLICATE_READ_GROUP_ID, "Duplicate " +
+                        "read group id: " + readGroupID, null));
+            } else {
+                readGroupIDs.add(readGroupID);
+            }
+
+            final String platformValue = record.getPlatform();
+            if (platformValue == null || "".equals(platformValue)) {
+                addError(new SAMValidationError(Type.MISSING_PLATFORM_VALUE,
+                        "A platform (PL) attribute was not found for read group ",
+                        readGroupID));
+            }
+            else { 
+                // NB: cannot be null, so not catching a NPE
+                try {
+                    SAMReadGroupRecord.PlatformValue.valueOf(platformValue.toUpperCase());
+                } catch (IllegalArgumentException e) {
+                    addError(new SAMValidationError(Type.INVALID_PLATFORM_VALUE, 
+                            "The platform (PL) attribute (" + platformValue + ") + was not one of the valid values for read group ",
+                            readGroupID));
+                }
+            }
+        }
+    }
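+
+    // As an illustration, a minimal header that passes the checks above carries a version
+    // line, unique read group and program group identifiers, and a recognised platform
+    // (fields are tab separated in an actual SAM file):
+    //   @HD  VN:1.5  SO:coordinate
+    //   @SQ  SN:chr1  LN:248956422
+    //   @RG  ID:rg1  SM:sample1  PL:ILLUMINA
+    //   @PG  ID:bwa  PN:bwa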
+
+    private void addError(final SAMValidationError error) {
+        // Just ignore an error if it's of a type we're not interested in
+        if (this.errorsToIgnore.contains(error.getType())) return;
+
+        if (this.ignoreWarnings && error.getType().severity == SAMValidationError.Severity.WARNING) return;
+
+        this.errorsByType.increment(error.getType());
+        if (verbose) {
+            out.println(error);
+            out.flush();
+            if (this.errorsByType.getCount() >= maxVerboseOutput) {
+                throw new MaxOutputExceededException();
+            }
+        }
+    }
+
+    /**
+     * Control verbosity
+     *
+     * @param verbose          True in order to emit a message per error or warning.
+     * @param maxVerboseOutput If verbose, emit no more than this many messages.  Ignored if !verbose.
+     */
+    public void setVerbose(final boolean verbose, final int maxVerboseOutput) {
+        this.verbose = verbose;
+        this.maxVerboseOutput = maxVerboseOutput;
+    }
+
+    public boolean isBisulfiteSequenced() {
+        return bisulfiteSequenced;
+    }
+
+    public void setBisulfiteSequenced(boolean bisulfiteSequenced) {
+        this.bisulfiteSequenced = bisulfiteSequenced;
+    }
+
+    /**
+     * @deprecated use setIndexValidationStringency instead
+     */
+    public SamFileValidator setValidateIndex(final boolean validateIndex) {
+        // The SAMFileReader must also have IndexCaching enabled to have the index validated.
+        return this.setIndexValidationStringency(validateIndex ? IndexValidationStringency.EXHAUSTIVE : IndexValidationStringency.NONE);
+    }
+
+    public SamFileValidator setIndexValidationStringency(final IndexValidationStringency stringency) {
+        this.indexValidationStringency = stringency;
+        return this;
+    }
+
+    public static class ValidationMetrics extends MetricBase {
+    }
+
+    /**
+     * This class is used so we don't have to store the entire SAMRecord in memory while we wait
+     * to find a record's mate and also to store the record number.
+     */
+    private static class PairEndInfo {
+        private final int readAlignmentStart;
+        private final int readReferenceIndex;
+        private final boolean readNegStrandFlag;
+        private final boolean readUnmappedFlag;
+        private final String readCigarString;
+
+        private final int mateAlignmentStart;
+        private final int mateReferenceIndex;
+        private final boolean mateNegStrandFlag;
+        private final boolean mateUnmappedFlag;
+        private final String mateCigarString;
+
+        private final boolean firstOfPairFlag;
+
+        private final long recordNumber;
+
+        public PairEndInfo(final SAMRecord record, final long recordNumber) {
+            this.recordNumber = recordNumber;
+
+            this.readAlignmentStart = record.getAlignmentStart();
+            this.readNegStrandFlag = record.getReadNegativeStrandFlag();
+            this.readReferenceIndex = record.getReferenceIndex();
+            this.readUnmappedFlag = record.getReadUnmappedFlag();
+            this.readCigarString = record.getCigarString();
+
+            this.mateAlignmentStart = record.getMateAlignmentStart();
+            this.mateNegStrandFlag = record.getMateNegativeStrandFlag();
+            this.mateReferenceIndex = record.getMateReferenceIndex();
+            this.mateUnmappedFlag = record.getMateUnmappedFlag();
+            final Object mcs = record.getAttribute(SAMTag.MC.name());
+            this.mateCigarString = (mcs != null) ? (String) mcs : null;
+
+            this.firstOfPairFlag = record.getFirstOfPairFlag();
+        }
+
+        private PairEndInfo(int readAlignmentStart, int readReferenceIndex, boolean readNegStrandFlag, boolean readUnmappedFlag,
+                            String readCigarString,
+                            int mateAlignmentStart, int mateReferenceIndex, boolean mateNegStrandFlag, boolean mateUnmappedFlag,
+                            String mateCigarString,
+                            boolean firstOfPairFlag, long recordNumber) {
+            this.readAlignmentStart = readAlignmentStart;
+            this.readReferenceIndex = readReferenceIndex;
+            this.readNegStrandFlag = readNegStrandFlag;
+            this.readUnmappedFlag = readUnmappedFlag;
+            this.readCigarString = readCigarString;
+            this.mateAlignmentStart = mateAlignmentStart;
+            this.mateReferenceIndex = mateReferenceIndex;
+            this.mateNegStrandFlag = mateNegStrandFlag;
+            this.mateUnmappedFlag = mateUnmappedFlag;
+            this.mateCigarString = mateCigarString;
+            this.firstOfPairFlag = firstOfPairFlag;
+            this.recordNumber = recordNumber;
+        }
+
+        public List<SAMValidationError> validateMates(final PairEndInfo mate, final String readName) {
+            final List<SAMValidationError> errors = new ArrayList<SAMValidationError>();
+            validateMateFields(this, mate, readName, errors);
+            validateMateFields(mate, this, readName, errors);
+            // Validations that should not be repeated on both ends
+            if (this.firstOfPairFlag == mate.firstOfPairFlag) {
+                final String whichEnd = this.firstOfPairFlag ? "first" : "second";
+                errors.add(new SAMValidationError(
+                        Type.MATES_ARE_SAME_END,
+                        "Both mates are marked as " + whichEnd + " of pair",
+                        readName,
+                        this.recordNumber
+                ));
+            }
+            return errors;
+        }
+
+        private void validateMateFields(final PairEndInfo end1, final PairEndInfo end2, final String readName, final List<SAMValidationError> errors) {
+            if (end1.mateAlignmentStart != end2.readAlignmentStart) {
+                errors.add(new SAMValidationError(
+                        Type.MISMATCH_MATE_ALIGNMENT_START,
+                        "Mate alignment does not match alignment start of mate",
+                        readName,
+                        end1.recordNumber));
+            }
+            if (end1.mateNegStrandFlag != end2.readNegStrandFlag) {
+                errors.add(new SAMValidationError(
+                        Type.MISMATCH_FLAG_MATE_NEG_STRAND,
+                        "Mate negative strand flag does not match read negative strand flag of mate",
+                        readName,
+                        end1.recordNumber));
+            }
+            if (end1.mateReferenceIndex != end2.readReferenceIndex) {
+                errors.add(new SAMValidationError(
+                        Type.MISMATCH_MATE_REF_INDEX,
+                        "Mate reference index (MRNM) does not match reference index of mate",
+                        readName,
+                        end1.recordNumber));
+            }
+            if (end1.mateUnmappedFlag != end2.readUnmappedFlag) {
+                errors.add(new SAMValidationError(
+                        Type.MISMATCH_FLAG_MATE_UNMAPPED,
+                        "Mate unmapped flag does not match read unmapped flag of mate",
+                        readName,
+                        end1.recordNumber));
+            }
+            if ((end1.mateCigarString != null) && (!end1.mateCigarString.equals(end2.readCigarString))) {
+                errors.add(new SAMValidationError(
+                        Type.MISMATCH_MATE_CIGAR_STRING,
+                        "Mate CIGAR string does not match CIGAR string of mate",
+                        readName,
+                        end1.recordNumber));
+            }
+            // Note - don't need to validate that the mateCigarString is a valid cigar string, since this
+            // will be validated by validateCigar on the mate's record itself.
+        }
+    }
+
+    /**
+     * Thrown in addError indicating that maxVerboseOutput has been exceeded and processing should stop
+     */
+    private static class MaxOutputExceededException extends SAMException {
+        MaxOutputExceededException() {
+            super("maxVerboseOutput exceeded.");
+        }
+    }
+
+    interface PairEndInfoMap extends Iterable<Map.Entry<String, PairEndInfo>> {
+        void put(int mateReferenceIndex, String key, PairEndInfo value);
+
+        PairEndInfo remove(int mateReferenceIndex, String key);
+
+        CloseableIterator<Map.Entry<String, PairEndInfo>> iterator();
+    }
+
+    private class CoordinateSortedPairEndInfoMap implements PairEndInfoMap {
+        private final CoordinateSortedPairInfoMap<String, PairEndInfo> onDiskMap =
+                new CoordinateSortedPairInfoMap<String, PairEndInfo>(maxTempFiles, new Codec());
+
+        public void put(int mateReferenceIndex, String key, PairEndInfo value) {
+            onDiskMap.put(mateReferenceIndex, key, value);
+        }
+
+        public PairEndInfo remove(int mateReferenceIndex, String key) {
+            return onDiskMap.remove(mateReferenceIndex, key);
+        }
+
+        public CloseableIterator<Map.Entry<String, PairEndInfo>> iterator() {
+            return onDiskMap.iterator();
+        }
+
+        private class Codec implements CoordinateSortedPairInfoMap.Codec<String, PairEndInfo> {
+            private DataInputStream in;
+            private DataOutputStream out;
+
+            public void setOutputStream(final OutputStream os) {
+                this.out = new DataOutputStream(os);
+            }
+
+            public void setInputStream(final InputStream is) {
+                this.in = new DataInputStream(is);
+            }
+
+            public void encode(final String key, final PairEndInfo record) {
+                try {
+                    out.writeUTF(key);
+                    out.writeInt(record.readAlignmentStart);
+                    out.writeInt(record.readReferenceIndex);
+                    out.writeBoolean(record.readNegStrandFlag);
+                    out.writeBoolean(record.readUnmappedFlag);
+                    out.writeUTF(record.readCigarString);
+                    out.writeInt(record.mateAlignmentStart);
+                    out.writeInt(record.mateReferenceIndex);
+                    out.writeBoolean(record.mateNegStrandFlag);
+                    out.writeBoolean(record.mateUnmappedFlag);
+                    // writeUTF can't take null, so store a null mateCigarString as an empty string
+                    out.writeUTF(record.mateCigarString != null ? record.mateCigarString : "");
+                    out.writeBoolean(record.firstOfPairFlag);
+                    out.writeLong(record.recordNumber);
+                } catch (IOException e) {
+                    throw new SAMException("Error spilling PairInfo to disk", e);
+                }
+            }
+
+            public Map.Entry<String, PairEndInfo> decode() {
+                try {
+                    final String key = in.readUTF();
+                    final int readAlignmentStart = in.readInt();
+                    final int readReferenceIndex = in.readInt();
+                    final boolean readNegStrandFlag = in.readBoolean();
+                    final boolean readUnmappedFlag = in.readBoolean();
+                    final String readCigarString = in.readUTF();
+
+                    final int mateAlignmentStart = in.readInt();
+                    final int mateReferenceIndex = in.readInt();
+                    final boolean mateNegStrandFlag = in.readBoolean();
+                    final boolean mateUnmappedFlag = in.readBoolean();
+
+                    // read mateCigarString - note that null value is stored as an empty string
+                    final String mcs = in.readUTF();
+                    final String mateCigarString = !mcs.isEmpty() ? mcs : null;
+
+                    final boolean firstOfPairFlag = in.readBoolean();
+
+                    final long recordNumber = in.readLong();
+                    final PairEndInfo rec = new PairEndInfo(readAlignmentStart, readReferenceIndex, readNegStrandFlag,
+                            readUnmappedFlag, readCigarString, mateAlignmentStart, mateReferenceIndex, mateNegStrandFlag,
+                            mateUnmappedFlag, mateCigarString,
+                            firstOfPairFlag, recordNumber);
+                    return new AbstractMap.SimpleEntry<String, PairEndInfo>(key, rec);
+                } catch (IOException e) {
+                    throw new SAMException("Error reading PairInfo from disk", e);
+                }
+            }
+        }
+    }
+
+    private static class InMemoryPairEndInfoMap implements PairEndInfoMap {
+        private final Map<String, PairEndInfo> map = new HashMap<String, PairEndInfo>();
+
+        public void put(int mateReferenceIndex, String key, PairEndInfo value) {
+            if (mateReferenceIndex != value.mateReferenceIndex)
+                throw new IllegalArgumentException("mateReferenceIndex does not agree with PairEndInfo");
+            map.put(key, value);
+        }
+
+        public PairEndInfo remove(int mateReferenceIndex, String key) {
+            return map.remove(key);
+        }
+
+        public CloseableIterator<Map.Entry<String, PairEndInfo>> iterator() {
+            final Iterator<Map.Entry<String, PairEndInfo>> it = map.entrySet().iterator();
+            return new CloseableIterator<Map.Entry<String, PairEndInfo>>() {
+                public void close() {
+                    // do nothing
+                }
+
+                public boolean hasNext() {
+                    return it.hasNext();
+                }
+
+                public Map.Entry<String, PairEndInfo> next() {
+                    return it.next();
+                }
+
+                public void remove() {
+                    it.remove();
+                }
+            };
+        }
+    }
+}
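
The on-disk Codec above round-trips a nullable mate CIGAR through DataOutputStream.writeUTF, which rejects null, so null is stored as an empty string and mapped back to null on read. A minimal, self-contained sketch of that convention, using only standard java.io classes (the helper names are illustrative, not part of the validator's API):

    import java.io.*;

    public class NullableUtfRoundTrip {
        // writeUTF cannot take null, so a null value is stored as the empty string.
        static void writeNullableUtf(final DataOutputStream out, final String s) throws IOException {
            out.writeUTF(s != null ? s : "");
        }

        // An empty string read back is mapped to null.
        static String readNullableUtf(final DataInputStream in) throws IOException {
            final String s = in.readUTF();
            return s.isEmpty() ? null : s;
        }

        public static void main(final String[] args) throws IOException {
            final ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            try (DataOutputStream out = new DataOutputStream(bytes)) {
                writeNullableUtf(out, "76M"); // a mate CIGAR string
                writeNullableUtf(out, null);  // no mate CIGAR recorded
            }
            try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
                System.out.println(readNullableUtf(in)); // 76M
                System.out.println(readNullableUtf(in)); // null
            }
        }
    }

This trades away the distinction between null and a genuinely empty string, which is harmless here because an empty CIGAR is not a legal value.
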
diff --git a/src/main/java/htsjdk/samtools/SamFiles.java b/src/main/java/htsjdk/samtools/SamFiles.java
new file mode 100644
index 0000000..874fc10
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/SamFiles.java
@@ -0,0 +1,98 @@
+package htsjdk.samtools;
+
+import htsjdk.samtools.cram.CRAIIndex;
+import htsjdk.samtools.cram.build.CramIO;
+
+import htsjdk.samtools.util.Log;
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+
+/**
+ * @author mccowan
+ */
+public class SamFiles {
+
+    private final static Log LOG = Log.getInstance(SamFiles.class);
+
+    /**
+     * Finds the index file associated with the provided SAM file.  The index file must exist and be reachable to be found.
+     *
+ * If the file is a symlink and the index cannot be found, resolve the symlink and look for the index next to the actual file.
+     *
+     * @return The index for the provided SAM, or null if one was not found.
+     */
+    public static File findIndex(final File samFile) {
+        Path path = findIndex(samFile.toPath());
+        return path == null ? null : path.toFile();
+    }
+
+    /**
+     * Finds the index file associated with the provided SAM file.  The index file must exist and be reachable to be found.
+     *
+ * If the file is a symlink and the index cannot be found, resolve the symlink and look for the index next to the actual file.
+     *
+     * @return The index for the provided SAM, or null if one was not found.
+     */
+    public static Path findIndex(final Path samPath) {
+        final Path indexPath = lookForIndex(samPath); //try to find the index
+        if (indexPath == null) {
+            return unsymlinkAndLookForIndex(samPath);
+        } else {
+            return indexPath;
+        }
+    }
+
+    /**
+     * resolve the canonical path of samFile and attempt to find an index there.
+     * @return an index file or null if no index is found.
+     */
+    private static Path unsymlinkAndLookForIndex(Path samPath) {
+        try {
+            final Path canonicalSamPath = samPath.toRealPath(); // resolve symbolic links
+            final Path canonicalIndexPath = lookForIndex(canonicalSamPath);
+            if ( canonicalIndexPath != null) {
+                LOG.warn("The index file " + canonicalIndexPath.toAbsolutePath()
+                        + " was found by resolving the canonical path of a symlink: "
+                        + samPath.toAbsolutePath() + " -> " + samPath.toRealPath());
+            }
+            return canonicalIndexPath;
+        } catch (IOException e) {
+            return null;
+        }
+    }
+
+    private static Path lookForIndex(final Path samPath) {// If input is foo.bam, look for foo.bai
+        Path indexPath;
+        final String fileName = samPath.getFileName().toString(); // works for all path types (e.g. HDFS)
+        if (fileName.endsWith(BamFileIoUtils.BAM_FILE_EXTENSION)) {
+            final String bai = fileName.substring(0, fileName.length() - BamFileIoUtils.BAM_FILE_EXTENSION.length()) + BAMIndex.BAMIndexSuffix;
+            indexPath = samPath.resolveSibling(bai);
+            if (Files.isRegularFile(indexPath)) { // works for all path types (e.g. HDFS)
+                return indexPath;
+            }
+
+
+        } else if (fileName.endsWith(CramIO.CRAM_FILE_EXTENSION)) {
+            final String crai = fileName.substring(0, fileName.length() - CramIO.CRAM_FILE_EXTENSION.length()) + CRAIIndex.CRAI_INDEX_SUFFIX;
+            indexPath = samPath.resolveSibling(crai);
+            if (Files.isRegularFile(indexPath)) {
+                return indexPath;
+            }
+
+            indexPath = samPath.resolveSibling(fileName + CRAIIndex.CRAI_INDEX_SUFFIX);
+            if (Files.isRegularFile(indexPath)) {
+                return indexPath;
+            }
+        }
+
+        // If foo.bai doesn't exist, look for foo.bam.bai or foo.cram.bai
+        indexPath = samPath.resolveSibling(fileName + BAMIndex.BAMIndexSuffix);
+        if (Files.isRegularFile(indexPath)) {
+            return indexPath;
+        }
+
+        return null;
+    }
+}
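
A brief usage sketch of the lookup above; the paths are hypothetical, and findIndex returns null when no companion index (foo.bai, foo.bam.bai, or foo.crai for CRAM) is found next to the data file, even after resolving symlinks:

    import htsjdk.samtools.SamFiles;
    import java.io.File;

    public class FindIndexExample {
        public static void main(final String[] args) {
            // Hypothetical input; for sample.bam this checks sample.bai, then sample.bam.bai.
            final File bam = new File("/data/sample.bam");
            final File index = SamFiles.findIndex(bam);
            if (index == null) {
                System.out.println("No index found for " + bam);
            } else {
                System.out.println("Found index: " + index);
            }
        }
    }
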
diff --git a/src/main/java/htsjdk/samtools/SamFlagField.java b/src/main/java/htsjdk/samtools/SamFlagField.java
new file mode 100644
index 0000000..82f2a29
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/SamFlagField.java
@@ -0,0 +1,203 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2016 Nils Homer
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+
+package htsjdk.samtools;
+
+/**
+ * This determines how flag fields are represented in the SAM file.
+ *
+ * In a string FLAG, each character represents one bit with
+ * p=0x1 (paired), P=0x2 (properly paired), u=0x4 (unmapped),
+ * U=0x8 (mate unmapped), r=0x10 (reverse), R=0x20 (mate reverse)
+ * 1=0x40 (first), 2=0x80 (second), s=0x100 (not primary), 
+ * x=0x200 (failure), d=0x400 (duplicate), and S=0x800 (supplementary).
+ * This was inspired by 'samtools view -X'.
+ *
+ * We also output a character when the following bits *are not* set:
+ * m=0x4 (mapped), M=0x8 (mate mapped), f=0x10 (forward), F=0x20 
+ * (mate forward).
+ * 
+ * @author nhomer
+ */
+public enum SamFlagField {
+    NONE {
+        @Override
+        public String format(final int flag) {
+            throw new SAMFormatException("NONE not allowed for the SamFlagField when writing the SAM flag field.");
+        }
+        @Override
+        protected int parseWithoutValidation(final String flag) {
+            throw new SAMFormatException("NONE not allowed for the SamFlagField when reading the SAM flag field.");
+        } 
+    },
+    DECIMAL {
+        @Override
+        public String format(final int flag) {
+            return Integer.toString(flag);
+        }
+        /** Throws NumberFormatException if it can't parse the flag. */
+        @Override
+        protected int parseWithoutValidation(final String flag) {
+            return Integer.parseInt(flag);
+        }
+    },
+    HEXADECIMAL {
+        @Override
+        public String format(final int flag) {
+            return String.format("%#x", flag);
+        }
+        @Override
+        protected int parseWithoutValidation(final String flag) {
+            return Integer.valueOf(flag.substring(2), 16);
+        }
+    },
+    OCTAL {
+        @Override
+        public String format(final int flag) {
+            return String.format("%#o", flag);
+        }
+        @Override
+        protected int parseWithoutValidation(final String flag) {
+            return Integer.valueOf(flag, 8);
+        }
+    },
+    STRING {
+        /*
+        It is important that the string representation never starts with a digit, so that the format of
+        an input flag value can be inferred from its first character.  See the of() method below.
+         */
+
+        @Override
+        public String format(final int flag) {
+            // Adapted from the implementation here:
+            //   https://github.com/jmarshall/cansam/blob/master/lib/alignment.cpp
+            final StringBuilder value = new StringBuilder();
+
+            if ((flag & SAMFlag.READ_UNMAPPED.flag) != 0)                   value.append('u');
+            else                                                            value.append('m');
+
+            if ((flag & SAMFlag.READ_REVERSE_STRAND.flag) != 0)             value.append('r');
+            else if ((flag & SAMFlag.READ_UNMAPPED.flag) == 0)              value.append('f');
+
+            if ((flag & SAMFlag.MATE_UNMAPPED.flag) != 0)                   value.append('U');
+            else if ((flag & SAMFlag.READ_PAIRED.flag) != 0)                value.append('M');
+
+            if ((flag & SAMFlag.MATE_REVERSE_STRAND.flag) != 0)             value.append('R');
+            else if ((flag & SAMFlag.READ_PAIRED.flag) != 0)                value.append('F');
+
+            if ((flag & SAMFlag.READ_PAIRED.flag) != 0)                     value.append('p');
+            if ((flag & SAMFlag.PROPER_PAIR.flag) != 0)                     value.append('P');
+            if ((flag & SAMFlag.FIRST_OF_PAIR.flag) != 0)                   value.append('1');
+            if ((flag & SAMFlag.SECOND_OF_PAIR.flag) != 0)                  value.append('2');
+
+            if ((flag & SAMFlag.NOT_PRIMARY_ALIGNMENT.flag) != 0)           value.append('s');
+            if ((flag & SAMFlag.SUPPLEMENTARY_ALIGNMENT.flag) != 0)         value.append('S');
+            if ((flag & SAMFlag.READ_FAILS_VENDOR_QUALITY_CHECK.flag) != 0) value.append('x');
+            if ((flag & SAMFlag.DUPLICATE_READ.flag) != 0)                  value.append('d');
+
+            return value.toString();
+        }
+
+        @Override
+        protected int parseWithoutValidation(final String flag) {
+            SamFlagField.validate(flag, STRING);
+
+            // Adapted from the implementation here:
+            //   https://github.com/jmarshall/cansam/blob/master/lib/alignment.cpp
+
+            int value = 0;
+
+            for (int i = 0; i < flag.length(); i++) {
+                switch (flag.charAt(i)) {
+                    case 'p':  value |= SAMFlag.READ_PAIRED.flag;  break;
+                    case 'P':  value |= SAMFlag.PROPER_PAIR.flag;  break;
+                    case 'u':  value |= SAMFlag.READ_UNMAPPED.flag;  break;
+                    case 'U':  value |= SAMFlag.MATE_UNMAPPED.flag;  break;
+                    case 'r':  value |= SAMFlag.READ_REVERSE_STRAND.flag;  break;
+                    case 'R':  value |= SAMFlag.MATE_REVERSE_STRAND.flag;  break;
+                    case '1':  value |= SAMFlag.FIRST_OF_PAIR.flag;  break;
+                    case '2':  value |= SAMFlag.SECOND_OF_PAIR.flag;  break;
+                    case 's':  value |= SAMFlag.NOT_PRIMARY_ALIGNMENT.flag;  break;
+                    case 'x':  value |= SAMFlag.READ_FAILS_VENDOR_QUALITY_CHECK.flag;  break;
+                    case 'd':  value |= SAMFlag.DUPLICATE_READ.flag;  break;
+                    case 'S':  value |= SAMFlag.SUPPLEMENTARY_ALIGNMENT.flag;  break;
+                    case 'f':
+                    case 'F':
+                    case 'm':
+                    case 'M':
+                    case '_':
+                        break;
+                    default:
+                        throw new SAMFormatException("Unknown flag character '" + flag.charAt(i) + "' in flag '" + flag + "'");
+                }
+            }
+
+            return value;
+        }
+    };
+
+    /** Returns the string associated with this flag field. */
+    abstract public String format(final int flag);
+
+    /** Parses the flag.  Validates that the flag is of the correct type. */
+    public final int parse(final String flag) {
+        return parse(flag, true);
+    }
+
+    /** Infers the format from the flag string and parses the flag. */
+    public static int parseDefault(final String flag) {
+        return SamFlagField.of(flag).parse(flag, false);
+    }
+
+    /** Performs the actual parsing based on the radix.  No validation that the flag is of the correct radix
+     * should be performed.
+     */
+    abstract protected int parseWithoutValidation(final String flag);
+
+    /** Parses the flag.  Performs optional validation that the flag is of the correct type. */
+    private int parse(final String flag, final boolean withValidation) {
+        if (withValidation) SamFlagField.validate(flag, this);
+        return parseWithoutValidation(flag);
+    }
+
+    /**
+     * Returns the type of flag field for this string.  This does not guarantee the string is a valid
+     * flag of that type, as it only checks the first two characters.
+     */
+    public static SamFlagField of(final String s) {
+        if (s.isEmpty()) throw new SAMFormatException("Could not determine flag field type; saw an empty flag field");
+        else if (s.startsWith("0x")) return HEXADECIMAL;
+        else if (s.startsWith("0X")) return HEXADECIMAL;
+        else if (s.startsWith("0") && s.length() > 1) return OCTAL;
+        else if (Character.isDigit(s.charAt(0))) return DECIMAL;
+        else return STRING;
+    }
+
+    private static void validate(final String flag, final SamFlagField expectedField) {
+        final SamFlagField actualField = SamFlagField.of(flag);
+        if (actualField != expectedField) {
+            throw new SAMFormatException(expectedField.name() + " sam flag must start with [1-9] but found '" + flag + "' (" + actualField.name() + ")");
+        }
+    }
+}
\ No newline at end of file
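
A short sketch of how the enum above might be used: format() renders a flag value in the chosen representation, and parseDefault() infers the representation from the string itself (a "0x" prefix means hexadecimal, a leading zero means octal, another leading digit means decimal, anything else is the character string form). The flag value 99 below is illustrative:

    import htsjdk.samtools.SamFlagField;

    public class FlagFieldExample {
        public static void main(final String[] args) {
            // 99 = paired, proper pair, mate reverse strand, first of pair.
            final int flag = 99;

            System.out.println(SamFlagField.DECIMAL.format(flag));     // 99
            System.out.println(SamFlagField.HEXADECIMAL.format(flag)); // 0x63
            System.out.println(SamFlagField.STRING.format(flag));      // mfMRpP1

            // parseDefault infers the representation from the input itself.
            System.out.println(SamFlagField.parseDefault("99"));      // 99
            System.out.println(SamFlagField.parseDefault("0x63"));    // 99
            System.out.println(SamFlagField.parseDefault("mfMRpP1")); // 99
        }
    }
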
diff --git a/src/java/htsjdk/samtools/SamIndexes.java b/src/main/java/htsjdk/samtools/SamIndexes.java
similarity index 100%
rename from src/java/htsjdk/samtools/SamIndexes.java
rename to src/main/java/htsjdk/samtools/SamIndexes.java
diff --git a/src/main/java/htsjdk/samtools/SamInputResource.java b/src/main/java/htsjdk/samtools/SamInputResource.java
new file mode 100644
index 0000000..39d679d
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/SamInputResource.java
@@ -0,0 +1,473 @@
+package htsjdk.samtools;
+
+import htsjdk.samtools.seekablestream.SeekableFileStream;
+import htsjdk.samtools.seekablestream.SeekablePathStream;
+import htsjdk.samtools.seekablestream.SeekableStream;
+import htsjdk.samtools.seekablestream.SeekableStreamFactory;
+import htsjdk.samtools.sra.SRAAccession;
+import htsjdk.samtools.util.Lazy;
+import htsjdk.samtools.util.RuntimeIOException;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.MalformedURLException;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.nio.file.FileSystemNotFoundException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+
+/**
+ * Describes a SAM-like resource, including its data (where the records are), and optionally an index.
+ * <p/>
+ * A data or index source may originate from a {@link java.io.File}, {@link java.io.InputStream}, {@link URL}, or
+ * {@link htsjdk.samtools.seekablestream.SeekableStream}; look for the appropriate overload for
+ * {@code htsjdk.samtools.SamInputResource#of()}.
+ *
+ * @author mccowan
+ */
+public class SamInputResource {
+    private final InputResource source;
+    private InputResource index;
+
+    SamInputResource(final InputResource data) {
+        this(data, null);
+    }
+
+    SamInputResource(final InputResource source, final InputResource index) {
+        if (source == null) throw new NullPointerException("source");
+        this.source = source;
+        this.index = index;
+    }
+
+    /** The resource that is the SAM data (e.g., records) */
+    InputResource data() {
+        return source;
+    }
+
+    /**
+     * The resource that is the SAM index
+     *
+     * @return null, if no index is defined for this resource
+     */
+    InputResource indexMaybe() {
+        return index;
+    }
+
+    @Override
+    public String toString() {
+        return String.format("data=%s;index=%s", source, index);
+    }
+
+    /** Creates a {@link SamInputResource} reading from the provided resource, with no index. */
+    public static SamInputResource of(final File file) { return new SamInputResource(new FileInputResource(file)); }
+
+    /** Creates a {@link SamInputResource} reading from the provided resource, with no index. */
+    public static SamInputResource of(final Path path) { return new SamInputResource(new PathInputResource(path)); }
+
+    /** Creates a {@link SamInputResource} reading from the provided resource, with no index. */
+    public static SamInputResource of(final InputStream inputStream) { return new SamInputResource(new InputStreamInputResource(inputStream)); }
+
+    /** Creates a {@link SamInputResource} reading from the provided resource, with no index. */
+    public static SamInputResource of(final URL url) { return new SamInputResource(new UrlInputResource(url)); }
+
+    /** Creates a {@link SamInputResource} reading from the provided resource, with no index. */
+    public static SamInputResource of(final SeekableStream seekableStream) { return new SamInputResource(new SeekableStreamInputResource(seekableStream)); }
+
+    public static SamInputResource of(final SRAAccession acc) { return new SamInputResource(new SRAInputResource(acc)); }
+
+    /** Creates a {@link SamInputResource} from a string specifying *either* a url or a file path */
+    public static SamInputResource of(final String string) { 
+      try {
+        URL url = new URL(string);    // this will throw if it's not a URL
+        return of(url); 
+      } catch (MalformedURLException e) {
+       // ignore
+      }
+      return of(new File(string));
+    }
+    
+    /** Updates the index to point at the provided resource, then returns itself. */
+    public SamInputResource index(final File file) {
+        this.index = new FileInputResource(file);
+        return this;
+    }
+
+    /** Updates the index to point at the provided resource, then returns itself. */
+    public SamInputResource index(final Path path) {
+        this.index = new PathInputResource(path);
+        return this;
+    }
+
+    /** Updates the index to point at the provided resource, then returns itself. */
+    public SamInputResource index(final InputStream inputStream) {
+        this.index = new InputStreamInputResource(inputStream);
+        return this;
+    }
+
+    /** Updates the index to point at the provided resource, then returns itself. */
+    public SamInputResource index(final URL url) {
+        this.index = new UrlInputResource(url);
+        return this;
+    }
+
+    /** Updates the index to point at the provided resource, then returns itself. */
+    public SamInputResource index(final SeekableStream seekableStream) {
+        this.index = new SeekableStreamInputResource(seekableStream);
+        return this;
+    }
+
+}
+
+/**
+ * Describes an arbitrary input source, which is something that can be accessed as either a
+ * {@link htsjdk.samtools.seekablestream.SeekableStream} or {@link java.io.InputStream}.  A concrete implementation of this class exists for
+ * each value of {@link InputResource.Type}.
+ */
+abstract class InputResource {
+    protected InputResource(final Type type) {this.type = type;}
+
+    enum Type {
+        FILE, PATH, URL, SEEKABLE_STREAM, INPUT_STREAM, SRA_ACCESSION
+    }
+
+    private final Type type;
+
+    final Type type() {
+        return type;
+    }
+
+    /** Returns null if this resource cannot be represented as a {@link File}. */
+    abstract File asFile();
+
+    /** Returns null if this resource cannot be represented as a {@link Path}. */
+    abstract Path asPath();
+
+    /** Returns null if this resource cannot be represented as a {@link URL}. */
+    abstract URL asUrl();
+
+    /** Returns null if this resource cannot be represented as a {@link htsjdk.samtools.seekablestream.SeekableStream}. */
+    abstract SeekableStream asUnbufferedSeekableStream();
+
+    /** All resource types support {@link java.io.InputStream} generation. */
+    abstract InputStream asUnbufferedInputStream();
+
+    /** SRA archive resource */
+    abstract SRAAccession asSRAAccession();
+
+    @Override
+    public String toString() {
+        final String childToString;
+        switch (type()) {
+            case FILE:
+                childToString = asFile().toString();
+                break;
+            case PATH:
+                childToString = asPath().toString();
+                break;
+            case INPUT_STREAM:
+                childToString = asUnbufferedInputStream().toString();
+                break;
+            case SEEKABLE_STREAM:
+                childToString = asUnbufferedSeekableStream().toString();
+                break;
+            case URL:
+                childToString = asUrl().toString();
+                break;
+            case SRA_ACCESSION:
+                childToString = asSRAAccession().toString();
+                break;
+            default:
+                throw new IllegalStateException();
+        }
+        return String.format("%s:%s", type(), childToString);
+    }
+}
+
+class FileInputResource extends InputResource {
+
+    final File fileResource;
+    final Lazy<SeekableStream> lazySeekableStream = new Lazy<SeekableStream>(new Lazy.LazyInitializer<SeekableStream>() {
+        @Override
+        public SeekableStream make() {
+            try {
+                return new SeekableFileStream(fileResource);
+            } catch (final FileNotFoundException e) {
+                throw new RuntimeIOException(e);
+            }
+        }
+    });
+
+
+    FileInputResource(final File fileResource) {
+        super(Type.FILE);
+        this.fileResource = fileResource;
+    }
+
+    @Override
+    public File asFile() {
+        return fileResource;
+    }
+
+    @Override
+    public Path asPath() {
+        return fileResource.toPath();
+    }
+
+    @Override
+    public URL asUrl() {
+        try {
+            return asPath().toUri().toURL();
+        } catch (MalformedURLException e) {
+            return null;
+        }
+    }
+
+    @Override
+    public SeekableStream asUnbufferedSeekableStream() {
+        return lazySeekableStream.get();
+    }
+
+    @Override
+    public InputStream asUnbufferedInputStream() {
+        return asUnbufferedSeekableStream();
+    }
+
+    @Override
+    public SRAAccession asSRAAccession() {
+        return null;
+    }
+}
+
+class PathInputResource extends InputResource {
+
+    final Path pathResource;
+    final Lazy<SeekableStream> lazySeekableStream = new Lazy<SeekableStream>(new Lazy.LazyInitializer<SeekableStream>() {
+        @Override
+        public SeekableStream make() {
+            try {
+                return new SeekablePathStream(pathResource);
+            } catch (final IOException e) {
+                throw new RuntimeIOException(e);
+            }
+        }
+    });
+
+
+    PathInputResource(final Path pathResource) {
+        super(Type.PATH);
+        this.pathResource = pathResource;
+    }
+
+    @Override
+    public File asFile() {
+        try {
+            return asPath().toFile();
+        } catch (UnsupportedOperationException e) {
+            return null;
+        }
+    }
+
+    @Override
+    public Path asPath() {
+        return pathResource;
+    }
+
+    @Override
+    public URL asUrl() {
+        try {
+            return asPath().toUri().toURL();
+        } catch (MalformedURLException e) {
+            return null;
+        }
+    }
+
+    @Override
+    public SeekableStream asUnbufferedSeekableStream() {
+        return lazySeekableStream.get();
+    }
+
+    @Override
+    public InputStream asUnbufferedInputStream() {
+        return asUnbufferedSeekableStream();
+    }
+
+    @Override
+    public SRAAccession asSRAAccession() {
+        return null;
+    }
+}
+
+class UrlInputResource extends InputResource {
+
+    final URL urlResource;
+    final Lazy<SeekableStream> lazySeekableStream = new Lazy<SeekableStream>(new Lazy.LazyInitializer<SeekableStream>() {
+        @Override
+        public SeekableStream make() {
+            try { return SeekableStreamFactory.getInstance().getStreamFor(urlResource); }
+            catch (final IOException ioe) { throw new RuntimeIOException(ioe); }
+        }
+    });
+
+    UrlInputResource(final URL urlResource) {
+        super(Type.URL);
+        this.urlResource = urlResource;
+    }
+
+    @Override
+    public File asFile() {
+        return null;
+    }
+
+    @Override
+    public Path asPath() {
+        try {
+            return Paths.get(urlResource.toURI());
+        } catch (URISyntaxException | IllegalArgumentException |
+            FileSystemNotFoundException | SecurityException e) {
+            return null;
+        }
+    }
+
+    @Override
+    public URL asUrl() {
+        return urlResource;
+    }
+
+    @Override
+    public SeekableStream asUnbufferedSeekableStream() {
+        return lazySeekableStream.get();
+    }
+
+    @Override
+    public InputStream asUnbufferedInputStream() {
+        return asUnbufferedSeekableStream();
+    }
+
+    @Override
+    public SRAAccession asSRAAccession() {
+        return null;
+    }
+}
+
+class SeekableStreamInputResource extends InputResource {
+
+    final SeekableStream seekableStreamResource;
+
+    SeekableStreamInputResource(final SeekableStream seekableStreamResource) {
+        super(Type.SEEKABLE_STREAM);
+        this.seekableStreamResource = seekableStreamResource;
+    }
+
+    @Override
+    File asFile() {
+        return null;
+    }
+
+    @Override
+    Path asPath() {
+        return null;
+    }
+
+    @Override
+    URL asUrl() {
+        return null;
+    }
+
+    @Override
+    SeekableStream asUnbufferedSeekableStream() {
+        return seekableStreamResource;
+    }
+
+    @Override
+    InputStream asUnbufferedInputStream() {
+        return asUnbufferedSeekableStream();
+    }
+
+    @Override
+    public SRAAccession asSRAAccession() {
+        return null;
+    }
+}
+
+class InputStreamInputResource extends InputResource {
+
+    final InputStream inputStreamResource;
+
+    InputStreamInputResource(final InputStream inputStreamResource) {
+        super(Type.INPUT_STREAM);
+        this.inputStreamResource = inputStreamResource;
+    }
+
+    @Override
+    File asFile() {
+        return null;
+    }
+
+    @Override
+    Path asPath() {
+        return null;
+    }
+
+    @Override
+    URL asUrl() {
+        return null;
+    }
+
+    @Override
+    SeekableStream asUnbufferedSeekableStream() {
+        return null;
+    }
+
+    @Override
+    InputStream asUnbufferedInputStream() {
+        return inputStreamResource;
+    }
+
+    @Override
+    public SRAAccession asSRAAccession() {
+        return null;
+    }
+}
+
+class SRAInputResource extends InputResource {
+
+    final SRAAccession accession;
+
+    SRAInputResource(final SRAAccession accession) {
+        super(Type.SRA_ACCESSION);
+        this.accession = accession;
+    }
+
+    @Override
+    File asFile() {
+        return null;
+    }
+
+    @Override
+    Path asPath() {
+        return null;
+    }
+
+    @Override
+    URL asUrl() {
+        return null;
+    }
+
+    @Override
+    SeekableStream asUnbufferedSeekableStream() {
+        return null;
+    }
+
+    @Override
+    InputStream asUnbufferedInputStream() {
+        return null;
+    }
+
+    @Override
+    public SRAAccession asSRAAccession() {
+        return accession;
+    }
+}
\ No newline at end of file
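
A sketch of constructing resources from different sources; of() chooses the appropriate overload by argument type, and index() attaches an index and returns the same resource so calls can be chained. The URLs and paths below are placeholders:

    import htsjdk.samtools.SamInputResource;
    import java.io.File;
    import java.net.MalformedURLException;
    import java.net.URL;

    public class SamInputResourceExample {
        public static void main(final String[] args) throws MalformedURLException {
            // Local BAM with an explicit index file.
            final SamInputResource local =
                    SamInputResource.of(new File("/data/sample.bam"))
                                    .index(new File("/data/sample.bai"));

            // Remote BAM whose index comes from a second URL.
            final SamInputResource remote =
                    SamInputResource.of(new URL("http://example.com/data.bam"))
                                    .index(new URL("http://example.com/data.bam.bai"));

            // The String overload treats the argument as a URL if it parses, otherwise as a file path.
            final SamInputResource fromString = SamInputResource.of("/data/other.bam");

            System.out.println(local);  // data=FILE:/data/sample.bam;index=FILE:/data/sample.bai
            System.out.println(remote);
            System.out.println(fromString);
        }
    }
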
diff --git a/src/java/htsjdk/samtools/SamPairUtil.java b/src/main/java/htsjdk/samtools/SamPairUtil.java
similarity index 100%
rename from src/java/htsjdk/samtools/SamPairUtil.java
rename to src/main/java/htsjdk/samtools/SamPairUtil.java
diff --git a/src/java/htsjdk/samtools/SamReader.java b/src/main/java/htsjdk/samtools/SamReader.java
similarity index 100%
rename from src/java/htsjdk/samtools/SamReader.java
rename to src/main/java/htsjdk/samtools/SamReader.java
diff --git a/src/main/java/htsjdk/samtools/SamReaderFactory.java b/src/main/java/htsjdk/samtools/SamReaderFactory.java
new file mode 100644
index 0000000..40f7113
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/SamReaderFactory.java
@@ -0,0 +1,528 @@
+package htsjdk.samtools;
+
+import htsjdk.samtools.cram.ref.CRAMReferenceSource;
+import htsjdk.samtools.cram.ref.ReferenceSource;
+import htsjdk.samtools.seekablestream.SeekableStream;
+import htsjdk.samtools.sra.SRAAccession;
+import htsjdk.samtools.util.*;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Path;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.zip.GZIPInputStream;
+
+/**
+ * <p>Describes the functionality for producing {@link SamReader}, and offers a
+ * handful of static generators.</p>
+ * <pre>
+ *     SamReaderFactory.makeDefault().open(new File("/my/bam.bam"));
+ * </pre>
+ * <p>Example: Configure a factory</p>
+ * <pre>
+ *      final {@link SamReaderFactory} factory =
+ *          SamReaderFactory.makeDefault()
+ *              .enable({@link Option#INCLUDE_SOURCE_IN_RECORDS}, {@link Option#VALIDATE_CRC_CHECKSUMS})
+ *              .validationStringency({@link ValidationStringency#SILENT});
+ *
+ * </pre>
+ * <p>Example: Open two bam files from different sources, using different options</p>
+ * <pre>
+ *     final {@link SamReaderFactory} factory =
+ *          SamReaderFactory.makeDefault()
+ *              .enable({@link Option#INCLUDE_SOURCE_IN_RECORDS}, {@link Option#VALIDATE_CRC_CHECKSUMS})
+ *              .validationStringency({@link ValidationStringency#SILENT});
+ *
+ *     // File-based bam
+ *     final {@link SamReader} fileReader = factory.open(new File("/my/bam.bam"));
+ *
+ *     // HTTP-hosted BAM with index from an arbitrary stream
+ *     final SeekableStream myBamIndexStream = ...
+ *     final {@link SamInputResource} resource =
+ *          {@link SamInputResource}.of(new URL("http://example.com/data.bam")).index(myBamIndexStream);
+ *     final {@link SamReader} complicatedReader = factory.open(resource);
+ * </pre>
+ *
+ * @author mccowan
+ */
+public abstract class SamReaderFactory {
+
+    private static ValidationStringency defaultValidationStringency = ValidationStringency.DEFAULT_STRINGENCY;
+    
+    abstract public SamReader open(final File file);
+
+    public SamReader open(final Path path) {
+        final SamInputResource r = SamInputResource.of(path);
+        final Path indexMaybe = SamFiles.findIndex(path);
+        if (indexMaybe != null) r.index(indexMaybe);
+        return open(r);
+    }
+
+    abstract public SamReader open(final SamInputResource resource);
+
+    abstract public ValidationStringency validationStringency();
+
+    abstract public CRAMReferenceSource referenceSource();
+
+    /** Set this factory's {@link htsjdk.samtools.SAMRecordFactory} to the provided one, then returns itself. */
+    abstract public SamReaderFactory samRecordFactory(final SAMRecordFactory samRecordFactory);
+
+    /** Enables the provided {@link Option}s, then returns itself. */
+    abstract public SamReaderFactory enable(final Option... options);
+
+    /** Disables the provided {@link Option}s, then returns itself. */
+    abstract public SamReaderFactory disable(final Option... options);
+
+    /** Sets a specific Option to a boolean value. */
+    abstract public SamReaderFactory setOption(final Option option, boolean value);
+
+    /** Sets the specified reference sequence. */
+    abstract public SamReaderFactory referenceSequence(File referenceSequence);
+
+    /** Sets the specified CRAM reference source. */
+    abstract public SamReaderFactory referenceSource(CRAMReferenceSource referenceSequence);
+
+    /** Utility method to open the file, get the header, and close the file. */
+    abstract public SAMFileHeader getFileHeader(File samFile);
+
+    /** Reapplies any changed options to the reader. */
+    abstract public void reapplyOptions(SamReader reader);
+
+    /** Set this factory's {@link ValidationStringency} to the provided one, then returns itself. */
+    abstract public SamReaderFactory validationStringency(final ValidationStringency validationStringency);
+
+    /** Set whether readers created by this factory will use asynchronous IO.
+     * If this method is not called, this flag will default to the value of {@link Defaults#USE_ASYNC_IO_READ_FOR_SAMTOOLS}.
+     * Note that this option may not be applicable to all readers returned from this factory.
+     * Returns the factory itself. */
+    abstract public SamReaderFactory setUseAsyncIo(final boolean asynchronousIO);
+
+    private static SamReaderFactoryImpl DEFAULT =
+            new SamReaderFactoryImpl(Option.DEFAULTS, defaultValidationStringency, DefaultSAMRecordFactory.getInstance());
+
+    public static void setDefaultValidationStringency(final ValidationStringency defaultValidationStringency) {
+        SamReaderFactory.defaultValidationStringency = defaultValidationStringency;
+        // The default may have changed, so reset the default SamReader
+        DEFAULT = new SamReaderFactoryImpl(Option.DEFAULTS, defaultValidationStringency, DefaultSAMRecordFactory.getInstance());
+    }
+
+    /** Creates a copy of the default {@link SamReaderFactory}. */
+    public static SamReaderFactory makeDefault() {
+        return SamReaderFactoryImpl.copyOf(DEFAULT);
+    }
+
+    /**
+     * Creates an "empty" factory with no enabled {@link Option}s, {@link ValidationStringency#DEFAULT_STRINGENCY}, and
+     * {@link htsjdk.samtools.DefaultSAMRecordFactory}.
+     */
+    public static SamReaderFactory make() {
+        return new SamReaderFactoryImpl(EnumSet.noneOf(Option.class), ValidationStringency.DEFAULT_STRINGENCY, DefaultSAMRecordFactory.getInstance());
+    }
+
+    private static class SamReaderFactoryImpl extends SamReaderFactory {
+        private final static Log LOG = Log.getInstance(SamReaderFactory.class);
+        private final EnumSet<Option> enabledOptions;
+        private ValidationStringency validationStringency;
+        private boolean asynchronousIO = Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS;
+        private SAMRecordFactory samRecordFactory;
+        private CustomReaderFactory customReaderFactory;
+        private CRAMReferenceSource referenceSource;
+
+        private SamReaderFactoryImpl(final EnumSet<Option> enabledOptions, final ValidationStringency validationStringency, final SAMRecordFactory samRecordFactory) {
+            this.enabledOptions = EnumSet.copyOf(enabledOptions);
+            this.samRecordFactory = samRecordFactory;
+            this.validationStringency = validationStringency;
+            this.customReaderFactory = CustomReaderFactory.getInstance();
+        }
+   
+        @Override
+        public SamReader open(final File file) {
+            final SamInputResource r = SamInputResource.of(file);
+            final File indexMaybe = SamFiles.findIndex(file);
+            if (indexMaybe != null) r.index(indexMaybe);
+            return open(r);
+        }
+
+
+        @Override
+        public ValidationStringency validationStringency() {
+            return validationStringency;
+        }
+
+        @Override
+        public CRAMReferenceSource referenceSource() {
+            return referenceSource;
+        }
+
+        @Override
+        public SamReaderFactory samRecordFactory(final SAMRecordFactory samRecordFactory) {
+            this.samRecordFactory = samRecordFactory;
+            return this;
+        }
+
+        @Override
+        public SamReaderFactory enable(final Option... options) {
+            Collections.addAll(this.enabledOptions, options);
+            return this;
+        }
+
+        @Override
+        public SamReaderFactory disable(final Option... options) {
+            for (final Option option : options) {
+                this.enabledOptions.remove(option);
+            }
+            return this;
+        }
+
+        @Override
+        public SamReaderFactory setOption(final Option option, final boolean value) {
+            if (value) {
+                return enable(option);
+            } else {
+                return disable(option);
+            }
+        }
+
+        @Override
+        public SamReaderFactory referenceSequence(final File referenceSequence) {
+            this.referenceSource = new ReferenceSource(referenceSequence);
+            return this;
+        }
+
+        @Override
+        public SamReaderFactory referenceSource(final CRAMReferenceSource referenceSource) {
+            this.referenceSource = referenceSource;
+            return this;
+        }
+
+        @Override
+        public SAMFileHeader getFileHeader(final File samFile) {
+            final SamReader reader = open(samFile);
+            final SAMFileHeader header = reader.getFileHeader();
+            CloserUtil.close(reader);
+            return header;
+        }
+
+        @Override
+        public void reapplyOptions(final SamReader reader) {
+            for (final Option option : enabledOptions) {
+                option.applyTo((SamReader.PrimitiveSamReaderToSamReaderAdapter) reader);
+            }
+        }
+
+        @Override
+        public SamReaderFactory validationStringency(final ValidationStringency validationStringency) {
+            this.validationStringency = validationStringency;
+            return this;
+        }
+
+        @Override
+        public SamReaderFactory setUseAsyncIo(final boolean asynchronousIO){
+            this.asynchronousIO = asynchronousIO;
+            return this;
+        }
+
+        @Override
+        public SamReader open(final SamInputResource resource) {
+            final SamReader.PrimitiveSamReader primitiveSamReader;
+            try {
+                final InputResource data = resource.data();
+                final InputResource indexMaybe = resource.indexMaybe();
+                final boolean indexDefined = indexMaybe != null;
+
+                final InputResource.Type type = data.type();
+                if (type == InputResource.Type.URL) {
+                  SamReader reader = customReaderFactory.maybeOpen(
+                      data.asUrl());
+                  if (reader != null) {
+                    return reader;
+                  }
+                }
+                if (type == InputResource.Type.SEEKABLE_STREAM || type == InputResource.Type.URL) {
+                    if (SamStreams.sourceLikeBam(data.asUnbufferedSeekableStream())) {
+                        final SeekableStream bufferedIndexStream;
+                        if (indexDefined && indexMaybe.asUnbufferedSeekableStream() != null) {
+                            bufferedIndexStream = IOUtil.maybeBufferedSeekableStream(indexMaybe.asUnbufferedSeekableStream());
+                        } else {
+                            // TODO: Throw an exception here?  An index _may_ have been provided, but we're ignoring it
+                            bufferedIndexStream = null;
+                        }
+                        primitiveSamReader = new BAMFileReader(
+                                IOUtil.maybeBufferedSeekableStream(data.asUnbufferedSeekableStream()),
+                                bufferedIndexStream,
+                                false,
+                                asynchronousIO,
+                                validationStringency,
+                                this.samRecordFactory
+                        );
+                    } else if (SamStreams.sourceLikeCram(data.asUnbufferedSeekableStream())) {
+                        if (referenceSource == null) {
+                            referenceSource = ReferenceSource.getDefaultCRAMReferenceSource();
+                        }
+                        SeekableStream bufferedIndexStream = indexDefined ?
+                                IOUtil.maybeBufferedSeekableStream(indexMaybe.asUnbufferedSeekableStream()) :
+                                null;
+                        primitiveSamReader = new CRAMFileReader(
+                                IOUtil.maybeBufferedSeekableStream(data.asUnbufferedSeekableStream()),
+                                bufferedIndexStream, referenceSource, validationStringency);
+                    } else {
+                        // assume it's a SAM file with no index
+                        LOG.warn("Unable to detect file format from input URL or stream, assuming SAM format.");
+                        primitiveSamReader = new SAMTextReader(
+                                IOUtil.toBufferedStream(data.asUnbufferedInputStream()),
+                                validationStringency, this.samRecordFactory);
+                    }
+                } else if (type == InputResource.Type.SRA_ACCESSION) {
+                    primitiveSamReader = new SRAFileReader(data.asSRAAccession());
+                } else {
+                    InputStream bufferedStream =
+                            IOUtil.maybeBufferInputStream(
+                                    data.asUnbufferedInputStream(),
+                                    Math.max(Defaults.BUFFER_SIZE, BlockCompressedStreamConstants.MAX_COMPRESSED_BLOCK_SIZE)
+                            );
+                    File sourceFile = data.asFile();
+                    final File indexFile = indexMaybe == null ? null : indexMaybe.asFile();
+                    if (SamStreams.isBAMFile(bufferedStream)) {
+                        if (sourceFile == null || !sourceFile.isFile()) {
+                            // Handle case in which file is a named pipe, e.g. /dev/stdin or created by mkfifo
+                            primitiveSamReader = new BAMFileReader(bufferedStream, indexFile, false, asynchronousIO, validationStringency, this.samRecordFactory);
+                        } else {
+                            bufferedStream.close();
+                            primitiveSamReader = new BAMFileReader(sourceFile, indexFile, false, asynchronousIO, validationStringency, this.samRecordFactory);
+                        }
+                    } else if (BlockCompressedInputStream.isValidFile(bufferedStream)) {
+                        primitiveSamReader = new SAMTextReader(new BlockCompressedInputStream(bufferedStream), validationStringency, this.samRecordFactory);
+                    } else if (SamStreams.isGzippedSAMFile(bufferedStream)) {
+                        primitiveSamReader = new SAMTextReader(new GZIPInputStream(bufferedStream), validationStringency, this.samRecordFactory);
+                    } else if (SamStreams.isCRAMFile(bufferedStream)) {
+                        if (referenceSource == null) {
+                            referenceSource = ReferenceSource.getDefaultCRAMReferenceSource();
+                        }
+                        if (sourceFile == null || !sourceFile.isFile()) {
+                            primitiveSamReader = new CRAMFileReader(bufferedStream, indexFile, referenceSource, validationStringency);
+                        } else {
+                            bufferedStream.close();
+                            primitiveSamReader = new CRAMFileReader(sourceFile, indexFile, referenceSource, validationStringency);
+                        }
+                    } else if (sourceFile != null && isSra(sourceFile)) {
+                        if (bufferedStream != null) {
+                            bufferedStream.close();
+                        }
+                        primitiveSamReader = new SRAFileReader(new SRAAccession(sourceFile.getPath()));
+                    } else {
+                        if (indexDefined) {
+                            bufferedStream.close();
+                            throw new RuntimeException("Cannot use index file with textual SAM file");
+                        }
+                        primitiveSamReader = new SAMTextReader(bufferedStream, sourceFile, validationStringency, this.samRecordFactory);
+                    }
+                }
+
+                // Apply the options defined by this factory to this reader
+                final SamReader.PrimitiveSamReaderToSamReaderAdapter reader =
+                        new SamReader.PrimitiveSamReaderToSamReaderAdapter(primitiveSamReader, resource);
+
+                for (final Option option : enabledOptions) {
+                    option.applyTo(reader);
+                }
+
+                return reader;
+            } catch (final IOException e) {
+                throw new RuntimeIOException(e);
+            }
+        }
+
+        /** Attempts to detect whether the file is an SRA accessioned file. If SRA support is not available, returns false. */
+        private boolean isSra(final File sourceFile) {
+            try {
+                // if SRA fails to initialize (the most common reason is a failure to find/load native libraries),
+                // it will throw a subclass of java.lang.Error and here we only catch subclasses of java.lang.Exception
+                //
+                // Note: SRA initialization errors should not be ignored, but rather shown to user
+                return SRAAccession.isValid(sourceFile.getPath());
+            } catch (final Exception e) {
+                return false;
+            }
+        }
+
+        public static SamReaderFactory copyOf(final SamReaderFactoryImpl target) {
+            return new SamReaderFactoryImpl(target.enabledOptions, target.validationStringency, target.samRecordFactory);
+        }
+    }
+
+    /** A collection of binary {@link SamReaderFactory} options. */
+    public enum Option {
+        /**
+         * The factory's {@link SamReader}s will produce populated (non-null) values when calling {@link SAMRecord#getFileSource()}.
+         * <p/>
+         * This option increases memory footprint slightly per {@link htsjdk.samtools.SAMRecord}.
+         */
+        INCLUDE_SOURCE_IN_RECORDS {
+            @Override
+            void applyTo(final BAMFileReader underlyingReader, final SamReader reader) {
+                underlyingReader.enableFileSource(reader, true);
+            }
+
+            @Override
+            void applyTo(final SAMTextReader underlyingReader, final SamReader reader) {
+                underlyingReader.enableFileSource(reader, true);
+            }
+
+            @Override
+            void applyTo(final CRAMFileReader underlyingReader, final SamReader reader) {
+                underlyingReader.enableFileSource(reader, true);
+            }
+
+            @Override
+            void applyTo(final SRAFileReader underlyingReader, final SamReader reader) {
+                underlyingReader.enableFileSource(reader, true);
+            }
+        },
+
+        /**
+         * Calls to {@link SamReader.Indexing#getIndex()} on the factory's {@link SamReader}s will produce
+         * {@link BAMIndex}es that cache data in memory instead of reading the index from disk for each query operation.
+         *
+         * @see SamReader#indexing()
+         * @see htsjdk.samtools.SamReader.Indexing#getIndex()
+         */
+        CACHE_FILE_BASED_INDEXES {
+            @Override
+            void applyTo(final BAMFileReader underlyingReader, final SamReader reader) {
+                underlyingReader.enableIndexCaching(true);
+            }
+
+            @Override
+            void applyTo(final SAMTextReader underlyingReader, final SamReader reader) {
+                logDebugIgnoringOption(reader, this);
+            }
+
+            @Override
+            void applyTo(final CRAMFileReader underlyingReader, final SamReader reader) {
+                underlyingReader.enableIndexCaching(true);
+            }
+
+            @Override
+            void applyTo(final SRAFileReader underlyingReader, final SamReader reader) {
+                underlyingReader.enableIndexCaching(true);
+            }
+        },
+
+        /**
+         * The factory's {@link SamReader}s will not use memory mapping for accessing index files (which is used by default).  This is
+         * slower but more scalable when accessing large numbers of BAM files sequentially.
+         *
+         * @see SamReader#indexing()
+         * @see htsjdk.samtools.SamReader.Indexing#getIndex()
+         */
+        DONT_MEMORY_MAP_INDEX {
+            @Override
+            void applyTo(final BAMFileReader underlyingReader, final SamReader reader) {
+                underlyingReader.enableIndexMemoryMapping(false);
+            }
+
+            @Override
+            void applyTo(final SAMTextReader underlyingReader, final SamReader reader) {
+                logDebugIgnoringOption(reader, this);
+            }
+
+            @Override
+            void applyTo(final CRAMFileReader underlyingReader, final SamReader reader) {
+                underlyingReader.enableIndexMemoryMapping(false);
+            }
+
+            @Override
+            void applyTo(final SRAFileReader underlyingReader, final SamReader reader) {
+                underlyingReader.enableIndexMemoryMapping(false);
+            }
+        },
+
+        /**
+         * Eagerly decode {@link htsjdk.samtools.SamReader}'s {@link htsjdk.samtools.SAMRecord}s, which can reduce memory footprint if many
+         * fields are being read per record, or if fields are going to be updated.
+         */
+        EAGERLY_DECODE {
+            @Override
+            void applyTo(final BAMFileReader underlyingReader, final SamReader reader) {
+                underlyingReader.setEagerDecode(true);
+            }
+
+            @Override
+            void applyTo(final SAMTextReader underlyingReader, final SamReader reader) {
+                logDebugIgnoringOption(reader, this);
+            }
+
+            @Override
+            void applyTo(final CRAMFileReader underlyingReader, final SamReader reader) {
+                logDebugIgnoringOption(reader, this);
+            }
+
+            @Override
+            void applyTo(final SRAFileReader underlyingReader, final SamReader reader) {
+                logDebugIgnoringOption(reader, this);
+            }
+        },
+
+        /**
+         * For {@link htsjdk.samtools.SamReader}s backed by block-compressed streams, enable CRC validation of those streams.  This is an
+         * expensive operation, but serves to ensure validity of the stream.
+         */
+        VALIDATE_CRC_CHECKSUMS {
+            @Override
+            void applyTo(final BAMFileReader underlyingReader, final SamReader reader) {
+                underlyingReader.enableCrcChecking(true);
+            }
+
+            @Override
+            void applyTo(final SAMTextReader underlyingReader, final SamReader reader) {
+                logDebugIgnoringOption(reader, this);
+            }
+
+            @Override
+            void applyTo(final CRAMFileReader underlyingReader, final SamReader reader) {
+                logDebugIgnoringOption(reader, this);
+            }
+
+            @Override
+            void applyTo(final SRAFileReader underlyingReader, final SamReader reader) {
+                logDebugIgnoringOption(reader, this);
+            }
+
+        };
+
+        public static EnumSet<Option> DEFAULTS = EnumSet.noneOf(Option.class);
+
+        /** Applies this option to the provided reader, if applicable. */
+        void applyTo(final SamReader.PrimitiveSamReaderToSamReaderAdapter reader) {
+            final SamReader.PrimitiveSamReader underlyingReader = reader.underlyingReader();
+            if (underlyingReader instanceof BAMFileReader) {
+                applyTo((BAMFileReader) underlyingReader, reader);
+            } else if (underlyingReader instanceof SAMTextReader) {
+                applyTo((SAMTextReader) underlyingReader, reader);
+            } else if (underlyingReader instanceof CRAMFileReader) {
+                applyTo((CRAMFileReader) underlyingReader, reader);
+            } else if (underlyingReader instanceof SRAFileReader) {
+                applyTo((SRAFileReader) underlyingReader, reader);
+            } else {
+                throw new IllegalArgumentException(String.format("Unrecognized reader type: %s.", underlyingReader.getClass()));
+            }
+
+        }
+
+        private static void logDebugIgnoringOption(final SamReader r, final Option option) {
+            LOG.debug(String.format("Ignoring %s option; does not apply to %s readers.", option, r.getClass().getSimpleName()));
+        }
+
+        private final static Log LOG = Log.getInstance(Option.class);
+
+        abstract void applyTo(final BAMFileReader underlyingReader, final SamReader reader);
+
+        abstract void applyTo(final SAMTextReader underlyingReader, final SamReader reader);
+
+        abstract void applyTo(final CRAMFileReader underlyingReader, final SamReader reader);
+
+        abstract void applyTo(final SRAFileReader underlyingReader, final SamReader reader);
+    }
+}
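
For orientation, a minimal usage sketch of the option plumbing above. It assumes the enclosing SamReaderFactory builder class (whose diff header falls outside this excerpt) and uses a placeholder file name; options that do not apply to the underlying reader type are simply logged and ignored, as shown in the applyTo methods.

    import htsjdk.samtools.SamReader;
    import htsjdk.samtools.SamReaderFactory;
    import java.io.File;

    public class ReaderOptionsSketch {
        public static void main(final String[] args) throws Exception {
            // Options are applied to the underlying primitive reader when the SamReader is built.
            try (final SamReader reader = SamReaderFactory.makeDefault()
                    .enable(SamReaderFactory.Option.EAGERLY_DECODE)
                    .enable(SamReaderFactory.Option.VALIDATE_CRC_CHECKSUMS)
                    .open(new File("example.bam"))) {   // placeholder path
                reader.forEach(record -> { /* process each SAMRecord */ });
            }
        }
    }
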
diff --git a/src/main/java/htsjdk/samtools/SamStreams.java b/src/main/java/htsjdk/samtools/SamStreams.java
new file mode 100644
index 0000000..114d23b
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/SamStreams.java
@@ -0,0 +1,164 @@
+package htsjdk.samtools;
+
+import htsjdk.samtools.cram.structure.CramHeader;
+import htsjdk.samtools.seekablestream.SeekableStream;
+import htsjdk.samtools.util.BlockCompressedInputStream;
+import htsjdk.samtools.util.BlockCompressedStreamConstants;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.Arrays;
+import java.util.zip.GZIPInputStream;
+
+/**
+ * Utilities related to processing of {@link java.io.InputStream}s encoding SAM data.
+ *
+ * @author mccowan
+ */
+public class SamStreams {
+    private static int readBytes(final InputStream stream, final byte[] buffer, final int offset, final int length)
+            throws IOException {
+        int bytesRead = 0;
+        while (bytesRead < length) {
+            final int count = stream.read(buffer, offset + bytesRead, length - bytesRead);
+            if (count <= 0) {
+                break;
+            }
+            bytesRead += count;
+        }
+        return bytesRead;
+    }
+
+    public static boolean isCRAMFile(final InputStream stream) throws IOException {
+        final int buffSize = CramHeader.MAGIC.length;
+        stream.mark(buffSize);
+        final byte[] buffer = new byte[buffSize];
+        readBytes(stream, buffer, 0, buffSize);
+        stream.reset();
+
+        return Arrays.equals(buffer, CramHeader.MAGIC);
+    }
+
+    /**
+     * @param stream stream.markSupported() must be true
+     * @return true if this looks like a BAM file.
+     */
+    public static boolean isBAMFile(final InputStream stream)
+            throws IOException {
+        if (!BlockCompressedInputStream.isValidFile(stream)) {
+            return false;
+        }
+        final int buffSize = BlockCompressedStreamConstants.MAX_COMPRESSED_BLOCK_SIZE;
+        stream.mark(buffSize);
+        final byte[] buffer = new byte[buffSize];
+        readBytes(stream, buffer, 0, buffSize);
+        stream.reset();
+        try(final BlockCompressedInputStream bcis = new BlockCompressedInputStream(new ByteArrayInputStream(buffer))){
+            final byte[] magicBuf = new byte[4];
+            final int magicLength = readBytes(bcis, magicBuf, 0, 4);
+            return magicLength == BAMFileConstants.BAM_MAGIC.length && Arrays.equals(BAMFileConstants.BAM_MAGIC, magicBuf);
+        }
+    }
+
+    /**
+     * Checks whether the stream contains a gzipped SAM file.  Returns true if it
+     * does and false otherwise.  The stream must support mark/reset.
+     */
+    public static boolean isGzippedSAMFile(final InputStream stream) {
+        if (!stream.markSupported()) {
+            throw new IllegalArgumentException("Cannot test a stream that doesn't support marking.");
+        }
+        stream.mark(8000);
+
+        try {
+            final GZIPInputStream gunzip = new GZIPInputStream(stream);
+            // Constructing the GZIPInputStream and reading a single byte forces the gzip header
+            // to be parsed; an IOException here means the stream is not gzip-compressed.
+            final int ch = gunzip.read();
+            return true;
+        } catch (final IOException ioe) {
+            return false;
+        } finally {
+            try {
+                stream.reset();
+            } catch (final IOException ioe) {
+                throw new IllegalStateException("Could not reset stream.");
+            }
+        }
+    }
+
+    // It is too expensive to examine the remote file to determine its type,
+    // so rely on the file extension instead.
+    public static boolean sourceLikeBam(final SeekableStream strm) {
+        String source = strm.getSource();
+        if (source == null) {
+            // assume any stream with a null source is a BAM file
+            // (https://github.com/samtools/htsjdk/issues/619)
+            return true;
+        }
+
+        // Source will typically be a file path or URL.
+        // If it's a URL, require the path or one of the query parameters to reference a .bam file.
+        try {
+            final URL sourceURL = new URL(source);
+            final String urlPath = sourceURL.getPath().toLowerCase();
+            String queryParams = sourceURL.getQuery();
+            if (queryParams != null) {
+                queryParams = queryParams.toLowerCase();
+            }
+            return urlPath.endsWith(".bam") ||
+                    (queryParams != null &&
+                            (queryParams.endsWith(".bam") ||
+                                    queryParams.contains(".bam?") ||
+                                    queryParams.contains(".bam&") ||
+                                    queryParams.contains(".bam%26"))
+                    );
+        }
+        catch (MalformedURLException e) {
+            source = source.toLowerCase();
+            return source.endsWith(".bam") ||
+                    source.contains(".bam?") ||
+                    source.contains(".bam&") ||
+                    source.contains(".bam%26");
+        }
+    }
+
+    // It is too expensive to examine the remote file to determine its type,
+    // so rely on the file extension instead.
+    public static boolean sourceLikeCram(final SeekableStream strm) {
+        String source = strm.getSource();
+        if (source == null) {
+            // sourceLikeBam assumes any stream with a null source is a BAM file
+            // (https://github.com/samtools/htsjdk/issues/619); in order to not
+            // propagate more chaos we return false here
+            return false;
+        }
+
+        // Source will typically be a file path or URL.
+        // If it's a URL, require the path or one of the query parameters to reference a .cram file.
+        try {
+            final URL sourceURL = new URL(source);
+            final String urlPath = sourceURL.getPath().toLowerCase();
+            String queryParams = sourceURL.getQuery();
+            if (queryParams != null) {
+                queryParams = queryParams.toLowerCase();
+            }
+            return urlPath.endsWith(".cram") ||
+                    (queryParams != null &&
+                            (queryParams.endsWith(".cram") ||
+                             queryParams.contains(".cram?") ||
+                             queryParams.contains(".cram&") ||
+                             queryParams.contains(".cram%26"))
+                    );
+        }
+        catch (MalformedURLException e) {
+            source = source.toLowerCase();
+            return source.endsWith(".cram") ||
+                    source.contains(".cram?") ||
+                    source.contains(".cram&") ||
+                    source.contains(".cram%26");
+        }
+    }
+
+}
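
A small sketch of how the stream-sniffing helpers above can be combined. The input path is a placeholder, and the stream is wrapped in a BufferedInputStream because the helpers rely on mark()/reset().

    import htsjdk.samtools.SamStreams;
    import java.io.BufferedInputStream;
    import java.io.FileInputStream;
    import java.io.InputStream;

    public class SniffSketch {
        public static void main(final String[] args) throws Exception {
            // BufferedInputStream supplies the mark/reset support the helpers require.
            try (final InputStream in = new BufferedInputStream(new FileInputStream("input.unknown"))) { // placeholder
                if (SamStreams.isBAMFile(in)) {
                    System.out.println("looks like BAM");
                } else if (SamStreams.isCRAMFile(in)) {
                    System.out.println("looks like CRAM");
                } else if (SamStreams.isGzippedSAMFile(in)) {
                    System.out.println("looks like gzipped SAM");
                } else {
                    System.out.println("plain SAM text or unknown");
                }
            }
        }
    }
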
diff --git a/src/java/htsjdk/samtools/SecondaryOrSupplementarySkippingIterator.java b/src/main/java/htsjdk/samtools/SecondaryOrSupplementarySkippingIterator.java
similarity index 100%
rename from src/java/htsjdk/samtools/SecondaryOrSupplementarySkippingIterator.java
rename to src/main/java/htsjdk/samtools/SecondaryOrSupplementarySkippingIterator.java
diff --git a/src/java/htsjdk/samtools/StreamInflatingIndexingOutputStream.java b/src/main/java/htsjdk/samtools/StreamInflatingIndexingOutputStream.java
similarity index 100%
rename from src/java/htsjdk/samtools/StreamInflatingIndexingOutputStream.java
rename to src/main/java/htsjdk/samtools/StreamInflatingIndexingOutputStream.java
diff --git a/src/java/htsjdk/samtools/TagValueAndUnsignedArrayFlag.java b/src/main/java/htsjdk/samtools/TagValueAndUnsignedArrayFlag.java
similarity index 100%
rename from src/java/htsjdk/samtools/TagValueAndUnsignedArrayFlag.java
rename to src/main/java/htsjdk/samtools/TagValueAndUnsignedArrayFlag.java
diff --git a/src/java/htsjdk/samtools/TextCigarCodec.java b/src/main/java/htsjdk/samtools/TextCigarCodec.java
similarity index 100%
rename from src/java/htsjdk/samtools/TextCigarCodec.java
rename to src/main/java/htsjdk/samtools/TextCigarCodec.java
diff --git a/src/java/htsjdk/samtools/TextTagCodec.java b/src/main/java/htsjdk/samtools/TextTagCodec.java
similarity index 100%
rename from src/java/htsjdk/samtools/TextTagCodec.java
rename to src/main/java/htsjdk/samtools/TextTagCodec.java
diff --git a/src/java/htsjdk/samtools/TextualBAMIndexWriter.java b/src/main/java/htsjdk/samtools/TextualBAMIndexWriter.java
similarity index 100%
rename from src/java/htsjdk/samtools/TextualBAMIndexWriter.java
rename to src/main/java/htsjdk/samtools/TextualBAMIndexWriter.java
diff --git a/src/java/htsjdk/samtools/ValidationStringency.java b/src/main/java/htsjdk/samtools/ValidationStringency.java
similarity index 100%
rename from src/java/htsjdk/samtools/ValidationStringency.java
rename to src/main/java/htsjdk/samtools/ValidationStringency.java
diff --git a/src/java/htsjdk/samtools/apps/TimeChannel.java b/src/main/java/htsjdk/samtools/apps/TimeChannel.java
similarity index 100%
rename from src/java/htsjdk/samtools/apps/TimeChannel.java
rename to src/main/java/htsjdk/samtools/apps/TimeChannel.java
diff --git a/src/java/htsjdk/samtools/apps/TimeRandomAccessFile.java b/src/main/java/htsjdk/samtools/apps/TimeRandomAccessFile.java
similarity index 100%
rename from src/java/htsjdk/samtools/apps/TimeRandomAccessFile.java
rename to src/main/java/htsjdk/samtools/apps/TimeRandomAccessFile.java
diff --git a/src/main/java/htsjdk/samtools/cram/CRAIEntry.java b/src/main/java/htsjdk/samtools/cram/CRAIEntry.java
new file mode 100644
index 0000000..dd22a00
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/cram/CRAIEntry.java
@@ -0,0 +1,195 @@
+package htsjdk.samtools.cram;
+
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.cram.structure.Container;
+import htsjdk.samtools.cram.structure.Slice;
+import htsjdk.samtools.util.RuntimeIOException;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.List;
+
+/**
+ * A class representing a CRAI index entry: file and alignment offsets for each slice.
+ * Created by vadim on 10/08/2015.
+ */
+public class CRAIEntry implements Comparable<CRAIEntry>, Cloneable {
+    public int sequenceId;
+    public int alignmentStart;
+    public int alignmentSpan;
+    public long containerStartOffset;
+    public int sliceOffset;
+    public int sliceSize;
+    public int sliceIndex;
+
+    private static final int CRAI_INDEX_COLUMNS = 6;
+    private static final String entryFormat = "%d\t%d\t%d\t%d\t%d\t%d";
+
+    public CRAIEntry() {
+    }
+
+    /**
+     * Create a CRAI Entry from a serialized CRAI index line.
+     *
+     * @param line string formatted as a CRAI index entry
+     * @throws CRAIIndex.CRAIIndexException if the line does not have the expected number of columns or a column cannot be parsed as a number
+     */
+    public CRAIEntry(final String line) throws CRAIIndex.CRAIIndexException {
+        final String[] chunks = line.split("\t");
+        if (chunks.length != CRAI_INDEX_COLUMNS) {
+            throw new CRAIIndex.CRAIIndexException(
+                    "Malformed CRAI index entry: expecting " + CRAI_INDEX_COLUMNS + " columns but got " + chunks.length);
+        }
+
+        try {
+            sequenceId = Integer.parseInt(chunks[0]);
+            alignmentStart = Integer.parseInt(chunks[1]);
+            alignmentSpan = Integer.parseInt(chunks[2]);
+            containerStartOffset = Long.parseLong(chunks[3]);
+            sliceOffset = Integer.parseInt(chunks[4]);
+            sliceSize = Integer.parseInt(chunks[5]);
+        } catch (final NumberFormatException e) {
+            throw new CRAIIndex.CRAIIndexException(e);
+        }
+    }
+
+    /**
+     * Serialize the entry to a CRAI index stream.
+     * @param os stream to write to
+     */
+    public void writeToStream(OutputStream os) {
+        try {
+            os.write(serializeToString().getBytes());
+            os.write('\n');
+        }
+        catch (IOException e) {
+            throw new RuntimeIOException(e);
+        }
+    }
+
+    /**
+     * Format the entry as a string suitable for serialization in the CRAI index
+     */
+    private String serializeToString() {
+        return String.format(entryFormat,
+                sequenceId, alignmentStart, alignmentSpan,
+                containerStartOffset, sliceOffset, sliceSize);
+    }
+
+    @Override
+    public String toString() { return serializeToString(); }
+
+    public static List<CRAIEntry> fromContainer(final Container container) {
+        final List<CRAIEntry> entries = new ArrayList<>(container.slices.length);
+        for (int i = 0; i < container.slices.length; i++) {
+            final Slice s = container.slices[i];
+            final CRAIEntry e = new CRAIEntry();
+            e.sequenceId = s.sequenceId;
+            e.alignmentStart = s.alignmentStart;
+            e.alignmentSpan = s.alignmentSpan;
+            e.containerStartOffset = s.containerOffset;
+            e.sliceOffset = container.landmarks[i];
+            e.sliceSize = s.size;
+
+            e.sliceIndex = i;
+            entries.add(e);
+        }
+        return entries;
+    }
+
+    @Override
+    public int compareTo(final CRAIEntry o) {
+        if (o == null) {
+            return 1;
+        }
+        if (sequenceId != o.sequenceId) {
+            return sequenceId - o.sequenceId;
+        }
+        if (alignmentStart != o.alignmentStart) {
+            return alignmentStart - o.alignmentStart;
+        }
+
+        return (int) (containerStartOffset - o.containerStartOffset);
+    }
+
+    @Override
+    public CRAIEntry clone() throws CloneNotSupportedException {
+        super.clone();
+        final CRAIEntry entry = new CRAIEntry();
+        entry.sequenceId = sequenceId;
+        entry.alignmentStart = alignmentStart;
+        entry.alignmentSpan = alignmentSpan;
+        entry.containerStartOffset = containerStartOffset;
+        entry.sliceOffset = sliceOffset;
+        entry.sliceSize = sliceSize;
+        entry.sliceIndex = sliceIndex;
+        return entry;
+    }
+
+    public static final Comparator<CRAIEntry> byEnd = new Comparator<CRAIEntry>() {
+
+        @Override
+        public int compare(final CRAIEntry o1, final CRAIEntry o2) {
+            if (o1.sequenceId != o2.sequenceId) {
+                return o2.sequenceId - o1.sequenceId;
+            }
+            if (o1.alignmentStart + o1.alignmentSpan != o2.alignmentStart + o2.alignmentSpan) {
+                return o1.alignmentStart + o1.alignmentSpan - o2.alignmentStart - o2.alignmentSpan;
+            }
+
+            return (int) (o1.containerStartOffset - o2.containerStartOffset);
+        }
+    };
+
+    public static final Comparator<CRAIEntry> byStart = new Comparator<CRAIEntry>() {
+
+        @Override
+        public int compare(final CRAIEntry o1, final CRAIEntry o2) {
+            if (o1.sequenceId != o2.sequenceId) {
+                return o2.sequenceId - o1.sequenceId;
+            }
+            if (o1.alignmentStart != o2.alignmentStart) {
+                return o1.alignmentStart - o2.alignmentStart;
+            }
+
+            return (int) (o1.containerStartOffset - o2.containerStartOffset);
+        }
+    };
+
+    public static final Comparator<CRAIEntry> byStartDesc = new Comparator<CRAIEntry>() {
+
+        @Override
+        public int compare(final CRAIEntry o1, final CRAIEntry o2) {
+            if (o1.sequenceId != o2.sequenceId) {
+                if (o1.sequenceId == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX)
+                    return 1;
+                if (o2.sequenceId == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX)
+                    return -1;
+                return -o2.sequenceId + o1.sequenceId;
+            }
+            if (o1.alignmentStart != o2.alignmentStart)
+                return o1.alignmentStart - o2.alignmentStart;
+
+            return (int) (o1.containerStartOffset - o2.containerStartOffset);
+        }
+    };
+
+    public static boolean intersect(final CRAIEntry e0, final CRAIEntry e1) {
+        if (e0.sequenceId != e1.sequenceId) {
+            return false;
+        }
+        if (e0.sequenceId < 0) {
+            return false;
+        }
+
+        final int a0 = e0.alignmentStart;
+        final int a1 = e1.alignmentStart;
+
+        final int b0 = a0 + e0.alignmentSpan;
+        final int b1 = a1 + e1.alignmentSpan;
+
+        return Math.abs(a0 + b0 - a1 - b1) < (e0.alignmentSpan + e1.alignmentSpan);
+
+    }
+}
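
To illustrate the six-column entry format and the interval test above, a small sketch with made-up coordinates:

    import htsjdk.samtools.cram.CRAIEntry;

    public class CraiEntrySketch {
        public static void main(final String[] args) {
            // Columns: sequenceId, alignmentStart, alignmentSpan, containerStartOffset, sliceOffset, sliceSize.
            final CRAIEntry a = new CRAIEntry("0\t100\t200\t5000\t100\t300");
            final CRAIEntry b = new CRAIEntry("0\t250\t100\t9000\t50\t150");

            System.out.println(CRAIEntry.intersect(a, b)); // true: spans [100,300) and [250,350) overlap
            System.out.println(a.compareTo(b) < 0);        // true: same sequence, earlier alignment start
            System.out.println(a);                         // prints the entry in its serialized, tab-separated form
        }
    }
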
diff --git a/src/main/java/htsjdk/samtools/cram/CRAIIndex.java b/src/main/java/htsjdk/samtools/cram/CRAIIndex.java
new file mode 100644
index 0000000..256b35e
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/cram/CRAIIndex.java
@@ -0,0 +1,267 @@
+package htsjdk.samtools.cram;
+
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.CRAMBAIIndexer;
+import htsjdk.samtools.CRAMCRAIIndexer;
+import htsjdk.samtools.cram.encoding.reader.DataReaderFactory;
+import htsjdk.samtools.cram.encoding.reader.RefSeqIdReader;
+import htsjdk.samtools.cram.io.DefaultBitInputStream;
+import htsjdk.samtools.cram.structure.*;
+import htsjdk.samtools.seekablestream.SeekableMemoryStream;
+import htsjdk.samtools.seekablestream.SeekableStream;
+import htsjdk.samtools.ValidationStringency;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import java.util.List;
+
+/**
+ * CRAI index used for CRAM files.
+ */
+public class CRAIIndex {
+    public static final String CRAI_INDEX_SUFFIX = ".crai";
+    private final List<CRAIEntry> entries = new ArrayList<>();
+
+    /**
+     * Add a single entry to the CRAI index.
+     * @param entry entry to be added
+     */
+    public void addEntry(CRAIEntry entry) {
+        entries.add(entry);
+    }
+
+    // This is used for testing and should be removed when there are no more
+    // consumers that know about the internal structure of a CRAI
+    public List<CRAIEntry> getCRAIEntries() {
+        return entries;
+    }
+
+    /**
+     * Write out the index to an output stream.
+     * @param os Stream to write index to
+     */
+    public void writeIndex(final OutputStream os) {
+        Collections.sort(entries, CRAIEntry.byStartDesc);
+        entries.stream().forEach(e -> e.writeToStream(os));
+    }
+
+    /**
+     * Create index entries for a single container.
+     * @param c the container to index
+     */
+    public void processContainer(final Container c) {
+        // TODO: this should be refactored and delegate to container/slice
+        if (!c.isEOF()) {
+            for (int i = 0; i < c.slices.length; i++) {
+                Slice s = c.slices[i];
+                if (s.sequenceId == Slice.MULTI_REFERENCE) {
+                    this.entries.addAll(getCRAIEntriesForMultiRefSlice(s, c.header, c.offset, c.landmarks));
+                }
+                else {
+                    CRAIEntry e = new CRAIEntry();
+
+                    e.sequenceId = c.sequenceId;
+                    e.alignmentStart = s.alignmentStart;
+                    e.alignmentSpan = s.alignmentSpan;
+                    e.containerStartOffset = c.offset;
+                    e.sliceOffset = c.landmarks[i];
+                    e.sliceSize = s.size;
+                    e.sliceIndex = i;
+
+                    entries.add(e);
+                }
+            }
+        }
+    }
+
+    /**
+     * Return a list of CRAI Entries; one for each reference in the multireference slice.
+     * TODO: this should be refactored and delegate to container/slice
+     */
+    private static Collection<CRAIEntry> getCRAIEntriesForMultiRefSlice(
+            final Slice slice,
+            final CompressionHeader header,
+            final long containerOffset,
+            final int[] landmarks)
+    {
+        final DataReaderFactory dataReaderFactory = new DataReaderFactory();
+        final Map<Integer, InputStream> inputMap = new HashMap<>();
+        for (final Integer exId : slice.external.keySet()) {
+            inputMap.put(exId, new ByteArrayInputStream(slice.external.get(exId).getRawContent()));
+        }
+
+        final RefSeqIdReader reader = new RefSeqIdReader(
+                slice.sequenceId,
+                slice.alignmentStart,
+                ValidationStringency.DEFAULT_STRINGENCY);
+        dataReaderFactory.buildReader(
+                reader,
+                new DefaultBitInputStream(new ByteArrayInputStream(slice.coreBlock.getRawContent())),
+                inputMap,
+                header,
+                slice.sequenceId
+        );
+        reader.APDelta = header.APDelta;
+
+        for (int i = 0; i < slice.nofRecords; i++) {
+            final CramCompressionRecord record = new CramCompressionRecord();
+            record.sliceIndex = slice.index;
+            record.index = i;
+
+            reader.read();
+
+            if (record.sequenceId == slice.sequenceId) {
+                record.sequenceId = slice.sequenceId;
+            }
+            else if (record.sequenceId == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX) {
+                record.sequenceName = SAMRecord.NO_ALIGNMENT_REFERENCE_NAME;
+            }
+        }
+
+        Map<Integer, AlignmentSpan> spans = reader.getReferenceSpans();
+        List<CRAIEntry> entries = new ArrayList<>(spans.size());
+        for (int seqId : spans.keySet()) {
+            CRAIEntry e = new CRAIEntry();
+            e.sequenceId = seqId;
+            AlignmentSpan span = spans.get(seqId);
+            e.alignmentStart = span.getStart();
+            e.alignmentSpan = span.getSpan();
+            e.sliceSize = slice.size;
+            e.sliceIndex = slice.index;
+            e.containerStartOffset = containerOffset;
+            e.sliceOffset = landmarks[slice.index];
+
+            entries.add(e);
+        }
+
+        return entries;
+    }
+
+    public static SeekableStream openCraiFileAsBaiStream(final File cramIndexFile, final SAMSequenceDictionary dictionary) throws IOException {
+        return openCraiFileAsBaiStream(new FileInputStream(cramIndexFile), dictionary);
+    }
+
+    public static SeekableStream openCraiFileAsBaiStream(final InputStream indexStream, final SAMSequenceDictionary dictionary) throws IOException, CRAIIndexException {
+        final List<CRAIEntry> full = CRAMCRAIIndexer.readIndex(indexStream).getCRAIEntries();
+        Collections.sort(full);
+
+        final SAMFileHeader header = new SAMFileHeader();
+        header.setSequenceDictionary(dictionary);
+
+        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        final CRAMBAIIndexer indexer = new CRAMBAIIndexer(baos, header);
+
+        for (final CRAIEntry entry : full) {
+            final Slice slice = new Slice();
+            slice.containerOffset = entry.containerStartOffset;
+            slice.alignmentStart = entry.alignmentStart;
+            slice.alignmentSpan = entry.alignmentSpan;
+            slice.sequenceId = entry.sequenceId;
+            // https://github.com/samtools/htsjdk/issues/531
+            // entry.sliceSize is the slice size in bytes, not the number of
+            // records; this results in the BAMIndex metadata being wrong
+            slice.nofRecords = entry.sliceSize;
+            slice.index = entry.sliceIndex;
+            slice.offset = entry.sliceOffset;
+
+            indexer.processSingleReferenceSlice(slice);
+        }
+        indexer.finish();
+
+        return new SeekableMemoryStream(baos.toByteArray(), "CRAI to BAI converter");
+    }
+
+    public static List<CRAIEntry> find(final List<CRAIEntry> list, final int seqId, final int start, final int span) {
+        final boolean whole = start < 1 || span < 1;
+        final CRAIEntry query = new CRAIEntry();
+        query.sequenceId = seqId;
+        query.alignmentStart = start < 1 ? 1 : start;
+        query.alignmentSpan = span < 1 ? Integer.MAX_VALUE : span;
+        query.containerStartOffset = Long.MAX_VALUE;
+        query.sliceOffset = Integer.MAX_VALUE;
+        query.sliceSize = Integer.MAX_VALUE;
+
+        final List<CRAIEntry> l = new ArrayList<>();
+        for (final CRAIEntry e : list) {
+            if (e.sequenceId != seqId) {
+                continue;
+            }
+            if (whole || CRAIEntry.intersect(e, query)) {
+                l.add(e);
+            }
+        }
+        Collections.sort(l, CRAIEntry.byStart);
+        return l;
+    }
+
+    public static CRAIEntry getLeftmost(final List<CRAIEntry> list) {
+        if (list == null || list.isEmpty()) {
+            return null;
+        }
+        CRAIEntry left = list.get(0);
+
+        for (final CRAIEntry e : list) {
+            if (e.alignmentStart < left.alignmentStart) {
+                left = e;
+            }
+        }
+
+        return left;
+    }
+
+    /**
+     * Find the index of the last aligned entry in the list. Assumes the index is sorted by
+     * coordinate and that unmapped entries (with sequence id = -1) follow the mapped entries.
+     *
+     * @param list a list of CRAI entries
+     * @return integer index of the last entry with sequence id not equal to -1
+     */
+    public static int findLastAlignedEntry(final List<CRAIEntry> list) {
+        if (list.isEmpty()) {
+            return -1;
+        }
+
+        int low = 0;
+        int high = list.size() - 1;
+
+        while (low <= high) {
+            final int mid = (low + high) >>> 1;
+            final CRAIEntry midVal = list.get(mid);
+
+            if (midVal.sequenceId >= 0) {
+                low = mid + 1;
+            } else {
+                high = mid - 1;
+            }
+        }
+        if (low >= list.size()) {
+            return list.size() - 1;
+        }
+        // Step back over any trailing unmapped entries so the returned index points at the last
+        // aligned entry (or -1 if there is none).
+        for (; low >= 0 && list.get(low).sequenceId == -1; low--) {
+        }
+        return low;
+    }
+
+    public static class CRAIIndexException extends RuntimeException {
+
+        public CRAIIndexException(final String s) {
+            super(s);
+        }
+
+        public CRAIIndexException(final NumberFormatException e) {
+            super(e);
+        }
+    }
+}
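
A short sketch of the find() helper above; the entries are hand-built and the coordinates are made up:

    import htsjdk.samtools.cram.CRAIEntry;
    import htsjdk.samtools.cram.CRAIIndex;
    import java.util.Arrays;
    import java.util.List;

    public class CraiFindSketch {
        public static void main(final String[] args) {
            final CRAIEntry first = new CRAIEntry("1\t1\t100\t0\t0\t50");
            final CRAIEntry second = new CRAIEntry("1\t500\t100\t1000\t0\t50");
            final List<CRAIEntry> entries = Arrays.asList(first, second);

            // Slices on sequence 1 overlapping [450, 650): only the second entry qualifies.
            System.out.println(CRAIIndex.find(entries, 1, 450, 200).size()); // 1

            // A non-positive start or span is treated as "whole reference".
            System.out.println(CRAIIndex.find(entries, 1, -1, -1).size());   // 2
        }
    }
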
diff --git a/src/java/htsjdk/samtools/cram/CRAMException.java b/src/main/java/htsjdk/samtools/cram/CRAMException.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/CRAMException.java
rename to src/main/java/htsjdk/samtools/cram/CRAMException.java
diff --git a/src/java/htsjdk/samtools/cram/build/CompressionHeaderFactory.java b/src/main/java/htsjdk/samtools/cram/build/CompressionHeaderFactory.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/build/CompressionHeaderFactory.java
rename to src/main/java/htsjdk/samtools/cram/build/CompressionHeaderFactory.java
diff --git a/src/java/htsjdk/samtools/cram/build/ContainerFactory.java b/src/main/java/htsjdk/samtools/cram/build/ContainerFactory.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/build/ContainerFactory.java
rename to src/main/java/htsjdk/samtools/cram/build/ContainerFactory.java
diff --git a/src/java/htsjdk/samtools/cram/build/ContainerParser.java b/src/main/java/htsjdk/samtools/cram/build/ContainerParser.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/build/ContainerParser.java
rename to src/main/java/htsjdk/samtools/cram/build/ContainerParser.java
diff --git a/src/java/htsjdk/samtools/cram/build/Cram2SamRecordFactory.java b/src/main/java/htsjdk/samtools/cram/build/Cram2SamRecordFactory.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/build/Cram2SamRecordFactory.java
rename to src/main/java/htsjdk/samtools/cram/build/Cram2SamRecordFactory.java
diff --git a/src/java/htsjdk/samtools/cram/build/CramContainerIterator.java b/src/main/java/htsjdk/samtools/cram/build/CramContainerIterator.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/build/CramContainerIterator.java
rename to src/main/java/htsjdk/samtools/cram/build/CramContainerIterator.java
diff --git a/src/java/htsjdk/samtools/cram/build/CramIO.java b/src/main/java/htsjdk/samtools/cram/build/CramIO.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/build/CramIO.java
rename to src/main/java/htsjdk/samtools/cram/build/CramIO.java
diff --git a/src/java/htsjdk/samtools/cram/build/CramNormalizer.java b/src/main/java/htsjdk/samtools/cram/build/CramNormalizer.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/build/CramNormalizer.java
rename to src/main/java/htsjdk/samtools/cram/build/CramNormalizer.java
diff --git a/src/java/htsjdk/samtools/cram/build/CramSpanContainerIterator.java b/src/main/java/htsjdk/samtools/cram/build/CramSpanContainerIterator.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/build/CramSpanContainerIterator.java
rename to src/main/java/htsjdk/samtools/cram/build/CramSpanContainerIterator.java
diff --git a/src/java/htsjdk/samtools/cram/build/Sam2CramRecordFactory.java b/src/main/java/htsjdk/samtools/cram/build/Sam2CramRecordFactory.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/build/Sam2CramRecordFactory.java
rename to src/main/java/htsjdk/samtools/cram/build/Sam2CramRecordFactory.java
diff --git a/src/java/htsjdk/samtools/cram/build/Utils.java b/src/main/java/htsjdk/samtools/cram/build/Utils.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/build/Utils.java
rename to src/main/java/htsjdk/samtools/cram/build/Utils.java
diff --git a/src/java/htsjdk/samtools/cram/common/CramVersionPolicies.java b/src/main/java/htsjdk/samtools/cram/common/CramVersionPolicies.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/common/CramVersionPolicies.java
rename to src/main/java/htsjdk/samtools/cram/common/CramVersionPolicies.java
diff --git a/src/main/java/htsjdk/samtools/cram/common/CramVersions.java b/src/main/java/htsjdk/samtools/cram/common/CramVersions.java
new file mode 100644
index 0000000..26d5685
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/cram/common/CramVersions.java
@@ -0,0 +1,11 @@
+package htsjdk.samtools.cram.common;
+
+public class CramVersions {
+    public static final Version CRAM_v2_1 = new Version(2, 1, 0);
+    public static final Version CRAM_v3 = new Version(3, 0, 0);
+
+    /**
+     * The default CRAM version when creating a new CRAM output file or stream.
+     */
+    public static final Version DEFAULT_CRAM_VERSION = CRAM_v3;
+}
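
For completeness, a trivial sketch showing how the default output version constant is read:

    import htsjdk.samtools.cram.common.CramVersions;
    import htsjdk.samtools.cram.common.Version;

    public class CramVersionSketch {
        public static void main(final String[] args) {
            final Version v = CramVersions.DEFAULT_CRAM_VERSION;
            System.out.println(v); // the CRAM 3.0 version descriptor used for new output files and streams
        }
    }
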
diff --git a/src/java/htsjdk/samtools/cram/common/IntHashMap.java b/src/main/java/htsjdk/samtools/cram/common/IntHashMap.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/common/IntHashMap.java
rename to src/main/java/htsjdk/samtools/cram/common/IntHashMap.java
diff --git a/src/java/htsjdk/samtools/cram/common/MutableInt.java b/src/main/java/htsjdk/samtools/cram/common/MutableInt.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/common/MutableInt.java
rename to src/main/java/htsjdk/samtools/cram/common/MutableInt.java
diff --git a/src/java/htsjdk/samtools/cram/common/Version.java b/src/main/java/htsjdk/samtools/cram/common/Version.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/common/Version.java
rename to src/main/java/htsjdk/samtools/cram/common/Version.java
diff --git a/src/java/htsjdk/samtools/cram/digest/AbstractSerialDigest.java b/src/main/java/htsjdk/samtools/cram/digest/AbstractSerialDigest.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/digest/AbstractSerialDigest.java
rename to src/main/java/htsjdk/samtools/cram/digest/AbstractSerialDigest.java
diff --git a/src/java/htsjdk/samtools/cram/digest/ByteSumCombine.java b/src/main/java/htsjdk/samtools/cram/digest/ByteSumCombine.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/digest/ByteSumCombine.java
rename to src/main/java/htsjdk/samtools/cram/digest/ByteSumCombine.java
diff --git a/src/java/htsjdk/samtools/cram/digest/Combine.java b/src/main/java/htsjdk/samtools/cram/digest/Combine.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/digest/Combine.java
rename to src/main/java/htsjdk/samtools/cram/digest/Combine.java
diff --git a/src/java/htsjdk/samtools/cram/digest/ContentDigests.java b/src/main/java/htsjdk/samtools/cram/digest/ContentDigests.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/digest/ContentDigests.java
rename to src/main/java/htsjdk/samtools/cram/digest/ContentDigests.java
diff --git a/src/java/htsjdk/samtools/cram/digest/Crc32Hasher.java b/src/main/java/htsjdk/samtools/cram/digest/Crc32Hasher.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/digest/Crc32Hasher.java
rename to src/main/java/htsjdk/samtools/cram/digest/Crc32Hasher.java
diff --git a/src/java/htsjdk/samtools/cram/digest/IntegerSumCombine.java b/src/main/java/htsjdk/samtools/cram/digest/IntegerSumCombine.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/digest/IntegerSumCombine.java
rename to src/main/java/htsjdk/samtools/cram/digest/IntegerSumCombine.java
diff --git a/src/java/htsjdk/samtools/cram/digest/MessageDigestHasher.java b/src/main/java/htsjdk/samtools/cram/digest/MessageDigestHasher.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/digest/MessageDigestHasher.java
rename to src/main/java/htsjdk/samtools/cram/digest/MessageDigestHasher.java
diff --git a/src/java/htsjdk/samtools/cram/digest/SERIES.java b/src/main/java/htsjdk/samtools/cram/digest/SERIES.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/digest/SERIES.java
rename to src/main/java/htsjdk/samtools/cram/digest/SERIES.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/AbstractBitCodec.java b/src/main/java/htsjdk/samtools/cram/encoding/AbstractBitCodec.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/AbstractBitCodec.java
rename to src/main/java/htsjdk/samtools/cram/encoding/AbstractBitCodec.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/BetaIntegerCodec.java b/src/main/java/htsjdk/samtools/cram/encoding/BetaIntegerCodec.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/BetaIntegerCodec.java
rename to src/main/java/htsjdk/samtools/cram/encoding/BetaIntegerCodec.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/BetaIntegerEncoding.java b/src/main/java/htsjdk/samtools/cram/encoding/BetaIntegerEncoding.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/BetaIntegerEncoding.java
rename to src/main/java/htsjdk/samtools/cram/encoding/BetaIntegerEncoding.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/BitCodec.java b/src/main/java/htsjdk/samtools/cram/encoding/BitCodec.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/BitCodec.java
rename to src/main/java/htsjdk/samtools/cram/encoding/BitCodec.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/ByteArrayLenEncoding.java b/src/main/java/htsjdk/samtools/cram/encoding/ByteArrayLenEncoding.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/ByteArrayLenEncoding.java
rename to src/main/java/htsjdk/samtools/cram/encoding/ByteArrayLenEncoding.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/ByteArrayStopEncoding.java b/src/main/java/htsjdk/samtools/cram/encoding/ByteArrayStopEncoding.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/ByteArrayStopEncoding.java
rename to src/main/java/htsjdk/samtools/cram/encoding/ByteArrayStopEncoding.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/DataSeries.java b/src/main/java/htsjdk/samtools/cram/encoding/DataSeries.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/DataSeries.java
rename to src/main/java/htsjdk/samtools/cram/encoding/DataSeries.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/DataSeriesMap.java b/src/main/java/htsjdk/samtools/cram/encoding/DataSeriesMap.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/DataSeriesMap.java
rename to src/main/java/htsjdk/samtools/cram/encoding/DataSeriesMap.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/DataSeriesType.java b/src/main/java/htsjdk/samtools/cram/encoding/DataSeriesType.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/DataSeriesType.java
rename to src/main/java/htsjdk/samtools/cram/encoding/DataSeriesType.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/Encoding.java b/src/main/java/htsjdk/samtools/cram/encoding/Encoding.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/Encoding.java
rename to src/main/java/htsjdk/samtools/cram/encoding/Encoding.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/EncodingFactory.java b/src/main/java/htsjdk/samtools/cram/encoding/EncodingFactory.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/EncodingFactory.java
rename to src/main/java/htsjdk/samtools/cram/encoding/EncodingFactory.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/ExternalByteArrayCodec.java b/src/main/java/htsjdk/samtools/cram/encoding/ExternalByteArrayCodec.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/ExternalByteArrayCodec.java
rename to src/main/java/htsjdk/samtools/cram/encoding/ExternalByteArrayCodec.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/ExternalByteArrayEncoding.java b/src/main/java/htsjdk/samtools/cram/encoding/ExternalByteArrayEncoding.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/ExternalByteArrayEncoding.java
rename to src/main/java/htsjdk/samtools/cram/encoding/ExternalByteArrayEncoding.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/ExternalByteCodec.java b/src/main/java/htsjdk/samtools/cram/encoding/ExternalByteCodec.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/ExternalByteCodec.java
rename to src/main/java/htsjdk/samtools/cram/encoding/ExternalByteCodec.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/ExternalByteEncoding.java b/src/main/java/htsjdk/samtools/cram/encoding/ExternalByteEncoding.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/ExternalByteEncoding.java
rename to src/main/java/htsjdk/samtools/cram/encoding/ExternalByteEncoding.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/ExternalCompressor.java b/src/main/java/htsjdk/samtools/cram/encoding/ExternalCompressor.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/ExternalCompressor.java
rename to src/main/java/htsjdk/samtools/cram/encoding/ExternalCompressor.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/ExternalIntegerCodec.java b/src/main/java/htsjdk/samtools/cram/encoding/ExternalIntegerCodec.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/ExternalIntegerCodec.java
rename to src/main/java/htsjdk/samtools/cram/encoding/ExternalIntegerCodec.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/ExternalIntegerEncoding.java b/src/main/java/htsjdk/samtools/cram/encoding/ExternalIntegerEncoding.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/ExternalIntegerEncoding.java
rename to src/main/java/htsjdk/samtools/cram/encoding/ExternalIntegerEncoding.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/ExternalLongCodec.java b/src/main/java/htsjdk/samtools/cram/encoding/ExternalLongCodec.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/ExternalLongCodec.java
rename to src/main/java/htsjdk/samtools/cram/encoding/ExternalLongCodec.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/ExternalLongEncoding.java b/src/main/java/htsjdk/samtools/cram/encoding/ExternalLongEncoding.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/ExternalLongEncoding.java
rename to src/main/java/htsjdk/samtools/cram/encoding/ExternalLongEncoding.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/GammaIntegerCodec.java b/src/main/java/htsjdk/samtools/cram/encoding/GammaIntegerCodec.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/GammaIntegerCodec.java
rename to src/main/java/htsjdk/samtools/cram/encoding/GammaIntegerCodec.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/GammaIntegerEncoding.java b/src/main/java/htsjdk/samtools/cram/encoding/GammaIntegerEncoding.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/GammaIntegerEncoding.java
rename to src/main/java/htsjdk/samtools/cram/encoding/GammaIntegerEncoding.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/GolombIntegerCodec.java b/src/main/java/htsjdk/samtools/cram/encoding/GolombIntegerCodec.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/GolombIntegerCodec.java
rename to src/main/java/htsjdk/samtools/cram/encoding/GolombIntegerCodec.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/GolombIntegerEncoding.java b/src/main/java/htsjdk/samtools/cram/encoding/GolombIntegerEncoding.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/GolombIntegerEncoding.java
rename to src/main/java/htsjdk/samtools/cram/encoding/GolombIntegerEncoding.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/GolombLongCodec.java b/src/main/java/htsjdk/samtools/cram/encoding/GolombLongCodec.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/GolombLongCodec.java
rename to src/main/java/htsjdk/samtools/cram/encoding/GolombLongCodec.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/GolombLongEncoding.java b/src/main/java/htsjdk/samtools/cram/encoding/GolombLongEncoding.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/GolombLongEncoding.java
rename to src/main/java/htsjdk/samtools/cram/encoding/GolombLongEncoding.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/GolombRiceIntegerCodec.java b/src/main/java/htsjdk/samtools/cram/encoding/GolombRiceIntegerCodec.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/GolombRiceIntegerCodec.java
rename to src/main/java/htsjdk/samtools/cram/encoding/GolombRiceIntegerCodec.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/GolombRiceIntegerEncoding.java b/src/main/java/htsjdk/samtools/cram/encoding/GolombRiceIntegerEncoding.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/GolombRiceIntegerEncoding.java
rename to src/main/java/htsjdk/samtools/cram/encoding/GolombRiceIntegerEncoding.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/NullCodec.java b/src/main/java/htsjdk/samtools/cram/encoding/NullCodec.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/NullCodec.java
rename to src/main/java/htsjdk/samtools/cram/encoding/NullCodec.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/NullEncoding.java b/src/main/java/htsjdk/samtools/cram/encoding/NullEncoding.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/NullEncoding.java
rename to src/main/java/htsjdk/samtools/cram/encoding/NullEncoding.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/SubexponentialIntegerCodec.java b/src/main/java/htsjdk/samtools/cram/encoding/SubexponentialIntegerCodec.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/SubexponentialIntegerCodec.java
rename to src/main/java/htsjdk/samtools/cram/encoding/SubexponentialIntegerCodec.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/SubexponentialIntegerEncoding.java b/src/main/java/htsjdk/samtools/cram/encoding/SubexponentialIntegerEncoding.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/SubexponentialIntegerEncoding.java
rename to src/main/java/htsjdk/samtools/cram/encoding/SubexponentialIntegerEncoding.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/huffman/HuffmanCode.java b/src/main/java/htsjdk/samtools/cram/encoding/huffman/HuffmanCode.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/huffman/HuffmanCode.java
rename to src/main/java/htsjdk/samtools/cram/encoding/huffman/HuffmanCode.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/huffman/HuffmanLeaf.java b/src/main/java/htsjdk/samtools/cram/encoding/huffman/HuffmanLeaf.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/huffman/HuffmanLeaf.java
rename to src/main/java/htsjdk/samtools/cram/encoding/huffman/HuffmanLeaf.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/huffman/HuffmanNode.java b/src/main/java/htsjdk/samtools/cram/encoding/huffman/HuffmanNode.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/huffman/HuffmanNode.java
rename to src/main/java/htsjdk/samtools/cram/encoding/huffman/HuffmanNode.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/huffman/HuffmanTree.java b/src/main/java/htsjdk/samtools/cram/encoding/huffman/HuffmanTree.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/huffman/HuffmanTree.java
rename to src/main/java/htsjdk/samtools/cram/encoding/huffman/HuffmanTree.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/huffman/codec/CanonicalHuffmanByteCodec.java b/src/main/java/htsjdk/samtools/cram/encoding/huffman/codec/CanonicalHuffmanByteCodec.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/huffman/codec/CanonicalHuffmanByteCodec.java
rename to src/main/java/htsjdk/samtools/cram/encoding/huffman/codec/CanonicalHuffmanByteCodec.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/huffman/codec/CanonicalHuffmanIntegerCodec.java b/src/main/java/htsjdk/samtools/cram/encoding/huffman/codec/CanonicalHuffmanIntegerCodec.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/huffman/codec/CanonicalHuffmanIntegerCodec.java
rename to src/main/java/htsjdk/samtools/cram/encoding/huffman/codec/CanonicalHuffmanIntegerCodec.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanBitCode.java b/src/main/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanBitCode.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanBitCode.java
rename to src/main/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanBitCode.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanByteEncoding.java b/src/main/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanByteEncoding.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanByteEncoding.java
rename to src/main/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanByteEncoding.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanByteHelper.java b/src/main/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanByteHelper.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanByteHelper.java
rename to src/main/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanByteHelper.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanIntHelper.java b/src/main/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanIntHelper.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanIntHelper.java
rename to src/main/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanIntHelper.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanIntegerEncoding.java b/src/main/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanIntegerEncoding.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanIntegerEncoding.java
rename to src/main/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanIntegerEncoding.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanParamsCalculator.java b/src/main/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanParamsCalculator.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanParamsCalculator.java
rename to src/main/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanParamsCalculator.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/rans/Constants.java b/src/main/java/htsjdk/samtools/cram/encoding/rans/Constants.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/rans/Constants.java
rename to src/main/java/htsjdk/samtools/cram/encoding/rans/Constants.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/rans/D04.java b/src/main/java/htsjdk/samtools/cram/encoding/rans/D04.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/rans/D04.java
rename to src/main/java/htsjdk/samtools/cram/encoding/rans/D04.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/rans/D14.java b/src/main/java/htsjdk/samtools/cram/encoding/rans/D14.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/rans/D14.java
rename to src/main/java/htsjdk/samtools/cram/encoding/rans/D14.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/rans/Decoding.java b/src/main/java/htsjdk/samtools/cram/encoding/rans/Decoding.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/rans/Decoding.java
rename to src/main/java/htsjdk/samtools/cram/encoding/rans/Decoding.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/rans/E04.java b/src/main/java/htsjdk/samtools/cram/encoding/rans/E04.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/rans/E04.java
rename to src/main/java/htsjdk/samtools/cram/encoding/rans/E04.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/rans/E14.java b/src/main/java/htsjdk/samtools/cram/encoding/rans/E14.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/rans/E14.java
rename to src/main/java/htsjdk/samtools/cram/encoding/rans/E14.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/rans/Encoding.java b/src/main/java/htsjdk/samtools/cram/encoding/rans/Encoding.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/rans/Encoding.java
rename to src/main/java/htsjdk/samtools/cram/encoding/rans/Encoding.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/rans/Frequencies.java b/src/main/java/htsjdk/samtools/cram/encoding/rans/Frequencies.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/rans/Frequencies.java
rename to src/main/java/htsjdk/samtools/cram/encoding/rans/Frequencies.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/rans/RANS.java b/src/main/java/htsjdk/samtools/cram/encoding/rans/RANS.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/rans/RANS.java
rename to src/main/java/htsjdk/samtools/cram/encoding/rans/RANS.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/rans/Utils.java b/src/main/java/htsjdk/samtools/cram/encoding/rans/Utils.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/rans/Utils.java
rename to src/main/java/htsjdk/samtools/cram/encoding/rans/Utils.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/reader/AbstractReader.java b/src/main/java/htsjdk/samtools/cram/encoding/reader/AbstractReader.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/reader/AbstractReader.java
rename to src/main/java/htsjdk/samtools/cram/encoding/reader/AbstractReader.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/reader/CramRecordReader.java b/src/main/java/htsjdk/samtools/cram/encoding/reader/CramRecordReader.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/reader/CramRecordReader.java
rename to src/main/java/htsjdk/samtools/cram/encoding/reader/CramRecordReader.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/reader/DataReader.java b/src/main/java/htsjdk/samtools/cram/encoding/reader/DataReader.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/reader/DataReader.java
rename to src/main/java/htsjdk/samtools/cram/encoding/reader/DataReader.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/reader/DataReaderFactory.java b/src/main/java/htsjdk/samtools/cram/encoding/reader/DataReaderFactory.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/reader/DataReaderFactory.java
rename to src/main/java/htsjdk/samtools/cram/encoding/reader/DataReaderFactory.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/reader/RefSeqIdReader.java b/src/main/java/htsjdk/samtools/cram/encoding/reader/RefSeqIdReader.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/reader/RefSeqIdReader.java
rename to src/main/java/htsjdk/samtools/cram/encoding/reader/RefSeqIdReader.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/readfeatures/BaseQualityScore.java b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/BaseQualityScore.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/readfeatures/BaseQualityScore.java
rename to src/main/java/htsjdk/samtools/cram/encoding/readfeatures/BaseQualityScore.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/readfeatures/Bases.java b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/Bases.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/readfeatures/Bases.java
rename to src/main/java/htsjdk/samtools/cram/encoding/readfeatures/Bases.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/readfeatures/Deletion.java b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/Deletion.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/readfeatures/Deletion.java
rename to src/main/java/htsjdk/samtools/cram/encoding/readfeatures/Deletion.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/readfeatures/HardClip.java b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/HardClip.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/readfeatures/HardClip.java
rename to src/main/java/htsjdk/samtools/cram/encoding/readfeatures/HardClip.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/readfeatures/InsertBase.java b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/InsertBase.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/readfeatures/InsertBase.java
rename to src/main/java/htsjdk/samtools/cram/encoding/readfeatures/InsertBase.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/readfeatures/Insertion.java b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/Insertion.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/readfeatures/Insertion.java
rename to src/main/java/htsjdk/samtools/cram/encoding/readfeatures/Insertion.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/readfeatures/Padding.java b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/Padding.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/readfeatures/Padding.java
rename to src/main/java/htsjdk/samtools/cram/encoding/readfeatures/Padding.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/readfeatures/ReadBase.java b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/ReadBase.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/readfeatures/ReadBase.java
rename to src/main/java/htsjdk/samtools/cram/encoding/readfeatures/ReadBase.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/readfeatures/ReadFeature.java b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/ReadFeature.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/readfeatures/ReadFeature.java
rename to src/main/java/htsjdk/samtools/cram/encoding/readfeatures/ReadFeature.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/readfeatures/RefSkip.java b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/RefSkip.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/readfeatures/RefSkip.java
rename to src/main/java/htsjdk/samtools/cram/encoding/readfeatures/RefSkip.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/readfeatures/Scores.java b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/Scores.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/readfeatures/Scores.java
rename to src/main/java/htsjdk/samtools/cram/encoding/readfeatures/Scores.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/readfeatures/SoftClip.java b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/SoftClip.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/readfeatures/SoftClip.java
rename to src/main/java/htsjdk/samtools/cram/encoding/readfeatures/SoftClip.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/readfeatures/Substitution.java b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/Substitution.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/readfeatures/Substitution.java
rename to src/main/java/htsjdk/samtools/cram/encoding/readfeatures/Substitution.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/writer/DataWriter.java b/src/main/java/htsjdk/samtools/cram/encoding/writer/DataWriter.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/writer/DataWriter.java
rename to src/main/java/htsjdk/samtools/cram/encoding/writer/DataWriter.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/writer/DataWriterFactory.java b/src/main/java/htsjdk/samtools/cram/encoding/writer/DataWriterFactory.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/writer/DataWriterFactory.java
rename to src/main/java/htsjdk/samtools/cram/encoding/writer/DataWriterFactory.java
diff --git a/src/java/htsjdk/samtools/cram/encoding/writer/Writer.java b/src/main/java/htsjdk/samtools/cram/encoding/writer/Writer.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/encoding/writer/Writer.java
rename to src/main/java/htsjdk/samtools/cram/encoding/writer/Writer.java
diff --git a/src/java/htsjdk/samtools/cram/io/BitInputStream.java b/src/main/java/htsjdk/samtools/cram/io/BitInputStream.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/io/BitInputStream.java
rename to src/main/java/htsjdk/samtools/cram/io/BitInputStream.java
diff --git a/src/java/htsjdk/samtools/cram/io/BitOutputStream.java b/src/main/java/htsjdk/samtools/cram/io/BitOutputStream.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/io/BitOutputStream.java
rename to src/main/java/htsjdk/samtools/cram/io/BitOutputStream.java
diff --git a/src/java/htsjdk/samtools/cram/io/CRC32InputStream.java b/src/main/java/htsjdk/samtools/cram/io/CRC32InputStream.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/io/CRC32InputStream.java
rename to src/main/java/htsjdk/samtools/cram/io/CRC32InputStream.java
diff --git a/src/java/htsjdk/samtools/cram/io/CRC32OutputStream.java b/src/main/java/htsjdk/samtools/cram/io/CRC32OutputStream.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/io/CRC32OutputStream.java
rename to src/main/java/htsjdk/samtools/cram/io/CRC32OutputStream.java
diff --git a/src/java/htsjdk/samtools/cram/io/CountingInputStream.java b/src/main/java/htsjdk/samtools/cram/io/CountingInputStream.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/io/CountingInputStream.java
rename to src/main/java/htsjdk/samtools/cram/io/CountingInputStream.java
diff --git a/src/java/htsjdk/samtools/cram/io/CramArray.java b/src/main/java/htsjdk/samtools/cram/io/CramArray.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/io/CramArray.java
rename to src/main/java/htsjdk/samtools/cram/io/CramArray.java
diff --git a/src/java/htsjdk/samtools/cram/io/CramInt.java b/src/main/java/htsjdk/samtools/cram/io/CramInt.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/io/CramInt.java
rename to src/main/java/htsjdk/samtools/cram/io/CramInt.java
diff --git a/src/java/htsjdk/samtools/cram/io/DefaultBitInputStream.java b/src/main/java/htsjdk/samtools/cram/io/DefaultBitInputStream.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/io/DefaultBitInputStream.java
rename to src/main/java/htsjdk/samtools/cram/io/DefaultBitInputStream.java
diff --git a/src/java/htsjdk/samtools/cram/io/DefaultBitOutputStream.java b/src/main/java/htsjdk/samtools/cram/io/DefaultBitOutputStream.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/io/DefaultBitOutputStream.java
rename to src/main/java/htsjdk/samtools/cram/io/DefaultBitOutputStream.java
diff --git a/src/java/htsjdk/samtools/cram/io/ExposedByteArrayOutputStream.java b/src/main/java/htsjdk/samtools/cram/io/ExposedByteArrayOutputStream.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/io/ExposedByteArrayOutputStream.java
rename to src/main/java/htsjdk/samtools/cram/io/ExposedByteArrayOutputStream.java
diff --git a/src/main/java/htsjdk/samtools/cram/io/ExternalCompression.java b/src/main/java/htsjdk/samtools/cram/io/ExternalCompression.java
new file mode 100644
index 0000000..4905a96
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/cram/io/ExternalCompression.java
@@ -0,0 +1,152 @@
+package htsjdk.samtools.cram.io;
+
+import htsjdk.samtools.cram.encoding.rans.RANS;
+import htsjdk.samtools.util.IOUtil;
+import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
+import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream;
+import org.apache.commons.compress.compressors.xz.XZCompressorInputStream;
+import org.apache.commons.compress.compressors.xz.XZCompressorOutputStream;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.zip.GZIPInputStream;
+import java.util.zip.GZIPOutputStream;
+
+/**
+ * Methods to provide CRAM external compression/decompression features.
+ */
+public class ExternalCompression {
+    private static final int GZIP_COMPRESSION_LEVEL = Integer.valueOf(System.getProperty("gzip.compression.level", "5"));
+
+    /**
+     * Compress a byte array into a GZIP blob. The method obeys the {@link ExternalCompression#GZIP_COMPRESSION_LEVEL} compression level.
+     *
+     * @param data byte array to compress
+     * @return compressed blob
+     */
+    public static byte[] gzip(final byte[] data) throws IOException {
+        final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
+        final GZIPOutputStream gos = new GZIPOutputStream(byteArrayOutputStream) {
+            {
+                def.setLevel(GZIP_COMPRESSION_LEVEL);
+            }
+        };
+        IOUtil.copyStream(new ByteArrayInputStream(data), gos);
+        gos.close();
+
+        return byteArrayOutputStream.toByteArray();
+    }
+
+    /**
+     * Uncompress a GZIP data blob into a new byte array.
+     *
+     * @param data compressed data blob
+     * @return uncompressed data
+     * @throws IOException as per java IO contract
+     */
+    public static byte[] gunzip(final byte[] data) throws IOException {
+        final GZIPInputStream gzipInputStream = new GZIPInputStream(new ByteArrayInputStream(data));
+        return InputStreamUtils.readFully(gzipInputStream);
+    }
+
+    /**
+     * Compress a byte array into a BZIP2 blob.
+     *
+     * @param data byte array to compress
+     * @return compressed blob
+     */
+    public static byte[] bzip2(final byte[] data) throws IOException {
+        final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
+        final BZip2CompressorOutputStream bos = new BZip2CompressorOutputStream(byteArrayOutputStream);
+        IOUtil.copyStream(new ByteArrayInputStream(data), bos);
+        bos.close();
+        return byteArrayOutputStream.toByteArray();
+    }
+
+    /**
+     * Uncompress a BZIP2 data blob into a new byte array.
+     *
+     * @param data compressed data blob
+     * @return uncompressed data
+     * @throws IOException as per java IO contract
+     */
+    @SuppressWarnings("ResultOfMethodCallIgnored")
+    public static byte[] unbzip2(final byte[] data) throws IOException {
+        final ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(data);
+        return InputStreamUtils.readFully(new BZip2CompressorInputStream(byteArrayInputStream));
+    }
+
+    /**
+     * Compress a byte array into a rANS blob.
+     *
+     * @param data  byte array to compress
+     * @param order rANS order
+     * @return compressed blob
+     */
+    public static byte[] rans(final byte[] data, final RANS.ORDER order) {
+        final ByteBuffer buffer = RANS.compress(ByteBuffer.wrap(data), order, null);
+        return toByteArray(buffer);
+    }
+
+    /**
+     * Compress a byte array into a rANS blob.
+     *
+     * @param data  byte array to compress
+     * @param order rANS order
+     * @return compressed blob
+     */
+    public static byte[] rans(final byte[] data, final int order) {
+        final ByteBuffer buffer = RANS.compress(ByteBuffer.wrap(data), RANS.ORDER.fromInt(order), null);
+        return toByteArray(buffer);
+    }
+
+    /**
+     * Uncompress a rANS data blob into a new byte array.
+     *
+     * @param data compressed data blob
+     * @return uncompressed data
+     */
+    public static byte[] unrans(final byte[] data) {
+        final ByteBuffer buf = RANS.uncompress(ByteBuffer.wrap(data), null);
+        return toByteArray(buf);
+    }
+
+
+    /**
+     * Compress a byte array into an XZ blob.
+     *
+     * @param data byte array to compress
+     * @return compressed blob
+     */
+    public static byte[] xz(final byte[] data) throws IOException {
+        final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(data.length * 2);
+        final XZCompressorOutputStream xzCompressorOutputStream = new XZCompressorOutputStream(byteArrayOutputStream);
+        xzCompressorOutputStream.write(data);
+        xzCompressorOutputStream.close();
+        return byteArrayOutputStream.toByteArray();
+    }
+
+
+    /**
+     * Uncompress an XZ data blob into a new byte array.
+     *
+     * @param data compressed data blob
+     * @return uncompressed data
+     * @throws IOException as per java IO contract
+     */
+    public static byte[] unxz(final byte[] data) throws IOException {
+        final XZCompressorInputStream xzCompressorInputStream = new XZCompressorInputStream(new ByteArrayInputStream(data));
+        return InputStreamUtils.readFully(xzCompressorInputStream);
+    }
+
+
+    private static byte[] toByteArray(final ByteBuffer buffer) {
+        if (buffer.hasArray() && buffer.arrayOffset() == 0 && buffer.array().length == buffer.limit()) return buffer.array();
+
+        final byte[] bytes = new byte[buffer.remaining()];
+        buffer.get(bytes);
+        return bytes;
+    }
+}
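
A minimal usage sketch for the helpers above, assuming only the public methods shown in this file (the class and file name of the sketch are illustrative):

    import htsjdk.samtools.cram.io.ExternalCompression;

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    // Round-trips a small payload through the GZIP helpers defined in ExternalCompression.
    public class ExternalCompressionRoundTrip {
        public static void main(final String[] args) throws IOException {
            final byte[] original = "ACGTACGTACGTACGT".getBytes(StandardCharsets.US_ASCII);
            final byte[] compressed = ExternalCompression.gzip(original);
            final byte[] restored = ExternalCompression.gunzip(compressed);
            System.out.println("round trip ok: " + Arrays.equals(original, restored));
        }
    }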
diff --git a/src/java/htsjdk/samtools/cram/io/ITF8.java b/src/main/java/htsjdk/samtools/cram/io/ITF8.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/io/ITF8.java
rename to src/main/java/htsjdk/samtools/cram/io/ITF8.java
diff --git a/src/java/htsjdk/samtools/cram/io/InputStreamUtils.java b/src/main/java/htsjdk/samtools/cram/io/InputStreamUtils.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/io/InputStreamUtils.java
rename to src/main/java/htsjdk/samtools/cram/io/InputStreamUtils.java
diff --git a/src/java/htsjdk/samtools/cram/io/LTF8.java b/src/main/java/htsjdk/samtools/cram/io/LTF8.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/io/LTF8.java
rename to src/main/java/htsjdk/samtools/cram/io/LTF8.java
diff --git a/src/java/htsjdk/samtools/cram/lossy/BaseCategory.java b/src/main/java/htsjdk/samtools/cram/lossy/BaseCategory.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/lossy/BaseCategory.java
rename to src/main/java/htsjdk/samtools/cram/lossy/BaseCategory.java
diff --git a/src/java/htsjdk/samtools/cram/lossy/BaseCategoryType.java b/src/main/java/htsjdk/samtools/cram/lossy/BaseCategoryType.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/lossy/BaseCategoryType.java
rename to src/main/java/htsjdk/samtools/cram/lossy/BaseCategoryType.java
diff --git a/src/java/htsjdk/samtools/cram/lossy/Binning.java b/src/main/java/htsjdk/samtools/cram/lossy/Binning.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/lossy/Binning.java
rename to src/main/java/htsjdk/samtools/cram/lossy/Binning.java
diff --git a/src/java/htsjdk/samtools/cram/lossy/PreservationPolicy.java b/src/main/java/htsjdk/samtools/cram/lossy/PreservationPolicy.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/lossy/PreservationPolicy.java
rename to src/main/java/htsjdk/samtools/cram/lossy/PreservationPolicy.java
diff --git a/src/java/htsjdk/samtools/cram/lossy/QualityScorePreservation.java b/src/main/java/htsjdk/samtools/cram/lossy/QualityScorePreservation.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/lossy/QualityScorePreservation.java
rename to src/main/java/htsjdk/samtools/cram/lossy/QualityScorePreservation.java
diff --git a/src/java/htsjdk/samtools/cram/lossy/QualityScoreTreatment.java b/src/main/java/htsjdk/samtools/cram/lossy/QualityScoreTreatment.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/lossy/QualityScoreTreatment.java
rename to src/main/java/htsjdk/samtools/cram/lossy/QualityScoreTreatment.java
diff --git a/src/java/htsjdk/samtools/cram/lossy/QualityScoreTreatmentType.java b/src/main/java/htsjdk/samtools/cram/lossy/QualityScoreTreatmentType.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/lossy/QualityScoreTreatmentType.java
rename to src/main/java/htsjdk/samtools/cram/lossy/QualityScoreTreatmentType.java
diff --git a/src/java/htsjdk/samtools/cram/lossy/ReadCategory.java b/src/main/java/htsjdk/samtools/cram/lossy/ReadCategory.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/lossy/ReadCategory.java
rename to src/main/java/htsjdk/samtools/cram/lossy/ReadCategory.java
diff --git a/src/java/htsjdk/samtools/cram/lossy/ReadCategoryType.java b/src/main/java/htsjdk/samtools/cram/lossy/ReadCategoryType.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/lossy/ReadCategoryType.java
rename to src/main/java/htsjdk/samtools/cram/lossy/ReadCategoryType.java
diff --git a/src/java/htsjdk/samtools/cram/ref/CRAMReferenceSource.java b/src/main/java/htsjdk/samtools/cram/ref/CRAMReferenceSource.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/ref/CRAMReferenceSource.java
rename to src/main/java/htsjdk/samtools/cram/ref/CRAMReferenceSource.java
diff --git a/src/java/htsjdk/samtools/cram/ref/EnaRefService.java b/src/main/java/htsjdk/samtools/cram/ref/EnaRefService.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/ref/EnaRefService.java
rename to src/main/java/htsjdk/samtools/cram/ref/EnaRefService.java
diff --git a/src/java/htsjdk/samtools/cram/ref/InMemoryReferenceSequenceFile.java b/src/main/java/htsjdk/samtools/cram/ref/InMemoryReferenceSequenceFile.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/ref/InMemoryReferenceSequenceFile.java
rename to src/main/java/htsjdk/samtools/cram/ref/InMemoryReferenceSequenceFile.java
diff --git a/src/java/htsjdk/samtools/cram/ref/ReferenceRegion.java b/src/main/java/htsjdk/samtools/cram/ref/ReferenceRegion.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/ref/ReferenceRegion.java
rename to src/main/java/htsjdk/samtools/cram/ref/ReferenceRegion.java
diff --git a/src/main/java/htsjdk/samtools/cram/ref/ReferenceSource.java b/src/main/java/htsjdk/samtools/cram/ref/ReferenceSource.java
new file mode 100644
index 0000000..da3d43f
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/cram/ref/ReferenceSource.java
@@ -0,0 +1,274 @@
+/**
+ * ****************************************************************************
+ * Copyright 2013 EMBL-EBI
+ * <p/>
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ****************************************************************************
+ */
+package htsjdk.samtools.cram.ref;
+
+import htsjdk.samtools.Defaults;
+import htsjdk.samtools.SAMException;
+import htsjdk.samtools.SAMSequenceRecord;
+import htsjdk.samtools.cram.io.InputStreamUtils;
+import htsjdk.samtools.reference.ReferenceSequence;
+import htsjdk.samtools.reference.ReferenceSequenceFile;
+import htsjdk.samtools.reference.ReferenceSequenceFileFactory;
+import htsjdk.samtools.util.Log;
+import htsjdk.samtools.util.SequenceUtil;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.lang.ref.WeakReference;
+import java.net.URL;
+import java.nio.file.Path;
+import java.security.NoSuchAlgorithmException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Pattern;
+
+/**
+ * Used to represent a CRAM reference, the backing source for which can either be
+ * a file or the EBI ENA reference service.
+ *
+ * NOTE: In a future release, this class will be renamed and the functionality it
+ * contains will be refactored and distributed into one or more separate reference
+ * source implementations, each corresponding to the type of resource backing the
+ * reference.
+ */
+public class ReferenceSource implements CRAMReferenceSource {
+    private static final Log log = Log.getInstance(ReferenceSource.class);
+    private ReferenceSequenceFile rsFile;
+    private int downloadTriesBeforeFailing = 2;
+
+    private final Map<String, WeakReference<byte[]>> cacheW = new HashMap<String, WeakReference<byte[]>>();
+
+    private ReferenceSource() {
+    }
+
+    public ReferenceSource(final File file) {
+        this(file == null ? null : file.toPath());
+    }
+
+    public ReferenceSource(final Path path) {
+        if (path != null)
+            rsFile = ReferenceSequenceFileFactory.getReferenceSequenceFile(path);
+    }
+
+    public ReferenceSource(final ReferenceSequenceFile rsFile) {
+        this.rsFile = rsFile;
+    }
+
+    /**
+     * Attempts to construct a default CRAMReferenceSource for use with CRAM files when
+     * one has not been explicitly provided.
+     *
+     * @return CRAMReferenceSource if one can be acquired. Guaranteed to not be null if none
+     * of the listed exceptions is thrown.
+     * @throws IllegalStateException if no default reference source can be acquired
+     * @throws IllegalArgumentException if the reference_fasta system property refers to
+     * a file that doesn't exist
+     *<p>
+     * Construct a default reference source to use when an explicit reference has not been
+     * provided by checking for fallback sources in this order:
+     *<p><ul>
+     * <li>Defaults.REFERENCE_FASTA - the value of the system property "reference_fasta". If set,
+     * must refer to a valid reference file.</li>
+     * <li>ENA Reference Service if it is enabled</li>
+     * </ul>
+     */
+     public static CRAMReferenceSource getDefaultCRAMReferenceSource() {
+        if (null != Defaults.REFERENCE_FASTA) {
+            if (Defaults.REFERENCE_FASTA.exists()) {
+                return new ReferenceSource(Defaults.REFERENCE_FASTA);
+            }
+            else {
+                throw new IllegalArgumentException(
+                        "The file specified by the reference_fasta property does not exist: " + Defaults.REFERENCE_FASTA.getName());
+            }
+        }
+        else if (Defaults.USE_CRAM_REF_DOWNLOAD) {
+            return new ReferenceSource();
+        }
+        else {
+            throw new IllegalStateException(
+                    "A valid CRAM reference was not supplied and one cannot be acquired via the property settings reference_fasta or use_cram_ref_download");
+        }
+    }
+
+    public void clearCache() {
+        cacheW.clear();
+    }
+
+    private byte[] findInCache(final String name) {
+        final WeakReference<byte[]> weakReference = cacheW.get(name);
+        if (weakReference != null) {
+            final byte[] bytes = weakReference.get();
+            if (bytes != null)
+                return bytes;
+        }
+        return null;
+    }
+
+    public synchronized byte[] getReferenceBases(final SAMSequenceRecord record,
+                                                 final boolean tryNameVariants) {
+        { // check cache by sequence name:
+            final String name = record.getSequenceName();
+            final byte[] bases = findInCache(name);
+            if (bases != null)
+                return bases;
+        }
+
+        final String md5 = record.getAttribute(SAMSequenceRecord.MD5_TAG);
+        { // check cache by md5:
+            if (md5 != null) {
+                byte[] bases = findInCache(md5);
+                if (bases != null)
+                    return bases;
+                bases = findInCache(md5.toLowerCase());
+                if (bases != null)
+                    return bases;
+                bases = findInCache(md5.toUpperCase());
+                if (bases != null)
+                    return bases;
+            }
+        }
+
+        byte[] bases;
+
+        { // try to fetch sequence by name:
+            bases = findBasesByName(record.getSequenceName(), tryNameVariants);
+            if (bases != null) {
+                SequenceUtil.upperCase(bases);
+                cacheW.put(record.getSequenceName(), new WeakReference<byte[]>(
+                        bases));
+                return bases;
+            }
+        }
+
+        {
+            if (Defaults.USE_CRAM_REF_DOWNLOAD) { // try to fetch sequence by md5:
+                if (md5 != null) {
+                    try {
+                        bases = findBasesByMD5(md5.toLowerCase());
+                    } catch (final Exception e) {
+                        throw new RuntimeException(e);
+                    }
+                }
+                if (bases != null) {
+                    SequenceUtil.upperCase(bases);
+                    cacheW.put(md5, new WeakReference<byte[]>(bases));
+                    return bases;
+                }
+            }
+        }
+
+        // sequence not found, give up:
+        return null;
+    }
+
+    byte[] findBasesByName(final String name, final boolean tryVariants) {
+        if (rsFile == null || !rsFile.isIndexed())
+            return null;
+
+        ReferenceSequence sequence = null;
+        try {
+            sequence = rsFile.getSequence(name);
+        } catch (final SAMException e) {
+            // the only way to test whether rsFile contains the sequence is to try it and catch the exception.
+        }
+        if (sequence != null)
+            return sequence.getBases();
+
+        if (tryVariants) {
+            for (final String variant : getVariants(name)) {
+                try {
+                    sequence = rsFile.getSequence(variant);
+                } catch (final SAMException e) {
+                    log.warn("Sequence not found: " + variant);
+                }
+                if (sequence != null)
+                    return sequence.getBases();
+            }
+        }
+        return null;
+    }
+
+    byte[] findBasesByMD5(final String md5) throws
+            IOException {
+        final String url = String.format(Defaults.EBI_REFERENCE_SERVICE_URL_MASK, md5);
+
+        for (int i = 0; i < downloadTriesBeforeFailing; i++) {
+            final InputStream is = new URL(url).openStream();
+            if (is == null)
+                return null;
+
+            log.debug("Downloading reference sequence: " + url);
+            final byte[] data = InputStreamUtils.readFully(is);
+            log.debug("Downloaded " + data.length + " bytes for md5 " + md5);
+            is.close();
+
+            try {
+                final String downloadedMD5 = SequenceUtil.calculateMD5String(data);
+                if (md5.equals(downloadedMD5)) {
+                    return data;
+                } else {
+                    final String message = String
+                            .format("Downloaded sequence is corrupt: requested md5=%s, received md5=%s",
+                                    md5, downloadedMD5);
+                    log.error(message);
+                }
+            } catch (final NoSuchAlgorithmException e) {
+                throw new RuntimeException(e);
+            }
+        }
+        throw new RuntimeException("Giving up on downloading sequence for md5 "
+                + md5);
+    }
+
+    private static final Pattern chrPattern = Pattern.compile("chr.*",
+            Pattern.CASE_INSENSITIVE);
+
+    List<String> getVariants(final String name) {
+        final List<String> variants = new ArrayList<String>();
+
+        if (name.equals("M"))
+            variants.add("MT");
+
+        if (name.equals("MT"))
+            variants.add("M");
+
+        final boolean chrPatternMatch = chrPattern.matcher(name).matches();
+        if (chrPatternMatch)
+            variants.add(name.substring(3));
+        else
+            variants.add("chr" + name);
+
+        if ("chrM".equals(name)) {
+            // chrM case:
+            variants.add("MT");
+        }
+        return variants;
+    }
+
+    public int getDownloadTriesBeforeFailing() {
+        return downloadTriesBeforeFailing;
+    }
+
+    public void setDownloadTriesBeforeFailing(final int downloadTriesBeforeFailing) {
+        this.downloadTriesBeforeFailing = downloadTriesBeforeFailing;
+    }
+}
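
A minimal sketch of fetching reference bases through the class above; the FASTA path is illustrative and must have a .fai index for lookup by name to succeed:

    import htsjdk.samtools.SAMSequenceRecord;
    import htsjdk.samtools.cram.ref.ReferenceSource;

    import java.io.File;

    // Loads bases for one sequence from an indexed FASTA via ReferenceSource.
    public class ReferenceSourceSketch {
        public static void main(final String[] args) {
            final ReferenceSource source = new ReferenceSource(new File("reference.fasta"));
            // Sequence name and length are placeholders for a record taken from a real dictionary.
            final SAMSequenceRecord record = new SAMSequenceRecord("chr1", 1000000);
            // tryNameVariants=true also tries "1"/"MT"-style aliases, as implemented above.
            final byte[] bases = source.getReferenceBases(record, true);
            System.out.println(bases == null ? "sequence not found" : bases.length + " bases loaded");
        }
    }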
diff --git a/src/java/htsjdk/samtools/cram/ref/ReferenceTracks.java b/src/main/java/htsjdk/samtools/cram/ref/ReferenceTracks.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/ref/ReferenceTracks.java
rename to src/main/java/htsjdk/samtools/cram/ref/ReferenceTracks.java
diff --git a/src/java/htsjdk/samtools/cram/structure/AlignmentSpan.java b/src/main/java/htsjdk/samtools/cram/structure/AlignmentSpan.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/structure/AlignmentSpan.java
rename to src/main/java/htsjdk/samtools/cram/structure/AlignmentSpan.java
diff --git a/src/java/htsjdk/samtools/cram/structure/Block.java b/src/main/java/htsjdk/samtools/cram/structure/Block.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/structure/Block.java
rename to src/main/java/htsjdk/samtools/cram/structure/Block.java
diff --git a/src/java/htsjdk/samtools/cram/structure/BlockCompressionMethod.java b/src/main/java/htsjdk/samtools/cram/structure/BlockCompressionMethod.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/structure/BlockCompressionMethod.java
rename to src/main/java/htsjdk/samtools/cram/structure/BlockCompressionMethod.java
diff --git a/src/java/htsjdk/samtools/cram/structure/BlockContentType.java b/src/main/java/htsjdk/samtools/cram/structure/BlockContentType.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/structure/BlockContentType.java
rename to src/main/java/htsjdk/samtools/cram/structure/BlockContentType.java
diff --git a/src/java/htsjdk/samtools/cram/structure/CompressionHeader.java b/src/main/java/htsjdk/samtools/cram/structure/CompressionHeader.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/structure/CompressionHeader.java
rename to src/main/java/htsjdk/samtools/cram/structure/CompressionHeader.java
diff --git a/src/java/htsjdk/samtools/cram/structure/Container.java b/src/main/java/htsjdk/samtools/cram/structure/Container.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/structure/Container.java
rename to src/main/java/htsjdk/samtools/cram/structure/Container.java
diff --git a/src/main/java/htsjdk/samtools/cram/structure/ContainerHeaderIO.java b/src/main/java/htsjdk/samtools/cram/structure/ContainerHeaderIO.java
new file mode 100644
index 0000000..fd6edfe
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/cram/structure/ContainerHeaderIO.java
@@ -0,0 +1,96 @@
+/**
+ * ****************************************************************************
+ * Copyright 2013 EMBL-EBI
+ * <p/>
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ****************************************************************************
+ */
+package htsjdk.samtools.cram.structure;
+
+import htsjdk.samtools.cram.io.CRC32OutputStream;
+import htsjdk.samtools.cram.io.CramArray;
+import htsjdk.samtools.cram.io.CramInt;
+import htsjdk.samtools.cram.io.ITF8;
+import htsjdk.samtools.cram.io.LTF8;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
+class ContainerHeaderIO {
+
+    public boolean readContainerHeader(final Container container, final InputStream inputStream)
+            throws IOException {
+        return readContainerHeader(2, container, inputStream);
+    }
+
+    public boolean readContainerHeader(final int major, final Container container, final InputStream inputStream)
+            throws IOException {
+        final byte[] peek = new byte[4];
+        int character = inputStream.read();
+        if (character == -1)
+            return false;
+
+        peek[0] = (byte) character;
+        for (int i = 1; i < peek.length; i++) {
+            character = inputStream.read();
+            if (character == -1)
+                throw new RuntimeException("Incomplete or broken stream.");
+            peek[i] = (byte) character;
+        }
+
+        container.containerByteSize = CramInt.int32(peek);
+        container.sequenceId = ITF8.readUnsignedITF8(inputStream);
+        container.alignmentStart = ITF8.readUnsignedITF8(inputStream);
+        container.alignmentSpan = ITF8.readUnsignedITF8(inputStream);
+        container.nofRecords = ITF8.readUnsignedITF8(inputStream);
+        container.globalRecordCounter = LTF8.readUnsignedLTF8(inputStream);
+        container.bases = LTF8.readUnsignedLTF8(inputStream);
+        container.blockCount = ITF8.readUnsignedITF8(inputStream);
+        container.landmarks = CramArray.array(inputStream);
+        if (major >= 3)
+            container.checksum = CramInt.int32(inputStream);
+
+        return true;
+    }
+
+    /**
+     * Write CRAM {@link Container} out into the given {@link OutputStream}.
+     * @param major CRAM major version
+     * @param container container to be written
+     * @param outputStream the output stream to write the container to
+     * @return number of bytes written out to the output stream
+     * @throws IOException as per java IO contract
+     */
+    public int writeContainerHeader(final int major, final Container container, final OutputStream outputStream)
+            throws IOException {
+        final CRC32OutputStream crc32OutputStream = new CRC32OutputStream(outputStream);
+
+        int length = (CramInt.writeInt32(container.containerByteSize, crc32OutputStream) + 7) / 8;
+        length += (ITF8.writeUnsignedITF8(container.sequenceId, crc32OutputStream) + 7) / 8;
+        length += (ITF8.writeUnsignedITF8(container.alignmentStart, crc32OutputStream) + 7) / 8;
+        length += (ITF8.writeUnsignedITF8(container.alignmentSpan, crc32OutputStream) + 7) / 8;
+        length += (ITF8.writeUnsignedITF8(container.nofRecords, crc32OutputStream) + 7) / 8;
+        length += (LTF8.writeUnsignedLTF8(container.globalRecordCounter, crc32OutputStream) + 7) / 8;
+        length += (LTF8.writeUnsignedLTF8(container.bases, crc32OutputStream) + 7) / 8;
+        length += (ITF8.writeUnsignedITF8(container.blockCount, crc32OutputStream) + 7) / 8;
+        length += (CramArray.write(container.landmarks, crc32OutputStream) + 7) / 8;
+
+        if (major >= 3) {
+            outputStream.write(crc32OutputStream.getCrc32_LittleEndian());
+            length += 4;
+        }
+
+        return length;
+    }
+}
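
ContainerHeaderIO itself is package-private; the header fields it serializes use the CRAM ITF8 variable-length integer encoding, which can be exercised directly. A small round-trip sketch (the value is arbitrary):

    import htsjdk.samtools.cram.io.ITF8;

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;

    // Writes an int as ITF8 and reads it back, mirroring the field encoding used by the container header.
    public class ITF8RoundTrip {
        public static void main(final String[] args) throws IOException {
            final ByteArrayOutputStream out = new ByteArrayOutputStream();
            final int bits = ITF8.writeUnsignedITF8(300, out); // returns bits written, hence the (+ 7) / 8 above
            final int value = ITF8.readUnsignedITF8(new ByteArrayInputStream(out.toByteArray()));
            System.out.println("bytes=" + (bits + 7) / 8 + " value=" + value); // value should be 300 again
        }
    }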
diff --git a/src/java/htsjdk/samtools/cram/structure/ContainerIO.java b/src/main/java/htsjdk/samtools/cram/structure/ContainerIO.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/structure/ContainerIO.java
rename to src/main/java/htsjdk/samtools/cram/structure/ContainerIO.java
diff --git a/src/java/htsjdk/samtools/cram/structure/CramCompressionRecord.java b/src/main/java/htsjdk/samtools/cram/structure/CramCompressionRecord.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/structure/CramCompressionRecord.java
rename to src/main/java/htsjdk/samtools/cram/structure/CramCompressionRecord.java
diff --git a/src/java/htsjdk/samtools/cram/structure/CramHeader.java b/src/main/java/htsjdk/samtools/cram/structure/CramHeader.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/structure/CramHeader.java
rename to src/main/java/htsjdk/samtools/cram/structure/CramHeader.java
diff --git a/src/java/htsjdk/samtools/cram/structure/EncodingID.java b/src/main/java/htsjdk/samtools/cram/structure/EncodingID.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/structure/EncodingID.java
rename to src/main/java/htsjdk/samtools/cram/structure/EncodingID.java
diff --git a/src/java/htsjdk/samtools/cram/structure/EncodingKey.java b/src/main/java/htsjdk/samtools/cram/structure/EncodingKey.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/structure/EncodingKey.java
rename to src/main/java/htsjdk/samtools/cram/structure/EncodingKey.java
diff --git a/src/java/htsjdk/samtools/cram/structure/EncodingParams.java b/src/main/java/htsjdk/samtools/cram/structure/EncodingParams.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/structure/EncodingParams.java
rename to src/main/java/htsjdk/samtools/cram/structure/EncodingParams.java
diff --git a/src/java/htsjdk/samtools/cram/structure/ReadTag.java b/src/main/java/htsjdk/samtools/cram/structure/ReadTag.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/structure/ReadTag.java
rename to src/main/java/htsjdk/samtools/cram/structure/ReadTag.java
diff --git a/src/java/htsjdk/samtools/cram/structure/Slice.java b/src/main/java/htsjdk/samtools/cram/structure/Slice.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/structure/Slice.java
rename to src/main/java/htsjdk/samtools/cram/structure/Slice.java
diff --git a/src/java/htsjdk/samtools/cram/structure/SliceIO.java b/src/main/java/htsjdk/samtools/cram/structure/SliceIO.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/structure/SliceIO.java
rename to src/main/java/htsjdk/samtools/cram/structure/SliceIO.java
diff --git a/src/java/htsjdk/samtools/cram/structure/SubstitutionMatrix.java b/src/main/java/htsjdk/samtools/cram/structure/SubstitutionMatrix.java
similarity index 100%
rename from src/java/htsjdk/samtools/cram/structure/SubstitutionMatrix.java
rename to src/main/java/htsjdk/samtools/cram/structure/SubstitutionMatrix.java
diff --git a/src/java/htsjdk/samtools/example/ExampleSamUsage.java b/src/main/java/htsjdk/samtools/example/ExampleSamUsage.java
similarity index 100%
rename from src/java/htsjdk/samtools/example/ExampleSamUsage.java
rename to src/main/java/htsjdk/samtools/example/ExampleSamUsage.java
diff --git a/src/main/java/htsjdk/samtools/example/PrintReadsExample.java b/src/main/java/htsjdk/samtools/example/PrintReadsExample.java
new file mode 100755
index 0000000..7bbec0a
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/example/PrintReadsExample.java
@@ -0,0 +1,98 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2016 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools.example;
+
+import htsjdk.samtools.*;
+import htsjdk.samtools.util.Log;
+import htsjdk.samtools.util.ProgressLogger;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.InetAddress;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+
+
+/**
+ * This is an example program showing how to use SAM readers and (optionally) writers.
+ * It's also useful for measuring time.
+ * An example invocation is:
+ * <code>java -cp dist/htsjdk-2.1.1.jar htsjdk.samtools.example.PrintReadsExample in.bam false a.bam</code>
+ * Arguments:
+ * - the first argument is the input file (SAM or BAM)
+ * - the second argument is a boolean (true or false) that indicates whether reads are to be eagerly decoded (useful for benchmarking)
+ * - the third argument is optional and is the name of the output file (nothing gets written if this argument is missing)
+ */
+public final class PrintReadsExample {
+    private PrintReadsExample() {
+    }
+
+    private static final Log log = Log.getInstance(PrintReadsExample.class);
+
+    public static void main(String[] args) throws IOException {
+        if (args.length < 2) {
+            System.out.println("Usage: " + PrintReadsExample.class.getCanonicalName() + " inFile eagerDecode [outFile]");
+            System.exit(1);
+        }
+        final File inputFile = new File(args[0]);
+        final boolean eagerDecode = Boolean.parseBoolean(args[1]); //useful to test (realistic) scenarios in which every record is always fully decoded.
+        final File outputFile = args.length >= 3 ? new File(args[2]) : null;
+
+        final long start = System.currentTimeMillis();
+
+        log.info("Start with args:" + Arrays.toString(args));
+        printConfigurationInfo();
+
+        SamReaderFactory readerFactory = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT);
+        if (eagerDecode) {
+            readerFactory = readerFactory.enable(SamReaderFactory.Option.EAGERLY_DECODE);
+        }
+
+        try (final SamReader reader = readerFactory.open(inputFile)) {
+            final SAMFileHeader header = reader.getFileHeader();
+            try (final SAMFileWriter writer = outputFile != null ? new SAMFileWriterFactory().makeBAMWriter(header, true, outputFile) : null) {
+                final ProgressLogger pl = new ProgressLogger(log, 1000000);
+                for (final SAMRecord record : reader) {
+                    if (writer != null) {
+                        writer.addAlignment(record);
+                    }
+                    pl.record(record);
+                }
+            }
+        }
+        final long end = System.currentTimeMillis();
+        log.info(String.format("Done. Elapsed time %.3f seconds", (end - start) / 1000.0));
+    }
+
+    private static void printConfigurationInfo() throws IOException {
+        log.info("Executing as " +
+                System.getProperty("user.name") + '@' + InetAddress.getLocalHost().getHostName() +
+                " on " + System.getProperty("os.name") + ' ' + System.getProperty("os.version") +
+                ' ' + System.getProperty("os.arch") + "; " + System.getProperty("java.vm.name") +
+                ' ' + System.getProperty("java.runtime.version"));
+
+        final List<String> list = Defaults.allDefaults().entrySet().stream().map(e -> e.getKey() + ':' + e.getValue()).collect(Collectors.toList());
+        log.info(String.join(" ", list));
+    }
+}
diff --git a/src/java/htsjdk/samtools/fastq/AsyncFastqWriter.java b/src/main/java/htsjdk/samtools/fastq/AsyncFastqWriter.java
similarity index 100%
rename from src/java/htsjdk/samtools/fastq/AsyncFastqWriter.java
rename to src/main/java/htsjdk/samtools/fastq/AsyncFastqWriter.java
diff --git a/src/java/htsjdk/samtools/fastq/BasicFastqWriter.java b/src/main/java/htsjdk/samtools/fastq/BasicFastqWriter.java
similarity index 100%
rename from src/java/htsjdk/samtools/fastq/BasicFastqWriter.java
rename to src/main/java/htsjdk/samtools/fastq/BasicFastqWriter.java
diff --git a/src/java/htsjdk/samtools/fastq/FastqConstants.java b/src/main/java/htsjdk/samtools/fastq/FastqConstants.java
similarity index 100%
rename from src/java/htsjdk/samtools/fastq/FastqConstants.java
rename to src/main/java/htsjdk/samtools/fastq/FastqConstants.java
diff --git a/src/java/htsjdk/samtools/fastq/FastqReader.java b/src/main/java/htsjdk/samtools/fastq/FastqReader.java
similarity index 100%
rename from src/java/htsjdk/samtools/fastq/FastqReader.java
rename to src/main/java/htsjdk/samtools/fastq/FastqReader.java
diff --git a/src/java/htsjdk/samtools/fastq/FastqRecord.java b/src/main/java/htsjdk/samtools/fastq/FastqRecord.java
similarity index 100%
rename from src/java/htsjdk/samtools/fastq/FastqRecord.java
rename to src/main/java/htsjdk/samtools/fastq/FastqRecord.java
diff --git a/src/java/htsjdk/samtools/fastq/FastqWriter.java b/src/main/java/htsjdk/samtools/fastq/FastqWriter.java
similarity index 100%
rename from src/java/htsjdk/samtools/fastq/FastqWriter.java
rename to src/main/java/htsjdk/samtools/fastq/FastqWriter.java
diff --git a/src/main/java/htsjdk/samtools/fastq/FastqWriterFactory.java b/src/main/java/htsjdk/samtools/fastq/FastqWriterFactory.java
new file mode 100644
index 0000000..273e352
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/fastq/FastqWriterFactory.java
@@ -0,0 +1,31 @@
+package htsjdk.samtools.fastq;
+
+import htsjdk.samtools.Defaults;
+
+import java.io.File;
+
+/**
+ * Factory class for creating FastqWriter objects.
+ *
+ * @author Tim Fennell
+ */
+public class FastqWriterFactory {
+    boolean useAsyncIo = Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS;
+    boolean createMd5  = Defaults.CREATE_MD5;
+
+    /** Sets whether or not to use async io (i.e. a dedicated thread per writer). */
+    public void setUseAsyncIo(final boolean useAsyncIo) { this.useAsyncIo = useAsyncIo; }
+
+    /** If true, compute MD5 and write appropriately-named file when file is closed. */
+    public void setCreateMd5(final boolean createMd5) { this.createMd5 = createMd5; }
+
+    public FastqWriter newWriter(final File out) {
+        final FastqWriter writer = new BasicFastqWriter(out, createMd5);
+        if (useAsyncIo) {
+            return new AsyncFastqWriter(writer, AsyncFastqWriter.DEFAULT_QUEUE_SIZE);
+        }
+        else {
+            return writer;
+        }
+    }
+}
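
A minimal sketch of writing one record through the factory above; the output path and read data are illustrative:

    import htsjdk.samtools.fastq.FastqRecord;
    import htsjdk.samtools.fastq.FastqWriter;
    import htsjdk.samtools.fastq.FastqWriterFactory;

    import java.io.File;

    // Creates a synchronous FastqWriter and writes a single record.
    public class FastqWriterSketch {
        public static void main(final String[] args) {
            final FastqWriterFactory factory = new FastqWriterFactory();
            factory.setUseAsyncIo(false); // keep the sketch single-threaded
            final FastqWriter writer = factory.newWriter(new File("example.fastq"));
            writer.write(new FastqRecord("read1", "ACGT", "", "IIII"));
            writer.close();
        }
    }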
diff --git a/src/java/htsjdk/samtools/filter/AbstractJavascriptFilter.java b/src/main/java/htsjdk/samtools/filter/AbstractJavascriptFilter.java
similarity index 100%
rename from src/java/htsjdk/samtools/filter/AbstractJavascriptFilter.java
rename to src/main/java/htsjdk/samtools/filter/AbstractJavascriptFilter.java
diff --git a/src/java/htsjdk/samtools/filter/AggregateFilter.java b/src/main/java/htsjdk/samtools/filter/AggregateFilter.java
similarity index 100%
rename from src/java/htsjdk/samtools/filter/AggregateFilter.java
rename to src/main/java/htsjdk/samtools/filter/AggregateFilter.java
diff --git a/src/java/htsjdk/samtools/filter/AlignedFilter.java b/src/main/java/htsjdk/samtools/filter/AlignedFilter.java
similarity index 100%
rename from src/java/htsjdk/samtools/filter/AlignedFilter.java
rename to src/main/java/htsjdk/samtools/filter/AlignedFilter.java
diff --git a/src/java/htsjdk/samtools/filter/DuplicateReadFilter.java b/src/main/java/htsjdk/samtools/filter/DuplicateReadFilter.java
similarity index 100%
rename from src/java/htsjdk/samtools/filter/DuplicateReadFilter.java
rename to src/main/java/htsjdk/samtools/filter/DuplicateReadFilter.java
diff --git a/src/java/htsjdk/samtools/filter/FailsVendorReadQualityFilter.java b/src/main/java/htsjdk/samtools/filter/FailsVendorReadQualityFilter.java
similarity index 100%
rename from src/java/htsjdk/samtools/filter/FailsVendorReadQualityFilter.java
rename to src/main/java/htsjdk/samtools/filter/FailsVendorReadQualityFilter.java
diff --git a/src/java/htsjdk/samtools/filter/FilteringIterator.java b/src/main/java/htsjdk/samtools/filter/FilteringIterator.java
similarity index 100%
rename from src/java/htsjdk/samtools/filter/FilteringIterator.java
rename to src/main/java/htsjdk/samtools/filter/FilteringIterator.java
diff --git a/src/java/htsjdk/samtools/filter/FilteringSamIterator.java b/src/main/java/htsjdk/samtools/filter/FilteringSamIterator.java
similarity index 100%
rename from src/java/htsjdk/samtools/filter/FilteringSamIterator.java
rename to src/main/java/htsjdk/samtools/filter/FilteringSamIterator.java
diff --git a/src/java/htsjdk/samtools/filter/InsertSizeFilter.java b/src/main/java/htsjdk/samtools/filter/InsertSizeFilter.java
similarity index 100%
rename from src/java/htsjdk/samtools/filter/InsertSizeFilter.java
rename to src/main/java/htsjdk/samtools/filter/InsertSizeFilter.java
diff --git a/src/java/htsjdk/samtools/filter/IntervalFilter.java b/src/main/java/htsjdk/samtools/filter/IntervalFilter.java
similarity index 100%
rename from src/java/htsjdk/samtools/filter/IntervalFilter.java
rename to src/main/java/htsjdk/samtools/filter/IntervalFilter.java
diff --git a/src/java/htsjdk/samtools/filter/JavascriptSamRecordFilter.java b/src/main/java/htsjdk/samtools/filter/JavascriptSamRecordFilter.java
similarity index 100%
rename from src/java/htsjdk/samtools/filter/JavascriptSamRecordFilter.java
rename to src/main/java/htsjdk/samtools/filter/JavascriptSamRecordFilter.java
diff --git a/src/java/htsjdk/samtools/filter/MappingQualityFilter.java b/src/main/java/htsjdk/samtools/filter/MappingQualityFilter.java
similarity index 100%
rename from src/java/htsjdk/samtools/filter/MappingQualityFilter.java
rename to src/main/java/htsjdk/samtools/filter/MappingQualityFilter.java
diff --git a/src/java/htsjdk/samtools/filter/NotPrimaryAlignmentFilter.java b/src/main/java/htsjdk/samtools/filter/NotPrimaryAlignmentFilter.java
similarity index 100%
rename from src/java/htsjdk/samtools/filter/NotPrimaryAlignmentFilter.java
rename to src/main/java/htsjdk/samtools/filter/NotPrimaryAlignmentFilter.java
diff --git a/src/java/htsjdk/samtools/filter/OverclippedReadFilter.java b/src/main/java/htsjdk/samtools/filter/OverclippedReadFilter.java
similarity index 100%
rename from src/java/htsjdk/samtools/filter/OverclippedReadFilter.java
rename to src/main/java/htsjdk/samtools/filter/OverclippedReadFilter.java
diff --git a/src/java/htsjdk/samtools/filter/ReadNameFilter.java b/src/main/java/htsjdk/samtools/filter/ReadNameFilter.java
similarity index 100%
rename from src/java/htsjdk/samtools/filter/ReadNameFilter.java
rename to src/main/java/htsjdk/samtools/filter/ReadNameFilter.java
diff --git a/src/java/htsjdk/samtools/filter/SamRecordFilter.java b/src/main/java/htsjdk/samtools/filter/SamRecordFilter.java
similarity index 100%
rename from src/java/htsjdk/samtools/filter/SamRecordFilter.java
rename to src/main/java/htsjdk/samtools/filter/SamRecordFilter.java
diff --git a/src/java/htsjdk/samtools/filter/SecondaryAlignmentFilter.java b/src/main/java/htsjdk/samtools/filter/SecondaryAlignmentFilter.java
similarity index 100%
rename from src/java/htsjdk/samtools/filter/SecondaryAlignmentFilter.java
rename to src/main/java/htsjdk/samtools/filter/SecondaryAlignmentFilter.java
diff --git a/src/java/htsjdk/samtools/filter/SecondaryOrSupplementaryFilter.java b/src/main/java/htsjdk/samtools/filter/SecondaryOrSupplementaryFilter.java
similarity index 100%
rename from src/java/htsjdk/samtools/filter/SecondaryOrSupplementaryFilter.java
rename to src/main/java/htsjdk/samtools/filter/SecondaryOrSupplementaryFilter.java
diff --git a/src/java/htsjdk/samtools/filter/SolexaNoiseFilter.java b/src/main/java/htsjdk/samtools/filter/SolexaNoiseFilter.java
similarity index 100%
rename from src/java/htsjdk/samtools/filter/SolexaNoiseFilter.java
rename to src/main/java/htsjdk/samtools/filter/SolexaNoiseFilter.java
diff --git a/src/java/htsjdk/samtools/filter/TagFilter.java b/src/main/java/htsjdk/samtools/filter/TagFilter.java
similarity index 100%
rename from src/java/htsjdk/samtools/filter/TagFilter.java
rename to src/main/java/htsjdk/samtools/filter/TagFilter.java
diff --git a/src/java/htsjdk/samtools/filter/WholeReadClippedFilter.java b/src/main/java/htsjdk/samtools/filter/WholeReadClippedFilter.java
similarity index 100%
rename from src/java/htsjdk/samtools/filter/WholeReadClippedFilter.java
rename to src/main/java/htsjdk/samtools/filter/WholeReadClippedFilter.java
diff --git a/src/java/htsjdk/samtools/liftover/Chain.java b/src/main/java/htsjdk/samtools/liftover/Chain.java
similarity index 100%
rename from src/java/htsjdk/samtools/liftover/Chain.java
rename to src/main/java/htsjdk/samtools/liftover/Chain.java
diff --git a/src/main/java/htsjdk/samtools/liftover/LiftOver.java b/src/main/java/htsjdk/samtools/liftover/LiftOver.java
new file mode 100644
index 0000000..e422a72
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/liftover/LiftOver.java
@@ -0,0 +1,319 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools.liftover;
+
+import htsjdk.samtools.SAMException;
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.util.IOUtil;
+import htsjdk.samtools.util.Interval;
+import htsjdk.samtools.util.Log;
+import htsjdk.samtools.util.OverlapDetector;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Java port of UCSC liftOver.  Only the most basic liftOver functionality is implemented.
+ * Internally coordinates are 0-based, half-open. The API is standard Picard 1-based, inclusive.
+ *
+ * @author alecw at broadinstitute.org
+ */
+public class LiftOver {
+    private static final Log LOG = Log.getInstance(LiftOver.class);
+    
+    public static final double DEFAULT_LIFTOVER_MINMATCH = 0.95;
+
+    private double liftOverMinMatch = DEFAULT_LIFTOVER_MINMATCH;
+    private final OverlapDetector<Chain> chains;
+    private final Map<String, Set<String>> contigMap = new HashMap<>();
+
+    /**
+     * Load UCSC chain file in order to lift over Intervals.
+     */
+    public LiftOver(File chainFile) {
+        IOUtil.assertFileIsReadable(chainFile);
+        chains = Chain.loadChains(chainFile);
+
+        for (final Chain chain : this.chains.getAll()) {
+            final String from = chain.fromSequenceName;
+            final String to   = chain.toSequenceName;
+            final Set<String> names;
+            if (contigMap.containsKey(from)) {
+                names = contigMap.get(from);
+            }
+            else {
+                names = new HashSet<>();
+                contigMap.put(from, names);
+            }
+            names.add(to);
+        }
+    }
+
+    /**
+     * Throw an exception if all the "to" sequence names in the chains are not found in the given sequence dictionary.
+     */
+    public void validateToSequences(final SAMSequenceDictionary sequenceDictionary) {
+        for (final Chain chain : chains.getAll()) {
+            if (sequenceDictionary.getSequence(chain.toSequenceName) == null) {
+                throw new SAMException("Sequence " + chain.toSequenceName + " from chain file is not found in sequence dictionary.");
+            }
+        }
+
+    }
+
+    /**
+     * Lift over the given interval to the new genome build using the liftOverMinMatch set for this
+     * LiftOver object.
+     * @param interval Interval to be lifted over.
+     * @return Interval in the output build coordinates, or null if it cannot be lifted over.
+     */
+    public Interval liftOver(final Interval interval) {
+        return liftOver(interval, liftOverMinMatch);
+    }
+
+    /**
+     * Lift over the given interval to the new genome build.
+     * @param interval Interval to be lifted over.
+     * @param liftOverMinMatch Minimum fraction of bases that must remap.
+     * @return Interval in the output build coordinates, or null if it cannot be lifted over.
+     */
+    public Interval liftOver(final Interval interval, final double liftOverMinMatch) {
+        if (interval.length() == 0) {
+            throw new IllegalArgumentException("Zero-length interval cannot be lifted over.  Interval: " +
+                    interval.getName());
+        }
+        Chain chainHit = null;
+        TargetIntersection targetIntersection = null;
+        // Number of bases in interval that can be lifted over must be >= this.
+        double minMatchSize = liftOverMinMatch * interval.length();
+
+        // Find the appropriate Chain, and the part of the chain corresponding to the interval to be lifted over.
+        for (final Chain chain : chains.getOverlaps(interval)) {
+            final TargetIntersection candidateIntersection = targetIntersection(chain, interval);
+            if (candidateIntersection != null && candidateIntersection.intersectionLength >= minMatchSize) {
+                if (chainHit != null) {
+                    // In basic liftOver, multiple hits are not allowed.
+                    return null;
+                }
+                chainHit = chain;
+                targetIntersection = candidateIntersection;
+            } else if (candidateIntersection != null) {
+                LOG.info("Interval " + interval.getName() + " failed to match chain " + chain.id +
+                " because intersection length " + candidateIntersection.intersectionLength + " < minMatchSize "
+                + minMatchSize +
+                " (" + (candidateIntersection.intersectionLength/(float)interval.length()) + " < " + liftOverMinMatch + ")");
+            }
+        }
+        if (chainHit == null) {
+            // Can't be lifted over.
+            return null;
+        }
+
+        return createToInterval(interval.getName(), interval.isNegativeStrand(), targetIntersection);
+    }
+
+    public List<PartialLiftover> diagnosticLiftover(final Interval interval) {
+        final List<PartialLiftover> ret = new ArrayList<PartialLiftover>();
+        if (interval.length() == 0) {
+            throw new IllegalArgumentException("Zero-length interval cannot be lifted over.  Interval: " +
+                    interval.getName());
+        }
+        for (final Chain chain : chains.getOverlaps(interval)) {
+            Interval intersectingChain = interval.intersect(chain.interval);
+            final TargetIntersection targetIntersection = targetIntersection(chain, intersectingChain);
+            if (targetIntersection == null) {
+                ret.add(new PartialLiftover(intersectingChain, chain.id));
+            } else {
+                Interval toInterval = createToInterval(interval.getName(), interval.isNegativeStrand(), targetIntersection);
+                float percentLiftedOver = targetIntersection.intersectionLength/(float)interval.length();
+                ret.add(new PartialLiftover(intersectingChain, toInterval, targetIntersection.chain.id, percentLiftedOver));
+            }
+        }
+        return ret;
+    }
+
+    /**
+     * @return the set of destination contigs for each source contig in the chains file.
+     */
+    public Map<String, Set<String>> getContigMap() {
+        return Collections.unmodifiableMap(contigMap);
+    }
+
+    private static Interval createToInterval(final String intervalName, final boolean sourceNegativeStrand, final TargetIntersection targetIntersection) {
+        // Compute the query interval given the offsets of the target interval start and end into the first and
+        // last ContinuousBlocks.
+        int toStart = targetIntersection.chain.getBlock(targetIntersection.firstBlockIndex).toStart + targetIntersection.startOffset;
+        int toEnd = targetIntersection.chain.getBlock(targetIntersection.lastBlockIndex).getToEnd() - targetIntersection.offsetFromEnd;
+        if (toEnd <= toStart || toStart < 0) {
+            throw new SAMException("Something strange lifting over interval " + intervalName);
+        }
+
+        if (targetIntersection.chain.toOppositeStrand) {
+            // The chain's query ("to") sequence is on the negative strand, so flip the coordinates.
+            int negativeStart = targetIntersection.chain.toSequenceSize - toEnd;
+            int negativeEnd = targetIntersection.chain.toSequenceSize - toStart;
+            toStart = negativeStart;
+            toEnd = negativeEnd;
+        }
+        // Convert to 1-based, inclusive.
+        final boolean negativeStrand = targetIntersection.chain.toOppositeStrand ? !sourceNegativeStrand : sourceNegativeStrand;
+        return new Interval(targetIntersection.chain.toSequenceName, toStart+1, toEnd, negativeStrand, intervalName);
+    }
+
+    /**
+     * Add up the overlap between the blocks in this chain and the given interval.
+     * @return Length of overlap, offsets into first and last ContinuousBlocks, and indices of first and
+     * last ContinuousBlocks.
+     */
+    private static TargetIntersection targetIntersection(final Chain chain, final Interval interval) {
+        int intersectionLength = 0;
+        // Convert interval to 0-based, half-open
+        int start = interval.getStart() - 1;
+        int end = interval.getEnd();
+        int firstBlockIndex = -1;
+        int lastBlockIndex = -1;
+        int startOffset = -1;
+        int offsetFromEnd = -1;
+        List<Chain.ContinuousBlock> blockList = chain.getBlocks();
+        for (int i = 0; i < blockList.size(); ++i) {
+            final Chain.ContinuousBlock block = blockList.get(i);
+            if (block.fromStart >= end) {
+                break;
+            } else if (block.getFromEnd() <= start) {
+                continue;
+            }
+            if (firstBlockIndex == -1) {
+                firstBlockIndex = i;
+                if (start > block.fromStart) {
+                    startOffset = start - block.fromStart;
+                } else {
+                    startOffset = 0;
+                }
+            }
+            lastBlockIndex = i;
+            if (block.getFromEnd() > end) {
+                offsetFromEnd = block.getFromEnd() - end;
+            } else {
+                offsetFromEnd = 0;
+            }
+            int thisIntersection = Math.min(end, block.getFromEnd()) - Math.max(start, block.fromStart);
+            if (thisIntersection <= 0) {
+                throw new SAMException("Should have been some intersection.");
+            }
+            intersectionLength += thisIntersection;
+        }
+        if (intersectionLength == 0) {
+            return null;
+        }
+        return new TargetIntersection(chain, intersectionLength, startOffset, offsetFromEnd, firstBlockIndex, lastBlockIndex);
+    }
+
+    /**
+     * Get minimum fraction of bases that must remap.
+     */
+    public double getLiftOverMinMatch() {
+        return liftOverMinMatch;
+    }
+
+    /**
+     * Set minimum fraction of bases that must remap.
+     */
+    public void setLiftOverMinMatch(final double liftOverMinMatch) {
+        this.liftOverMinMatch = liftOverMinMatch;
+    }
+
+    /**
+    * Value class returned by targetIntersection()
+    */
+    private static class TargetIntersection {
+        /** Chain used for this intersection */
+        final Chain chain;
+        /** Total intersection length. */
+        final int intersectionLength;
+        /** Offset of target interval start in first block. */
+        final int startOffset;
+        /** Distance from target interval end to end of last block. */
+        final int offsetFromEnd;
+        /** Index of first ContinuousBlock matching interval. */
+        final int firstBlockIndex;
+        /** Index of last ContinuousBlock matching interval. */
+        final int lastBlockIndex;
+
+        TargetIntersection(final Chain chain, final int intersectionLength, final int startOffset,
+                           final int offsetFromEnd, final int firstBlockIndex, final int lastBlockIndex) {
+            this.chain = chain;
+            this.intersectionLength = intersectionLength;
+            this.startOffset = startOffset;
+            this.offsetFromEnd = offsetFromEnd;
+            this.firstBlockIndex = firstBlockIndex;
+            this.lastBlockIndex = lastBlockIndex;
+        }
+    }
+
+    /**
+     * Represents a portion of a liftover operation, for use in diagnosing liftover failures.
+     */
+    public static class PartialLiftover {
+        /** Intersection between "from" interval and "from" region of a chain. */
+        final Interval fromInterval;
+        /**
+         * Result of lifting over fromInterval (with no percentage mapped requirement).  This is null
+         * if fromInterval falls entirely within a gap of the chain. */
+        final Interval toInterval;
+        /** id of chain used for this liftover */
+        final int chainId;
+        /** Percentage of bases in fromInterval that lifted over.  0 if fromInterval is not covered by any chain. */
+        final float percentLiftedOver;
+
+        PartialLiftover(final Interval fromInterval, final Interval toInterval, final int chainId, final float percentLiftedOver) {
+            this.fromInterval = fromInterval;
+            this.toInterval = toInterval;
+            this.chainId = chainId;
+            this.percentLiftedOver = percentLiftedOver;
+        }
+
+        PartialLiftover(final Interval fromInterval, final int chainId) {
+            this.fromInterval = fromInterval;
+            this.toInterval = null;
+            this.chainId = chainId;
+            this.percentLiftedOver = 0.0f;
+        }
+
+        public String toString() {
+            if (toInterval == null) {
+                // Matched a chain, but entirely within a gap.
+                return fromInterval.toString() + " (len " + fromInterval.length() + ")=>null using chain " + chainId;
+            }
+            final String strand = toInterval.isNegativeStrand()? "-": "+";
+            return fromInterval.toString() + " (len " + fromInterval.length() + ")=>" + toInterval + "(" + strand
+                    + ") using chain " + chainId + " ; pct matched " + percentLiftedOver;
+        }
+    }
+}
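For orientation, here is a minimal usage sketch of the liftOver() entry point above. It assumes the LiftOver(File chainFile) constructor declared earlier in this class and htsjdk.samtools.util.Interval; the chain file name and coordinates are hypothetical.

    import htsjdk.samtools.liftover.LiftOver;
    import htsjdk.samtools.util.Interval;
    import java.io.File;

    public class LiftOverExample {
        public static void main(String[] args) {
            // Hypothetical UCSC chain file mapping one build to another.
            final LiftOver liftOver = new LiftOver(new File("hg19ToHg38.over.chain"));
            // Source interval: 1-based, inclusive coordinates on the positive strand.
            final Interval source = new Interval("chr1", 1000000, 1000500, false, "probe_1");
            // Returns null when no single chain covers >= liftOverMinMatch of the interval.
            final Interval target = liftOver.liftOver(source);
            System.out.println(target == null ? "not lifted over" : target.toString());
        }
    }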
diff --git a/src/java/htsjdk/samtools/metrics/Header.java b/src/main/java/htsjdk/samtools/metrics/Header.java
similarity index 100%
rename from src/java/htsjdk/samtools/metrics/Header.java
rename to src/main/java/htsjdk/samtools/metrics/Header.java
diff --git a/src/java/htsjdk/samtools/metrics/MetricBase.java b/src/main/java/htsjdk/samtools/metrics/MetricBase.java
similarity index 100%
rename from src/java/htsjdk/samtools/metrics/MetricBase.java
rename to src/main/java/htsjdk/samtools/metrics/MetricBase.java
diff --git a/src/main/java/htsjdk/samtools/metrics/MetricsFile.java b/src/main/java/htsjdk/samtools/metrics/MetricsFile.java
new file mode 100644
index 0000000..ac0628a
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/metrics/MetricsFile.java
@@ -0,0 +1,594 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+
+package htsjdk.samtools.metrics;
+
+import htsjdk.samtools.SAMException;
+import htsjdk.samtools.util.CloserUtil;
+import htsjdk.samtools.util.FormatUtil;
+import htsjdk.samtools.util.Histogram;
+import htsjdk.samtools.util.IOUtil;
+import htsjdk.samtools.util.StringUtil;
+
+import java.io.*;
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.TreeSet;
+
+/**
+ * Contains a set of metrics that can be written to a file and parsed back
+ * again. The set of metrics is composed of zero or more instances of a class,
+ * BEAN, that extends {@link MetricBase} (all instances must be of the same type)
+ * and may optionally include one or more histograms that share the same key set.
+ *
+ * @author Tim Fennell
+ */
+public class MetricsFile<BEAN extends MetricBase, HKEY extends Comparable> implements Serializable {
+    private static final long serialVersionUID = 1L;
+
+    public static final String MAJOR_HEADER_PREFIX = "## ";
+    public static final String MINOR_HEADER_PREFIX = "# ";
+    public static final String SEPARATOR = "\t";
+    public static final String HISTO_HEADER = "## HISTOGRAM\t";
+    public static final String METRIC_HEADER = "## METRICS CLASS\t";
+
+    private final Set<String> columnLabels = new HashSet<>();
+    private final List<Header> headers = new ArrayList<>();
+    private final List<BEAN> metrics = new ArrayList<>();
+    private final List<Histogram<HKEY>> histograms = new ArrayList<>();
+
+    /** Adds a header to the collection of metrics. */
+    public void addHeader(Header h) { this.headers.add(h); }
+
+    /** Returns the list of headers. */
+    public List<Header> getHeaders() { return Collections.unmodifiableList(this.headers); }
+
+    /** Adds a bean to the collection of metrics. */
+    public void addMetric(final BEAN bean) { this.metrics.add(bean); }
+
+    /** Add multiple metric beans at once. */
+    public void addAllMetrics(final Iterable<BEAN> beanz) {
+        for (final BEAN bean : beanz) { this.addMetric(bean); }
+    }
+
+    /** Returns the list of metric beans. */
+    public List<BEAN> getMetrics() { return Collections.unmodifiableList(this.metrics); }
+
+    public Set<String> getMetricsColumnLabels() { return Collections.unmodifiableSet(this.columnLabels); }
+
+    /** Returns the histogram contained in the metrics file if any. */
+    public Histogram<HKEY> getHistogram() {
+        if (!histograms.isEmpty()) return this.histograms.get(0);
+        else return null;
+    }
+
+    /** Sets the histogram contained in the metrics file. */
+    public void setHistogram(final Histogram<HKEY> histogram) {
+        if (this.histograms.isEmpty()) {
+            if (histogram != null) this.histograms.add(histogram);
+        }
+        else {
+            this.histograms.set(0, histogram);
+        }
+    }
+
+    /** Adds a histogram to the list of histograms in the metrics file. */
+    public void addHistogram(final Histogram<HKEY> histogram) {
+        this.histograms.add(histogram);
+    }
+
+    /** Returns an unmodifiable version of the histogram list. */
+    public List<Histogram<HKEY>> getAllHistograms() {
+        return Collections.unmodifiableList(histograms);
+    }
+
+    /** Returns the number of histograms added to the metrics file. */
+    public int getNumHistograms() {
+        return this.histograms.size();
+    }
+
+    /** Returns the list of headers with the specified type. */
+    public List<Header> getHeaders(final Class<? extends Header> type) {
+        List<Header> tmp = new ArrayList<Header>();
+        for (final Header h : this.headers) {
+            if (h.getClass().equals(type)) {
+                tmp.add(h);
+            }
+        }
+
+        return tmp;
+    }
+
+    /**
+     * Writes out the metrics file to the supplied file. The file is written out
+     * headers first, metrics second and histogram third.
+     *
+     * @param f a File into which to write the metrics
+     */
+    public void write(final File f) {
+        FileWriter w = null;
+        try {
+            w = new FileWriter(f);
+            write(w);
+        }
+        catch (IOException ioe) {
+            throw new SAMException("Could not write metrics to file: " + f.getAbsolutePath(), ioe);
+        }
+        finally {
+            if (w != null) {
+                try {
+                    w.close();
+                } catch (IOException e) {
+                }
+            }
+        }
+    }
+
+    /**
+     * Writes out the metrics file to the supplied writer. The file is written out
+     * headers first, metrics second and histogram third.
+     *
+     * @param w a Writer into which to write the metrics
+     */
+    public void write(final Writer w) {
+        try {
+            final FormatUtil formatter = new FormatUtil();
+            final BufferedWriter out = new BufferedWriter(w);
+            printHeaders(out);
+            out.newLine();
+
+            printBeanMetrics(out, formatter);
+            out.newLine();
+
+            printHistogram(out, formatter);
+            out.newLine();
+            out.flush();
+        }
+        catch (IOException ioe) {
+            throw new SAMException("Could not write metrics file.", ioe);
+        }
+    }
+
+    /** Prints the headers into the provided PrintWriter. */
+    private void printHeaders(final BufferedWriter out) throws IOException {
+        for (final Header h : this.headers) {
+            out.append(MAJOR_HEADER_PREFIX);
+            out.append(h.getClass().getName());
+            out.newLine();
+            out.append(MINOR_HEADER_PREFIX);
+            out.append(h.toString());
+            out.newLine();
+        }
+    }
+
+    /** Prints each of the metrics entries into the provided PrintWriter. */
+    private void printBeanMetrics(final BufferedWriter out, final FormatUtil formatter) throws IOException {
+        if (this.metrics.isEmpty()) {
+            return;
+        }
+
+        // Write out a header row with the type of the metric class
+        out.append(METRIC_HEADER + getBeanType().getName());
+        out.newLine();
+
+        // Gather the public fields of the metric class
+        final Field[] fields = getBeanType().getFields();
+        final int fieldCount = fields.length;
+
+        // Write out the column headers
+        for (int i=0; i<fieldCount; ++i) {
+            out.append(fields[i].getName());
+            if (i < fieldCount - 1) {
+                out.append(MetricsFile.SEPARATOR);
+            }
+            else {
+                out.newLine();
+            }
+        }
+
+        // Write out each of the data rows
+        for (final BEAN bean : this.metrics) {
+            for (int i=0; i<fieldCount; ++i) {
+                try {
+                    final Object value = fields[i].get(bean);
+                    out.append(StringUtil.assertCharactersNotInString(formatter.format(value), '\t', '\n'));
+
+                    if (i < fieldCount - 1) {
+                        out.append(MetricsFile.SEPARATOR);
+                    }
+                    else {
+                        out.newLine();
+                    }
+                }
+                catch (IllegalAccessException iae) {
+                    throw new SAMException("Could not read property " + fields[i].getName()
+                            + " from class of type " + bean.getClass());
+                }
+            }
+        }
+
+        out.flush();
+    }
+
+    /** Prints the histogram if one is present. */
+    private void printHistogram(final BufferedWriter out, final FormatUtil formatter) throws IOException {
+        final List<Histogram<HKEY>> nonEmptyHistograms = new ArrayList<Histogram<HKEY>>();
+        for (final Histogram<HKEY> histo : this.histograms) {
+            if (!histo.isEmpty()) nonEmptyHistograms.add(histo);
+        }
+
+        if (nonEmptyHistograms.isEmpty()) {
+            return;
+        }
+
+        // Build a combined key set.  Assume comparator is the same for all Histograms
+        final java.util.Set<HKEY> keys = new TreeSet<HKEY>(nonEmptyHistograms.get(0).comparator());
+        for (final Histogram<HKEY> histo : nonEmptyHistograms) {
+            if (histo != null) keys.addAll(histo.keySet());
+        }
+
+        // Add a header for the histogram key type
+        out.append(HISTO_HEADER + nonEmptyHistograms.get(0).keySet().iterator().next().getClass().getName());
+        out.newLine();
+
+        // Output a header row
+        out.append(StringUtil.assertCharactersNotInString(nonEmptyHistograms.get(0).getBinLabel(), '\t', '\n'));
+        for (final Histogram<HKEY> histo : nonEmptyHistograms) {
+            out.append(SEPARATOR);
+            out.append(StringUtil.assertCharactersNotInString(histo.getValueLabel(), '\t', '\n'));
+        }
+        out.newLine();
+
+        for (final HKEY key : keys) {
+            out.append(key.toString());
+
+            for (final Histogram<HKEY> histo : nonEmptyHistograms) {
+                final Histogram.Bin<HKEY> bin = histo.get(key);
+                final double value = (bin == null ? 0 : bin.getValue());
+
+                out.append(SEPARATOR);
+                out.append(formatter.format(value));
+            }
+
+            out.newLine();
+        }
+    }
+
+    /** Gets the type of the metrics bean being used. */
+    private Class<?> getBeanType() {
+        if (this.metrics.isEmpty()) {
+            return null;
+        } else {
+            return this.metrics.get(0).getClass();
+        }
+    }
+
+    /** Reads the Metrics in from the given reader. */
+    public void read(final Reader r) {
+        final BufferedReader in = new BufferedReader(r);
+        final FormatUtil formatter = new FormatUtil();
+        String line = null;
+
+        try {
+            // First read the headers
+            Header header = null;
+            while ((line = in.readLine()) != null) {
+                line = line.trim();
+                if ("".equals(line)) {
+                    // Do nothing! Nothing to be done!
+                }
+                else if (line.startsWith(METRIC_HEADER) || line.startsWith(HISTO_HEADER)) {
+                    // A line that starts with "## METRICS CLASS" heralds the start of the actual
+                    // data. Bounce our butts out of header parsing without reading the next line.
+                    // This isn't in the while loop's conditional because we want to trim() first.
+                    break;
+                }
+                else if (line.startsWith(MAJOR_HEADER_PREFIX)) {
+                    if (header != null) {
+                        throw new IllegalStateException("Consecutive header class lines encountered.");
+                    }
+                    
+                    final String className = line.substring(MAJOR_HEADER_PREFIX.length()).trim();
+                    try {
+                        header = (Header) loadClass(className, true).newInstance();
+                    }
+                    catch (final Exception e) {
+                        throw new SAMException("Error load and/or instantiating an instance of " + className, e);
+                    }
+                }
+                else if (line.startsWith(MINOR_HEADER_PREFIX)) {
+                    if (header == null) {
+                        throw new IllegalStateException("Header class must precede header value:" + line);
+                    }
+                    header.parse(line.substring(MINOR_HEADER_PREFIX.length()));
+                    this.headers.add(header);
+                    header = null;
+                }
+                else {
+                    throw new SAMException("Illegal state. Found following string in metrics file header: " + line);
+                }
+            }
+
+            // Read space between headers and metrics, if any
+            while (line != null && ! line.trim().startsWith(MAJOR_HEADER_PREFIX)) {
+                line = in.readLine();
+            }
+
+
+            if (line != null) {
+                line = line.trim();
+            
+                // Then read the metrics if there are any
+                if (line.startsWith(METRIC_HEADER)) {
+                    // Get the metric class from the header
+                    final String className = line.split(SEPARATOR)[1];
+                    Class<?> type = null;
+                    try {
+                        type = loadClass(className, true);
+                    }
+                    catch (final ClassNotFoundException cnfe) {
+                        throw new SAMException("Could not locate class with name " + className, cnfe);
+                    }
+
+                    // Read the next line with the column headers
+                    final String[] fieldNames = in.readLine().split(SEPARATOR);
+                    Collections.addAll(columnLabels, fieldNames);
+                    final Field[] fields = new Field[fieldNames.length];
+                    for (int i=0; i<fieldNames.length; ++i) {
+                        try {
+                            fields[i] = type.getField(fieldNames[i]);
+                        }
+                        catch (final Exception e) {
+                            throw new SAMException("Could not get field with name " + fieldNames[i] +
+                                " from class " + type.getName());
+                        }
+                    }
+
+                    // Now read the values
+                    while ((line = in.readLine()) != null) {
+                        if ("".equals(line.trim())) {
+                            break;
+                        }
+                        else {
+                            final String[] values = line.split(SEPARATOR, -1);
+                            BEAN bean = null;
+
+                            try { bean = (BEAN) type.newInstance(); }
+                            catch (final Exception e) { throw new SAMException("Error instantiating a " + type.getName(), e); }
+
+                            for (int i=0; i<fields.length; ++i) {
+                                Object value = null;
+                                if (values[i] != null && !values[i].isEmpty()) {
+                                    value = formatter.parseObject(values[i], fields[i].getType());
+                                }
+
+                                try { fields[i].set(bean, value); }
+                                catch (final Exception e) {
+                                    throw new SAMException("Error setting field " + fields[i].getName() +
+                                            " on class of type " + type.getName(), e);
+                                }
+                            }
+
+                            this.metrics.add(bean);
+                        }
+                    }
+                }
+            }
+
+            // Read away any blank lines between metrics and histograms
+            while (line != null && ! line.trim().startsWith(MAJOR_HEADER_PREFIX)) {
+                line = in.readLine();
+            }
+
+            // Then read the histograms if any are present
+            if (line != null) {
+                line = line.trim();
+
+                if (line.startsWith(HISTO_HEADER)) {
+                    // Get the key type of the histogram
+                    final String keyClassName = line.split(SEPARATOR)[1].trim();
+                    Class<?> keyClass = null;
+
+                    try { keyClass = loadClass(keyClassName, true); }
+                    catch (final ClassNotFoundException cnfe) { throw new SAMException("Could not load class with name " + keyClassName, cnfe); }
+
+                    // Read the next line with the bin and value labels
+                    final String[] labels = in.readLine().split(SEPARATOR);
+                    for (int i=1; i<labels.length; ++i) {
+                        this.histograms.add(new Histogram<HKEY>(labels[0], labels[i]));
+                    }
+
+                    // Read the entries in the histograms
+                    while ((line = in.readLine()) != null && !"".equals(line)) {
+                        final String[] fields = line.trim().split(SEPARATOR);
+                        final HKEY key = (HKEY) formatter.parseObject(fields[0], keyClass);
+
+                        for (int i=1; i<fields.length; ++i) {
+                            final double value = formatter.parseDouble(fields[i]);
+                            this.histograms.get(i-1).increment(key, value);
+                        }
+                    }
+                }
+            }
+        }
+        catch (final IOException ioe) {
+            throw new SAMException("Could not read metrics from reader.", ioe);
+        }
+        finally{
+            CloserUtil.close(in);
+        }
+    }
+
+    /** Attempts to load a class, taking into account that some classes have "migrated" from the old Broad packages to the newer picard/htsjdk packages. */
+    private Class<?> loadClass(final String className, final boolean tryOtherPackages) throws ClassNotFoundException {
+        // List of alternative packages to check in case classes moved around
+        final String[] packages = new String[] {
+                "edu.mit.broad.picard.genotype.concordance",
+                "edu.mit.broad.picard.genotype.fingerprint",
+                "edu.mit.broad.picard.ic",
+                "edu.mit.broad.picard.illumina",
+                "edu.mit.broad.picard.jumping",
+                "edu.mit.broad.picard.quality",
+                "edu.mit.broad.picard.samplevalidation",
+                "htsjdk.samtools.analysis",
+                "htsjdk.samtools.analysis.directed",
+                "htsjdk.samtools.sam",
+                "htsjdk.samtools.metrics",
+                "picard.sam",
+                "picard.metrics",
+                "picard.illumina",
+                "picard.analysis",
+                "picard.analysis.directed",
+                "picard.vcf"
+        };
+
+        try { return Class.forName(className); }
+        catch (ClassNotFoundException cnfe) {
+            if (tryOtherPackages) {
+                for (final String p : packages) {
+                    try {
+                        return loadClass(p + className.substring(className.lastIndexOf('.')), false);
+                    }
+                    catch (ClassNotFoundException cnf2) {/* do nothing */}
+                    // If it was an inner class, try and see if it's a stand-alone class now
+                    if (className.indexOf('$') > -1) {
+                        try {
+                            return loadClass(p + "." + className.substring(className.lastIndexOf('$') + 1), false);
+                        }
+                        catch (ClassNotFoundException cnf2) {/* do nothing */}
+                    }
+                }
+            }
+
+            throw cnfe;
+        }
+    }
+
+    /** Checks that the headers, metrics and histogram are all equal. */
+    @Override
+    public boolean equals(final Object o) {
+        if (o == null) {
+            return false;
+        }
+        if (getClass() != o.getClass()) {
+            return false;
+        }
+        final MetricsFile that = (MetricsFile) o;
+
+        if (!areHeadersEqual(that)) {
+            return false;
+        }
+        if (!areMetricsEqual(that)) {
+            return false;
+        }
+        if (!areHistogramsEqual(that)) {
+            return false;
+        }
+
+        return true;
+    }
+
+    public boolean areHeadersEqual(final MetricsFile that) {
+        return this.headers.equals(that.headers);
+    }
+
+    public boolean areMetricsEqual(final MetricsFile that) {
+        return this.metrics.equals(that.metrics);
+    }
+
+    public boolean areHistogramsEqual(final MetricsFile that) {
+        return this.histograms.equals(that.histograms);
+    }
+
+    @Override
+    public int hashCode() {
+        int result = headers.hashCode();
+        result = 31 * result + metrics.hashCode();
+        return result;
+    }
+
+    /**
+     * Convenience method to read all the Metric beans from a metrics file.
+     * @param file to be read.
+     * @return list of beans from the file.
+     */
+    public static <T extends MetricBase> List<T> readBeans(final File file) {
+        final MetricsFile<T, ?> metricsFile = new MetricsFile<>();
+        final Reader in = IOUtil.openFileForBufferedReading(file);
+        metricsFile.read(in);
+        CloserUtil.close(in);
+        return metricsFile.getMetrics();
+    }
+
+    /**
+     * Method to read the header from a metrics file.
+     */
+    public static List<Header> readHeaders(final File file) {
+        try {
+            final MetricsFile<MetricBase, ?> metricsFile = new MetricsFile<>();
+            metricsFile.read(new FileReader(file));
+            return metricsFile.getHeaders();
+        } catch (FileNotFoundException e) {
+            throw new SAMException(e.getMessage(), e);
+        }
+    }
+
+    /**
+     * Compare the metrics in two files, ignoring headers and histograms.
+     */
+    public static boolean areMetricsEqual(final File file1, final File file2) {
+        try {
+            final MetricsFile<MetricBase, ?> mf1 = new MetricsFile<>();
+            final MetricsFile<MetricBase, ?> mf2 = new MetricsFile<>();
+            mf1.read(new FileReader(file1));
+            mf2.read(new FileReader(file2));
+            return mf1.areMetricsEqual(mf2);
+        } catch (FileNotFoundException e) {
+            throw new SAMException(e.getMessage(), e);
+        }
+
+    }
+
+    /**
+     * Compare the metrics and histograms in two files, ignoring headers.
+     */
+    public static boolean areMetricsAndHistogramsEqual(final File file1, final File file2) {
+        try {
+            final MetricsFile<MetricBase, ?> mf1 = new MetricsFile<>();
+            final MetricsFile<MetricBase, ?> mf2 = new MetricsFile<>();
+            mf1.read(new FileReader(file1));
+            mf2.read(new FileReader(file2));
+
+            return mf1.areMetricsEqual(mf2) && mf1.areHistogramsEqual(mf2);
+
+        } catch (FileNotFoundException e) {
+            throw new SAMException(e.getMessage(), e);
+        }
+    }
+}
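A short write/read sketch for the MetricsFile API above; the ReadCountMetric bean and file name are hypothetical, and StringHeader is the header class renamed further below.

    import htsjdk.samtools.metrics.MetricBase;
    import htsjdk.samtools.metrics.MetricsFile;
    import htsjdk.samtools.metrics.StringHeader;
    import java.io.File;

    public class MetricsFileExample {
        /** Minimal metric bean; public fields become the tab-separated columns. */
        public static class ReadCountMetric extends MetricBase {
            public String SAMPLE;
            public long READS;
        }

        public static void main(String[] args) {
            final ReadCountMetric metric = new ReadCountMetric();
            metric.SAMPLE = "sample1";
            metric.READS = 12345L;

            final MetricsFile<ReadCountMetric, Integer> metricsFile = new MetricsFile<>();
            metricsFile.addHeader(new StringHeader("example run"));
            metricsFile.addMetric(metric);
            // Written as headers, then metrics, then histograms (none here).
            metricsFile.write(new File("read_counts.metrics"));

            // Round trip: the "## METRICS CLASS" line names the bean class to re-instantiate.
            System.out.println(MetricsFile.readBeans(new File("read_counts.metrics")).size());
        }
    }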
diff --git a/src/java/htsjdk/samtools/metrics/StringHeader.java b/src/main/java/htsjdk/samtools/metrics/StringHeader.java
similarity index 100%
rename from src/java/htsjdk/samtools/metrics/StringHeader.java
rename to src/main/java/htsjdk/samtools/metrics/StringHeader.java
diff --git a/src/java/htsjdk/samtools/metrics/VersionHeader.java b/src/main/java/htsjdk/samtools/metrics/VersionHeader.java
similarity index 100%
rename from src/java/htsjdk/samtools/metrics/VersionHeader.java
rename to src/main/java/htsjdk/samtools/metrics/VersionHeader.java
diff --git a/src/java/htsjdk/samtools/reference/AbstractFastaSequenceFile.java b/src/main/java/htsjdk/samtools/reference/AbstractFastaSequenceFile.java
similarity index 100%
rename from src/java/htsjdk/samtools/reference/AbstractFastaSequenceFile.java
rename to src/main/java/htsjdk/samtools/reference/AbstractFastaSequenceFile.java
diff --git a/src/java/htsjdk/samtools/reference/FastaSequenceFile.java b/src/main/java/htsjdk/samtools/reference/FastaSequenceFile.java
similarity index 100%
rename from src/java/htsjdk/samtools/reference/FastaSequenceFile.java
rename to src/main/java/htsjdk/samtools/reference/FastaSequenceFile.java
diff --git a/src/java/htsjdk/samtools/reference/FastaSequenceIndex.java b/src/main/java/htsjdk/samtools/reference/FastaSequenceIndex.java
similarity index 100%
rename from src/java/htsjdk/samtools/reference/FastaSequenceIndex.java
rename to src/main/java/htsjdk/samtools/reference/FastaSequenceIndex.java
diff --git a/src/java/htsjdk/samtools/reference/IndexedFastaSequenceFile.java b/src/main/java/htsjdk/samtools/reference/IndexedFastaSequenceFile.java
similarity index 100%
rename from src/java/htsjdk/samtools/reference/IndexedFastaSequenceFile.java
rename to src/main/java/htsjdk/samtools/reference/IndexedFastaSequenceFile.java
diff --git a/src/java/htsjdk/samtools/reference/ReferenceSequence.java b/src/main/java/htsjdk/samtools/reference/ReferenceSequence.java
similarity index 100%
rename from src/java/htsjdk/samtools/reference/ReferenceSequence.java
rename to src/main/java/htsjdk/samtools/reference/ReferenceSequence.java
diff --git a/src/java/htsjdk/samtools/reference/ReferenceSequenceFile.java b/src/main/java/htsjdk/samtools/reference/ReferenceSequenceFile.java
similarity index 100%
rename from src/java/htsjdk/samtools/reference/ReferenceSequenceFile.java
rename to src/main/java/htsjdk/samtools/reference/ReferenceSequenceFile.java
diff --git a/src/java/htsjdk/samtools/reference/ReferenceSequenceFileFactory.java b/src/main/java/htsjdk/samtools/reference/ReferenceSequenceFileFactory.java
similarity index 100%
rename from src/java/htsjdk/samtools/reference/ReferenceSequenceFileFactory.java
rename to src/main/java/htsjdk/samtools/reference/ReferenceSequenceFileFactory.java
diff --git a/src/java/htsjdk/samtools/reference/ReferenceSequenceFileWalker.java b/src/main/java/htsjdk/samtools/reference/ReferenceSequenceFileWalker.java
similarity index 100%
rename from src/java/htsjdk/samtools/reference/ReferenceSequenceFileWalker.java
rename to src/main/java/htsjdk/samtools/reference/ReferenceSequenceFileWalker.java
diff --git a/src/java/htsjdk/samtools/seekablestream/ByteArraySeekableStream.java b/src/main/java/htsjdk/samtools/seekablestream/ByteArraySeekableStream.java
similarity index 100%
rename from src/java/htsjdk/samtools/seekablestream/ByteArraySeekableStream.java
rename to src/main/java/htsjdk/samtools/seekablestream/ByteArraySeekableStream.java
diff --git a/src/java/htsjdk/samtools/seekablestream/ISeekableStreamFactory.java b/src/main/java/htsjdk/samtools/seekablestream/ISeekableStreamFactory.java
similarity index 100%
rename from src/java/htsjdk/samtools/seekablestream/ISeekableStreamFactory.java
rename to src/main/java/htsjdk/samtools/seekablestream/ISeekableStreamFactory.java
diff --git a/src/java/htsjdk/samtools/seekablestream/SeekableBufferedStream.java b/src/main/java/htsjdk/samtools/seekablestream/SeekableBufferedStream.java
similarity index 100%
rename from src/java/htsjdk/samtools/seekablestream/SeekableBufferedStream.java
rename to src/main/java/htsjdk/samtools/seekablestream/SeekableBufferedStream.java
diff --git a/src/java/htsjdk/samtools/seekablestream/SeekableFTPStream.java b/src/main/java/htsjdk/samtools/seekablestream/SeekableFTPStream.java
similarity index 100%
rename from src/java/htsjdk/samtools/seekablestream/SeekableFTPStream.java
rename to src/main/java/htsjdk/samtools/seekablestream/SeekableFTPStream.java
diff --git a/src/java/htsjdk/samtools/seekablestream/SeekableFTPStreamHelper.java b/src/main/java/htsjdk/samtools/seekablestream/SeekableFTPStreamHelper.java
similarity index 100%
rename from src/java/htsjdk/samtools/seekablestream/SeekableFTPStreamHelper.java
rename to src/main/java/htsjdk/samtools/seekablestream/SeekableFTPStreamHelper.java
diff --git a/src/java/htsjdk/samtools/seekablestream/SeekableFileStream.java b/src/main/java/htsjdk/samtools/seekablestream/SeekableFileStream.java
similarity index 100%
rename from src/java/htsjdk/samtools/seekablestream/SeekableFileStream.java
rename to src/main/java/htsjdk/samtools/seekablestream/SeekableFileStream.java
diff --git a/src/java/htsjdk/samtools/seekablestream/SeekableHTTPStream.java b/src/main/java/htsjdk/samtools/seekablestream/SeekableHTTPStream.java
similarity index 100%
rename from src/java/htsjdk/samtools/seekablestream/SeekableHTTPStream.java
rename to src/main/java/htsjdk/samtools/seekablestream/SeekableHTTPStream.java
diff --git a/src/main/java/htsjdk/samtools/seekablestream/SeekableMemoryStream.java b/src/main/java/htsjdk/samtools/seekablestream/SeekableMemoryStream.java
new file mode 100644
index 0000000..37a5ab4
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/seekablestream/SeekableMemoryStream.java
@@ -0,0 +1,64 @@
+package htsjdk.samtools.seekablestream;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+public class SeekableMemoryStream extends SeekableStream {
+    private final ByteBuffer buf;
+    private final String source;
+
+    public SeekableMemoryStream(final byte[] data, final String source) {
+        this.buf = ByteBuffer.wrap(data);
+        this.source = source;
+    }
+
+    @Override
+    public void close() throws IOException {
+        buf.clear();
+    }
+
+    @Override
+    public boolean eof() throws IOException {
+        return buf.position() == buf.limit();
+    }
+
+    @Override
+    public String getSource() {
+        return source;
+    }
+
+    @Override
+    public long length() {
+        return buf.array().length - buf.arrayOffset();
+    }
+
+    @Override
+    public int read(final byte[] buffer, final int offset, final int length) throws IOException {
+        int availableLength = Math.min(length, buf.remaining());
+        if (availableLength < 1) {
+            return -1;
+        }
+        buf.get(buffer, offset, availableLength);
+        return availableLength;
+    }
+
+    @Override
+    public void seek(final long position) throws IOException {
+        buf.position((int) position);
+    }
+
+    @Override
+    public int read() throws IOException {
+        if (buf.position() < buf.limit()) {
+            return buf.get() & 0xFF;
+        } else {
+            return -1;
+        }
+    }
+
+    @Override
+    public long position() throws IOException {
+        return buf.position();
+    }
+
+}
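A small sketch showing the seek/read behavior of SeekableMemoryStream above; the payload string is arbitrary.

    import htsjdk.samtools.seekablestream.SeekableMemoryStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;

    public class SeekableMemoryStreamExample {
        public static void main(String[] args) throws IOException {
            final byte[] data = "hello, seekable world".getBytes(StandardCharsets.US_ASCII);
            try (SeekableMemoryStream stream = new SeekableMemoryStream(data, "in-memory example")) {
                stream.seek(7);                         // position just past "hello, "
                final byte[] word = new byte[8];
                final int n = stream.read(word, 0, word.length);
                System.out.println(new String(word, 0, n, StandardCharsets.US_ASCII)); // "seekable"
            }
        }
    }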
diff --git a/src/main/java/htsjdk/samtools/seekablestream/SeekablePathStream.java b/src/main/java/htsjdk/samtools/seekablestream/SeekablePathStream.java
new file mode 100644
index 0000000..18a41e7
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/seekablestream/SeekablePathStream.java
@@ -0,0 +1,121 @@
+package htsjdk.samtools.seekablestream;
+
+import htsjdk.samtools.util.Log;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.channels.SeekableByteChannel;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+
+/**
+ * An implementation of {@link SeekableStream} for {@link Path}.
+ */
+public class SeekablePathStream extends SeekableStream {
+
+    private final static Log LOG = Log.getInstance(SeekablePathStream.class);
+
+    /**
+     * Collection of all open instances.  SeekablePathStream objects are usually open and kept open for the
+     * duration of a session.  This collection supports a method to close them all.
+     */
+    private static final Collection<SeekablePathStream> ALL_INSTANCES =
+            Collections.synchronizedCollection(new HashSet<SeekablePathStream>());
+
+    private final Path path;
+    private final SeekableByteChannel sbc;
+    private final ByteBuffer oneByteBuf = ByteBuffer.allocate(1);
+
+    public SeekablePathStream(final Path path) throws IOException {
+        this.path = path;
+        this.sbc = Files.newByteChannel(path);
+        ALL_INSTANCES.add(this);
+    }
+
+    @Override
+    public long length() {
+        try {
+            return sbc.size();
+        } catch (IOException e) {
+            LOG.error("Cannot find length of path: " + path, e);
+            return 0; // consistent with java.io.File
+        }
+    }
+
+    @Override
+    public boolean eof() throws IOException {
+        return length() == position();
+    }
+
+    @Override
+    public void seek(final long position) throws IOException {
+        sbc.position(position);
+    }
+
+    @Override
+    public long position() throws IOException {
+        return sbc.position();
+    }
+
+    @Override
+    public long skip(long n) throws IOException {
+        long initPos = position();
+        sbc.position(initPos + n);
+        return position() - initPos;
+    }
+
+    @Override
+    public int read(final byte[] buffer, final int offset, final int length) throws IOException {
+        if (length < 0) {
+            throw new IndexOutOfBoundsException();
+        }
+        ByteBuffer buf = ByteBuffer.wrap(buffer, offset, length);
+        int n = 0;
+        while (n < length) {
+            final int count = sbc.read(buf);
+            if (count < 0) {
+              if (n > 0) {
+                return n;
+              } else {
+                return count;
+              }
+            }
+            n += count;
+        }
+        return n;
+    }
+
+    @Override
+    public int read() throws IOException {
+        oneByteBuf.clear();
+        int n = sbc.read(oneByteBuf);
+        return n == 1 ? oneByteBuf.array()[0] & 0xff : n;
+    }
+
+    @Override
+    public String getSource() {
+        return path.toAbsolutePath().toString();
+    }
+
+
+    @Override
+    public void close() throws IOException {
+        ALL_INSTANCES.remove(this);
+        sbc.close();
+    }
+
+    public static synchronized void closeAllInstances() {
+        Collection<SeekablePathStream> clonedInstances = new HashSet<SeekablePathStream>();
+        clonedInstances.addAll(ALL_INSTANCES);
+        for (SeekablePathStream sfs : clonedInstances) {
+            try {
+                sfs.close();
+            } catch (IOException e) {
+                LOG.error("Error closing SeekablePathStream", e);
+            }
+        }
+        ALL_INSTANCES.clear();
+    }
+}
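A minimal sketch of reading through SeekablePathStream above; the path is hypothetical and could equally come from a non-default NIO filesystem.

    import htsjdk.samtools.seekablestream.SeekablePathStream;
    import java.io.IOException;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class SeekablePathStreamExample {
        public static void main(String[] args) throws IOException {
            final Path path = Paths.get("example.bam");   // hypothetical local file
            try (SeekablePathStream stream = new SeekablePathStream(path)) {
                System.out.println("source: " + stream.getSource() + ", length: " + stream.length());
                stream.seek(0);
                final byte[] magic = new byte[4];
                final int n = stream.read(magic, 0, magic.length);
                System.out.println("read " + n + " bytes");
            }
        }
    }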
diff --git a/src/java/htsjdk/samtools/seekablestream/SeekableStream.java b/src/main/java/htsjdk/samtools/seekablestream/SeekableStream.java
similarity index 100%
rename from src/java/htsjdk/samtools/seekablestream/SeekableStream.java
rename to src/main/java/htsjdk/samtools/seekablestream/SeekableStream.java
diff --git a/src/main/java/htsjdk/samtools/seekablestream/SeekableStreamFactory.java b/src/main/java/htsjdk/samtools/seekablestream/SeekableStreamFactory.java
new file mode 100644
index 0000000..fe8f42a
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/seekablestream/SeekableStreamFactory.java
@@ -0,0 +1,97 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2013 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools.seekablestream;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.URL;
+
+/**
+ * Singleton class for getting {@link SeekableStream}s from URLs/paths.
+ * Applications using this library can set their own factory.
+ * @author jrobinso
+ * @date Nov 30, 2009
+ */
+public class SeekableStreamFactory{
+
+    private static final ISeekableStreamFactory DEFAULT_FACTORY;
+    private static ISeekableStreamFactory currentFactory;
+
+    static{
+        DEFAULT_FACTORY = new DefaultSeekableStreamFactory();
+        currentFactory = DEFAULT_FACTORY;
+    }
+
+    private SeekableStreamFactory(){}
+
+    public static void setInstance(final ISeekableStreamFactory factory){
+        currentFactory = factory;
+    }
+
+    public static ISeekableStreamFactory getInstance(){
+        return currentFactory;
+    }
+
+    /**
+     * Does this path point to a regular file on disk and not something like a URL?
+     * @param path the path to test
+     * @return true if the path is to a file on disk
+     */
+    public static boolean isFilePath(final String path) {
+        return ! ( path.startsWith("http:") || path.startsWith("https:") || path.startsWith("ftp:") );
+    }
+
+    private static class DefaultSeekableStreamFactory implements ISeekableStreamFactory {
+
+        public SeekableStream getStreamFor(final URL url) throws IOException {
+            return getStreamFor(url.toExternalForm());
+        }
+
+        public SeekableStream getStreamFor(final String path) throws IOException {
+            // todo -- add support for SeekableBlockInputStream
+
+            if (path.startsWith("http:") || path.startsWith("https:")) {
+                final URL url = new URL(path);
+                return new SeekableHTTPStream(url);
+            } else if (path.startsWith("ftp:")) {
+                return new SeekableFTPStream(new URL(path));
+            } else if (path.startsWith("file:")) {
+                return new SeekableFileStream(new File(new URL(path).getPath()));
+            } else {
+                return new SeekableFileStream(new File(path));
+            }
+        }
+
+        public SeekableStream getBufferedStream(SeekableStream stream){
+            return getBufferedStream(stream, SeekableBufferedStream.DEFAULT_BUFFER_SIZE);
+        }
+
+        public SeekableStream getBufferedStream(SeekableStream stream, int bufferSize){
+            if (bufferSize == 0) return stream;
+            else return new SeekableBufferedStream(stream, bufferSize);
+        }
+
+    }
+
+}
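A usage sketch for the factory above, which dispatches on the path scheme; the path value is hypothetical.

    import htsjdk.samtools.seekablestream.ISeekableStreamFactory;
    import htsjdk.samtools.seekablestream.SeekableStream;
    import htsjdk.samtools.seekablestream.SeekableStreamFactory;
    import java.io.IOException;

    public class SeekableStreamFactoryExample {
        public static void main(String[] args) throws IOException {
            final String path = "example.bam";   // http:, https:, ftp: and file: URLs are also accepted
            System.out.println("regular file path? " + SeekableStreamFactory.isFilePath(path));
            final ISeekableStreamFactory factory = SeekableStreamFactory.getInstance();
            try (SeekableStream stream = factory.getBufferedStream(factory.getStreamFor(path))) {
                System.out.println("length: " + stream.length());
            }
        }
    }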
diff --git a/src/java/htsjdk/samtools/seekablestream/UserPasswordInput.java b/src/main/java/htsjdk/samtools/seekablestream/UserPasswordInput.java
similarity index 100%
rename from src/java/htsjdk/samtools/seekablestream/UserPasswordInput.java
rename to src/main/java/htsjdk/samtools/seekablestream/UserPasswordInput.java
diff --git a/src/main/java/htsjdk/samtools/sra/ReferenceCache.java b/src/main/java/htsjdk/samtools/sra/ReferenceCache.java
new file mode 100644
index 0000000..9eb389a
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/sra/ReferenceCache.java
@@ -0,0 +1,46 @@
+package htsjdk.samtools.sra;
+
+import htsjdk.samtools.SAMFileHeader;
+import ngs.ErrorMsg;
+import ngs.ReadCollection;
+import ngs.Reference;
+
+
+/**
+ * A thread-safe wrapper around cached Reference objects.
+ * Those objects can be used from different threads without issues; however, to load and save a Reference object we
+ * need to acquire a lock.
+ *
+ * Created by andrii.nikitiuk on 10/28/15.
+ */
+public class ReferenceCache {
+    private ReadCollection run;
+    private SAMFileHeader virtualHeader;
+    private Reference cachedReference;
+
+    public ReferenceCache(ReadCollection run, SAMFileHeader virtualHeader) {
+        this.run = run;
+        this.virtualHeader = virtualHeader;
+    }
+
+    /**
+     * Returns the Reference object for the given reference index in the SAM header.
+     * The returned objects do not maintain thread safety.
+     *
+     * @param referenceIndex reference index in the SAM header
+     * @return a Reference object
+     */
+    public Reference get(int referenceIndex) {
+        String contig = virtualHeader.getSequence(referenceIndex).getSequenceName();
+
+        try {
+            if (cachedReference == null || !cachedReference.getCanonicalName().equals(contig)) {
+                cachedReference = run.getReference(contig);
+            }
+        } catch (ErrorMsg e) {
+            throw new RuntimeException(e);
+        }
+
+        return cachedReference;
+    }
+}
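A hedged sketch of how ReferenceCache above could be wired up, assuming the ngs-java library (NGS.openReadCollection, ReadCollection.getReferences(), ReferenceIterator) is available and its native SRA libraries can load; the accession is hypothetical and must refer to an aligned run.

    import gov.nih.nlm.ncbi.ngs.NGS;
    import htsjdk.samtools.SAMFileHeader;
    import htsjdk.samtools.SAMSequenceRecord;
    import htsjdk.samtools.sra.ReferenceCache;
    import ngs.ErrorMsg;
    import ngs.ReadCollection;
    import ngs.Reference;
    import ngs.ReferenceIterator;

    public class ReferenceCacheExample {
        public static void main(String[] args) throws ErrorMsg {
            final ReadCollection run = NGS.openReadCollection("SRR000123");   // hypothetical aligned accession
            // Build a header whose sequence order matches the run's references.
            final SAMFileHeader header = new SAMFileHeader();
            final ReferenceIterator refs = run.getReferences();
            while (refs.nextReference()) {
                header.addSequence(new SAMSequenceRecord(refs.getCanonicalName(), (int) refs.getLength()));
            }
            final ReferenceCache cache = new ReferenceCache(run, header);
            final Reference first = cache.get(0);   // loads and caches the Reference for sequence index 0
            System.out.println(first.getCanonicalName());
        }
    }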
diff --git a/src/main/java/htsjdk/samtools/sra/SRAAccession.java b/src/main/java/htsjdk/samtools/sra/SRAAccession.java
new file mode 100644
index 0000000..aadfdc3
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/sra/SRAAccession.java
@@ -0,0 +1,174 @@
+/*===========================================================================
+*
+*                            PUBLIC DOMAIN NOTICE
+*               National Center for Biotechnology Information
+*
+*  This software/database is a "United States Government Work" under the
+*  terms of the United States Copyright Act.  It was written as part of
+*  the author's official duties as a United States Government employee and
+*  thus cannot be copyrighted.  This software/database is freely available
+*  to the public for use. The National Library of Medicine and the U.S.
+*  Government have not placed any restriction on its use or reproduction.
+*
+*  Although all reasonable efforts have been taken to ensure the accuracy
+*  and reliability of the software and data, the NLM and the U.S.
+*  Government do not and cannot warrant the performance or results that
+*  may be obtained by using this software or data. The NLM and the U.S.
+*  Government disclaim all warranties, express or implied, including
+*  warranties of performance, merchantability or fitness for any particular
+*  purpose.
+*
+*  Please cite the author in any work or product based on this material.
+*
+* ===========================================================================
+*
+*/
+
+package htsjdk.samtools.sra;
+
+import gov.nih.nlm.ncbi.ngs.error.LibraryLoadError;
+import htsjdk.samtools.Defaults;
+import htsjdk.samtools.util.Log;
+import gov.nih.nlm.ncbi.ngs.NGS;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.Serializable;
+import java.util.Arrays;
+
+/**
+ * Describes a single SRA accession for an SRA read collection.
+ * Also provides app version string functionality and allows checking whether SRA is supported on the running platform.
+ *
+ * Important: due to checks performed in SRAAccession.isValid(), we won't recognise any accessions other
+ * than ones that follow the pattern "^[SED]RR[0-9]{6,9}$", e.g. SRR000123
+ */
+public class SRAAccession implements Serializable {
+    private static final Log log = Log.getInstance(SRAAccession.class);
+
+    private static boolean noLibraryDownload;
+    private static boolean initTried = false;
+    private static String appVersionString = null;
+    private final static String defaultAppVersionString = "[unknown software]";
+    private final static String htsJdkVersionString = "HTSJDK-NGS";
+
+    private String acc;
+
+    static {
+        noLibraryDownload = !Defaults.SRA_LIBRARIES_DOWNLOAD;
+        if (noLibraryDownload) {
+            System.setProperty("vdb.System.noLibraryDownload", "1");
+        }
+    }
+
+    /**
+     * Sets an app version string which will let SRA know which software uses it.
+     * @param appVersionString a string that describes running application
+     */
+    public static void setAppVersionString(String appVersionString) {
+        SRAAccession.appVersionString = appVersionString;
+    }
+
+    /**
+     * @deprecated
+     * @return true if SRA successfully loaded native libraries and fully initialized,
+     * false otherwise
+     */
+    public static boolean isSupported() {
+        return checkIfInitialized() == null;
+    }
+
+    /**
+     * Tries to initialize SRA. Initialization error is saved during first call,
+     * all subsequent calls will return the same saved error or null.
+     *
+     * @return ExceptionInInitializerError if initialization failed, null if initialization was successful
+     */
+    public static ExceptionInInitializerError checkIfInitialized() {
+        final ExceptionInInitializerError ngsInitError;
+        if (!initTried) {
+            log.debug("Initializing SRA module");
+            ngsInitError = NGS.getInitializationError();
+            if (ngsInitError != null) {
+                log.info("SRA initialization failed. Will not be able to read from SRA");
+            } else {
+                NGS.setAppVersionString(getFullVersionString());
+            }
+            initTried = true;
+        } else {
+            ngsInitError = NGS.getInitializationError();
+        }
+        return ngsInitError;
+    }
+
+    /**
+     * @param acc accession
+     * @return true if a string is a valid SRA accession
+     */
+    public static boolean isValid(String acc) {
+        boolean looksLikeSRA = false;
+        File f = new File(acc);
+        if (f.isFile()) {
+            byte[] buffer = new byte[8];
+            byte[] signature1 = "NCBI.sra".getBytes();
+            byte[] signature2 = "NCBInenc".getBytes();
+
+            try (InputStream is = new FileInputStream(f)) {
+                int numRead = is.read(buffer);
+
+                looksLikeSRA = numRead == buffer.length &&
+                        (Arrays.equals(buffer, signature1) || Arrays.equals(buffer, signature2));
+            } catch (IOException e) {
+                looksLikeSRA = false;
+            }
+        } else if (f.exists()) {
+            // anything else local other than a file is not an SRA archive
+            looksLikeSRA = false;
+        } else {
+            looksLikeSRA = acc.toUpperCase().matches ( "^[SED]RR[0-9]{6,9}$" );
+        }
+
+        if (!looksLikeSRA) return false;
+
+        final ExceptionInInitializerError initError = checkIfInitialized();
+        if (initError != null) {
+            if (noLibraryDownload && initError instanceof LibraryLoadError) {
+                throw new LinkageError(
+                        "Failed to load SRA native libraries and auto-download is disabled. " +
+                        "Please re-run with JVM argument -Dsamjdk.sra_libraries_download=true to enable auto-download of native libraries",
+                        initError
+                );
+            } else {
+                throw initError;
+            }
+        }
+
+        return NGS.isValid(acc);
+    }
+
+    /**
+     * @param acc accession
+     */
+    public SRAAccession(String acc) {
+        this.acc = acc;
+    }
+
+    public String toString() {
+        return acc;
+    }
+
+    /**
+     * @return true if contained string is an SRA accession
+     */
+    public boolean isValid() {
+        return SRAAccession.isValid(acc);
+    }
+
+    private static String getFullVersionString() {
+        String versionString = appVersionString == null ? defaultAppVersionString : appVersionString;
+        versionString += " through " + htsJdkVersionString;
+        return versionString;
+    }
+}
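A short sketch of the accession checks described above; the accession string and version string are hypothetical, and isValid() will trigger loading of the SRA native libraries.

    import htsjdk.samtools.sra.SRAAccession;

    public class SRAAccessionExample {
        public static void main(String[] args) {
            // Optional: identify the calling software to SRA before first use.
            SRAAccession.setAppVersionString("my-tool 1.0");
            // Accepted forms: an accession such as SRR000123, or a path to a local .sra file.
            final SRAAccession acc = new SRAAccession("SRR000123");
            System.out.println(acc + " valid? " + acc.isValid());
        }
    }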
diff --git a/src/main/java/htsjdk/samtools/sra/SRAAlignmentIterator.java b/src/main/java/htsjdk/samtools/sra/SRAAlignmentIterator.java
new file mode 100644
index 0000000..650ddce
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/sra/SRAAlignmentIterator.java
@@ -0,0 +1,209 @@
+/*===========================================================================
+*
+*                            PUBLIC DOMAIN NOTICE
+*               National Center for Biotechnology Information
+*
+*  This software/database is a "United States Government Work" under the
+*  terms of the United States Copyright Act.  It was written as part of
+*  the author's official duties as a United States Government employee and
+*  thus cannot be copyrighted.  This software/database is freely available
+*  to the public for use. The National Library of Medicine and the U.S.
+*  Government have not placed any restriction on its use or reproduction.
+*
+*  Although all reasonable efforts have been taken to ensure the accuracy
+*  and reliability of the software and data, the NLM and the U.S.
+*  Government do not and cannot warrant the performance or results that
+*  may be obtained by using this software or data. The NLM and the U.S.
+*  Government disclaim all warranties, express or implied, including
+*  warranties of performance, merchantability or fitness for any particular
+*  purpose.
+*
+*  Please cite the author in any work or product based on this material.
+*
+* ===========================================================================
+*
+*/
+
+package htsjdk.samtools.sra;
+
+
+import htsjdk.samtools.Chunk;
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.SRAIterator;
+import htsjdk.samtools.ValidationStringency;
+import htsjdk.samtools.util.CloseableIterator;
+import ngs.Alignment;
+import ngs.AlignmentIterator;
+import ngs.ErrorMsg;
+import ngs.ReadCollection;
+import ngs.Reference;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.NoSuchElementException;
+
+
+/**
+ * Iterator over aligned reads.
+ * Used by SRAIterator.
+ * Created by andrii.nikitiuk on 9/3/15.
+ */
+public class SRAAlignmentIterator implements CloseableIterator<SAMRecord> {
+    private ValidationStringency validationStringency;
+
+    private SRAAccession accession;
+    private ReadCollection run;
+    private SAMFileHeader header;
+    private ReferenceCache cachedReferences;
+    private List<Long> referencesLengths;
+    private Iterator<Chunk> referencesChunksIterator;
+    private int currentReference = -1;
+
+    private boolean hasMoreReferences = true;
+
+    private AlignmentIterator alignedIterator;
+    private Boolean hasMoreAlignments = false;
+
+    private SRALazyRecord lastRecord;
+
+    /**
+     * @param run opened read collection
+     * @param header sam header
+     * @param cachedReferences list of cached references shared among all iterators from a single SRAFileReader
+     * @param recordRangeInfo info about record ranges within the SRA archive
+     * @param chunk used to determine which alignments the iterator should return
+     */
+    public SRAAlignmentIterator(SRAAccession accession, final ReadCollection run, final SAMFileHeader header, ReferenceCache cachedReferences,
+                                final SRAIterator.RecordRangeInfo recordRangeInfo, final Chunk chunk) {
+        this.accession = accession;
+        this.run = run;
+        this.header = header;
+        this.cachedReferences = cachedReferences;
+        this.referencesLengths = recordRangeInfo.getReferenceLengthsAligned();
+
+        referencesChunksIterator = getReferenceChunks(chunk).iterator();
+
+        try {
+            nextReference();
+        } catch (final Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    @Override
+    public boolean hasNext() {
+        // detach the previously returned record before advancing the iterator
+        if (lastRecord != null) {
+            lastRecord.detachFromIterator();
+            lastRecord = null;
+        }
+
+        if (hasMoreAlignments == null) {
+            try {
+                hasMoreAlignments = alignedIterator.nextAlignment();
+            } catch (ErrorMsg e) {
+                throw new RuntimeException(e);
+            }
+        }
+        while (!hasMoreAlignments && hasMoreReferences) {
+            nextReference();
+        }
+
+        return hasMoreAlignments;
+    }
+
+    @Override
+    public SAMRecord next() {
+        if (!hasNext()) {
+            throw new NoSuchElementException("No more alignments are available");
+        }
+
+        return nextAlignment();
+    }
+
+    @Override
+    public void remove() {
+        throw new UnsupportedOperationException("Removal of records not implemented.");
+    }
+
+    public void setValidationStringency(ValidationStringency validationStringency) {
+        this.validationStringency = validationStringency;
+    }
+
+    private SAMRecord nextAlignment() {
+        try {
+            lastRecord = new SRALazyRecord(header, accession, run, alignedIterator, alignedIterator.getReadId(), alignedIterator.getAlignmentId());
+        } catch (ErrorMsg e) {
+            throw new RuntimeException(e);
+        }
+        if (validationStringency != null) {
+            lastRecord.setValidationStringency(validationStringency);
+        }
+
+        hasMoreAlignments = null;
+
+        return lastRecord;
+    }
+
+    private void nextReference() {
+        if (!hasMoreReferences) {
+            throw new NoSuchElementException("Cannot get next reference - already at last one");
+        }
+
+        try {
+            alignedIterator = null;
+            hasMoreAlignments = false;
+
+            hasMoreReferences = referencesChunksIterator.hasNext();
+            if (!hasMoreReferences) {
+                return;
+            }
+
+            currentReference++;
+            Chunk refChunk = referencesChunksIterator.next();
+            if (refChunk == null) {
+                return;
+            }
+
+            Reference reference = cachedReferences.get(currentReference);
+
+            alignedIterator = reference.getFilteredAlignmentSlice(
+                    refChunk.getChunkStart(), refChunk.getChunkEnd() - refChunk.getChunkStart(),
+                    Alignment.all, Alignment.startWithinSlice | Alignment.passDuplicates | Alignment.passFailed, 0);
+
+            hasMoreAlignments = alignedIterator.nextAlignment();
+        } catch (ErrorMsg e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    private List<Chunk> getReferenceChunks(final Chunk chunk) {
+        List<Chunk> referencesChunks = new ArrayList<Chunk>();
+        long refOffset = 0;
+        for (Long refLen : referencesLengths) {
+            if (chunk.getChunkStart() - refOffset >= refLen || chunk.getChunkEnd() - refOffset <= 0) {
+                referencesChunks.add(null);
+            } else {
+                long refChunkStart = Math.max(chunk.getChunkStart() - refOffset, 0);
+                long refChunkEnd = Math.min(chunk.getChunkEnd() - refOffset, refLen);
+                referencesChunks.add(new Chunk(refChunkStart, refChunkEnd));
+            }
+
+            refOffset += refLen;
+        }
+
+        return referencesChunks;
+    }
+
+    @Override
+    public void close() {
+        if (lastRecord != null) {
+            lastRecord.detachFromIterator();
+            lastRecord = null;
+        }
+
+        alignedIterator = null;
+    }
+}
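
A standalone sketch of the per-reference chunk splitting performed by getReferenceChunks above. ChunkSplitSketch and its split method are illustrative names that mirror the loop's logic; nothing here calls the htsjdk API:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    final class ChunkSplitSketch {
        /** Splits a global [start, end) range over concatenated reference lengths; null marks a reference the range does not touch. */
        static List<long[]> split(final long start, final long end, final List<Long> refLengths) {
            final List<long[]> perReference = new ArrayList<>();
            long refOffset = 0;
            for (final long refLen : refLengths) {
                if (start - refOffset >= refLen || end - refOffset <= 0) {
                    perReference.add(null);
                } else {
                    // Clip the global range to this reference's local coordinates.
                    perReference.add(new long[] {
                            Math.max(start - refOffset, 0),
                            Math.min(end - refOffset, refLen)});
                }
                refOffset += refLen;
            }
            return perReference;
        }

        public static void main(final String[] args) {
            // Two references of length 100 and 50; the range [80, 120) covers the tail of the
            // first reference ([80, 100)) and the head of the second ([0, 20)).
            for (final long[] c : split(80, 120, Arrays.asList(100L, 50L))) {
                System.out.println(c == null ? "skip" : "[" + c[0] + ", " + c[1] + ")");
            }
        }
    }
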
diff --git a/src/main/java/htsjdk/samtools/sra/SRAIndexedSequenceFile.java b/src/main/java/htsjdk/samtools/sra/SRAIndexedSequenceFile.java
new file mode 100644
index 0000000..1d9a471
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/sra/SRAIndexedSequenceFile.java
@@ -0,0 +1,112 @@
+package htsjdk.samtools.sra;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.SAMSequenceRecord;
+import htsjdk.samtools.reference.ReferenceSequence;
+import htsjdk.samtools.reference.ReferenceSequenceFile;
+import ngs.ErrorMsg;
+import ngs.ReadCollection;
+import ngs.Reference;
+import ngs.ReferenceIterator;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+/**
+ * Allows reading reference sequence data from an SRA archive.
+ */
+public class SRAIndexedSequenceFile implements ReferenceSequenceFile {
+    private SRAAccession acc;
+    private ReadCollection run;
+    private Reference cachedReference;
+
+    private Iterator<SAMSequenceRecord> sequenceRecordIterator;
+
+    protected SAMSequenceDictionary sequenceDictionary;
+
+    /**
+     * @param acc accession
+     */
+    public SRAIndexedSequenceFile(SRAAccession acc) {
+        this.acc = acc;
+
+        if (!acc.isValid()) {
+            throw new RuntimeException("Passed an invalid SRA accession into SRA reader: " + acc);
+        }
+
+        try {
+            run = gov.nih.nlm.ncbi.ngs.NGS.openReadCollection(acc.toString());
+            sequenceDictionary = loadSequenceDictionary();
+        } catch (final ErrorMsg e) {
+            throw new RuntimeException(e);
+        }
+
+        reset();
+    }
+
+    @Override
+    public SAMSequenceDictionary getSequenceDictionary() {
+        return sequenceDictionary;
+    }
+
+    @Override
+    public ReferenceSequence nextSequence() {
+        SAMSequenceRecord sequence = sequenceRecordIterator.next();
+        return getSubsequenceAt(sequence.getSequenceName(), 1L, sequence.getSequenceLength());
+    }
+
+    @Override
+    public void reset() {
+        sequenceRecordIterator = sequenceDictionary.getSequences().iterator();
+    }
+
+    @Override
+    public boolean isIndexed() {
+        return true;
+    }
+
+    @Override
+    public ReferenceSequence getSequence(String contig) {
+        return getSubsequenceAt(contig, 1L, sequenceDictionary.getSequence(contig).getSequenceLength());
+    }
+
+    @Override
+    public ReferenceSequence getSubsequenceAt(String contig, long start, long stop) {
+        SAMSequenceRecord sequence = sequenceDictionary.getSequence(contig);
+        int referenceIndex = sequence.getSequenceIndex();
+
+        byte[] bases;
+
+        try {
+            Reference reference;
+            synchronized (this) {
+                if (cachedReference == null || !cachedReference.getCanonicalName().equals(contig)) {
+                    cachedReference = run.getReference(contig);
+                }
+                reference = cachedReference;
+
+                bases = reference.getReferenceBases(start - 1, stop - (start - 1)).getBytes();
+            }
+        } catch (ErrorMsg e) {
+            throw new RuntimeException(e);
+        }
+
+        return new ReferenceSequence(contig, referenceIndex, bases);
+    }
+
+    @Override
+    public void close() throws IOException {
+        cachedReference = null;
+    }
+
+    protected SAMSequenceDictionary loadSequenceDictionary() throws ErrorMsg {
+        SAMSequenceDictionary dict = new SAMSequenceDictionary();
+
+        ReferenceIterator itRef = run.getReferences();
+        while (itRef.nextReference()) {
+            dict.addSequence(new SAMSequenceRecord(itRef.getCanonicalName(), (int) itRef.getLength()));
+        }
+
+        return dict;
+    }
+}
\ No newline at end of file
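
A short usage sketch for SRAIndexedSequenceFile above. The accession string is a placeholder, and running this requires a working NGS/SRA installation:

    import htsjdk.samtools.reference.ReferenceSequence;
    import htsjdk.samtools.sra.SRAAccession;
    import htsjdk.samtools.sra.SRAIndexedSequenceFile;

    public class SraReferenceSketch {
        public static void main(final String[] args) throws Exception {
            // Placeholder accession; any aligned run with references would do.
            final SRAIndexedSequenceFile refFile = new SRAIndexedSequenceFile(new SRAAccession("SRRxxxxxxx"));
            System.out.println(refFile.getSequenceDictionary().getSequences().size() + " reference sequences");

            // Coordinates are 1-based and inclusive, per the getSubsequenceAt contract above.
            final ReferenceSequence first = refFile.nextSequence();
            System.out.println(first.getName() + ": " + first.getBases().length + " bases");
            refFile.close();
        }
    }
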
diff --git a/src/java/htsjdk/samtools/sra/SRALazyRecord.java b/src/main/java/htsjdk/samtools/sra/SRALazyRecord.java
similarity index 100%
rename from src/java/htsjdk/samtools/sra/SRALazyRecord.java
rename to src/main/java/htsjdk/samtools/sra/SRALazyRecord.java
diff --git a/src/java/htsjdk/samtools/sra/SRAUnalignmentIterator.java b/src/main/java/htsjdk/samtools/sra/SRAUnalignmentIterator.java
similarity index 100%
rename from src/java/htsjdk/samtools/sra/SRAUnalignmentIterator.java
rename to src/main/java/htsjdk/samtools/sra/SRAUnalignmentIterator.java
diff --git a/src/java/htsjdk/samtools/sra/SRAUtils.java b/src/main/java/htsjdk/samtools/sra/SRAUtils.java
similarity index 100%
rename from src/java/htsjdk/samtools/sra/SRAUtils.java
rename to src/main/java/htsjdk/samtools/sra/SRAUtils.java
diff --git a/src/java/htsjdk/samtools/util/AbstractAsyncWriter.java b/src/main/java/htsjdk/samtools/util/AbstractAsyncWriter.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/AbstractAsyncWriter.java
rename to src/main/java/htsjdk/samtools/util/AbstractAsyncWriter.java
diff --git a/src/java/htsjdk/samtools/util/AbstractIterator.java b/src/main/java/htsjdk/samtools/util/AbstractIterator.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/AbstractIterator.java
rename to src/main/java/htsjdk/samtools/util/AbstractIterator.java
diff --git a/src/java/htsjdk/samtools/util/AbstractProgressLogger.java b/src/main/java/htsjdk/samtools/util/AbstractProgressLogger.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/AbstractProgressLogger.java
rename to src/main/java/htsjdk/samtools/util/AbstractProgressLogger.java
diff --git a/src/java/htsjdk/samtools/util/AsciiWriter.java b/src/main/java/htsjdk/samtools/util/AsciiWriter.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/AsciiWriter.java
rename to src/main/java/htsjdk/samtools/util/AsciiWriter.java
diff --git a/src/java/htsjdk/samtools/util/AsyncBufferedIterator.java b/src/main/java/htsjdk/samtools/util/AsyncBufferedIterator.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/AsyncBufferedIterator.java
rename to src/main/java/htsjdk/samtools/util/AsyncBufferedIterator.java
diff --git a/src/java/htsjdk/samtools/util/BinaryCodec.java b/src/main/java/htsjdk/samtools/util/BinaryCodec.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/BinaryCodec.java
rename to src/main/java/htsjdk/samtools/util/BinaryCodec.java
diff --git a/src/java/htsjdk/samtools/util/BlockCompressedFilePointerUtil.java b/src/main/java/htsjdk/samtools/util/BlockCompressedFilePointerUtil.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/BlockCompressedFilePointerUtil.java
rename to src/main/java/htsjdk/samtools/util/BlockCompressedFilePointerUtil.java
diff --git a/src/java/htsjdk/samtools/util/BlockCompressedInputStream.java b/src/main/java/htsjdk/samtools/util/BlockCompressedInputStream.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/BlockCompressedInputStream.java
rename to src/main/java/htsjdk/samtools/util/BlockCompressedInputStream.java
diff --git a/src/main/java/htsjdk/samtools/util/BlockCompressedOutputStream.java b/src/main/java/htsjdk/samtools/util/BlockCompressedOutputStream.java
new file mode 100644
index 0000000..408282f
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/util/BlockCompressedOutputStream.java
@@ -0,0 +1,392 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools.util;
+
+import htsjdk.samtools.util.zip.DeflaterFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.zip.CRC32;
+import java.util.zip.Deflater;
+
+/**
+ * Writer for a file that is a series of gzip blocks (BGZF format).  The caller just treats it as an
+ * OutputStream, and under the covers a gzip block is written when the amount of uncompressed as-yet-unwritten
+ * bytes reaches a threshold.
+ *
+ * The advantage of BGZF over conventional gzip is that BGZF allows for seeking without having to scan through
+ * the entire file up to the position being sought.
+ *
+ * Note that the flush() method should not be called by clients
+ * unless you know what you're doing, because it forces a gzip block to be written even if the
+ * number of buffered bytes has not reached the threshold.  close(), on the other hand, must be called
+ * when done writing in order to force the last gzip block to be written.
+ *
+ * c.f. http://samtools.sourceforge.net/SAM1.pdf for details of BGZF file format.
+ */
+public class BlockCompressedOutputStream
+        extends OutputStream
+        implements LocationAware
+{
+
+    private static final Log log = Log.getInstance(BlockCompressedOutputStream.class);
+
+    private static int defaultCompressionLevel = BlockCompressedStreamConstants.DEFAULT_COMPRESSION_LEVEL;
+    private static DeflaterFactory defaultDeflaterFactory = new DeflaterFactory();
+
+    /**
+     * Sets the GZip compression level for subsequently created BlockCompressedOutputStream objects
+     * that do not specify a compression level.
+     * @param compressionLevel 0 <= compressionLevel <= 9 (Deflater.NO_COMPRESSION to Deflater.BEST_COMPRESSION)
+     */
+    public static void setDefaultCompressionLevel(final int compressionLevel) {
+        if (compressionLevel < Deflater.NO_COMPRESSION || compressionLevel > Deflater.BEST_COMPRESSION) {
+            throw new IllegalArgumentException("Invalid compression level: " + compressionLevel);
+        }
+        defaultCompressionLevel = compressionLevel;
+    }
+
+    public static int getDefaultCompressionLevel() {
+        return defaultCompressionLevel;
+    }
+
+    /**
+     * Sets the default {@link DeflaterFactory} that will be used for all instances unless specified otherwise in the constructor.
+     * If this method is not called the default is a factory that will create the JDK {@link Deflater}.
+     * @param deflaterFactory non-null default factory.
+     */
+    public static void setDefaultDeflaterFactory(final DeflaterFactory deflaterFactory) {
+        if (deflaterFactory == null) {
+            throw new IllegalArgumentException("null deflaterFactory");
+        }
+        defaultDeflaterFactory = deflaterFactory;
+    }
+
+    public static DeflaterFactory getDefaultDeflaterFactory() {
+        return defaultDeflaterFactory;
+    }
+
+    private final BinaryCodec codec;
+    private final byte[] uncompressedBuffer = new byte[BlockCompressedStreamConstants.DEFAULT_UNCOMPRESSED_BLOCK_SIZE];
+    private int numUncompressedBytes = 0;
+    private final byte[] compressedBuffer =
+            new byte[BlockCompressedStreamConstants.MAX_COMPRESSED_BLOCK_SIZE -
+                    BlockCompressedStreamConstants.BLOCK_HEADER_LENGTH];
+    private final Deflater deflater;
+
+    // A second deflater is created for the very unlikely case where the regular deflation actually makes
+    // things bigger, and the compressed block is too big.  It should be possible to downshift the
+    // primary deflater to NO_COMPRESSION level, recompress, and then restore it to its original setting,
+    // but in practice that doesn't work.
+    // The motivation for deflating at NO_COMPRESSION level is that it will predictably produce compressed
+    // output that is 10 bytes larger than the input, and the threshold at which a block is generated is such that
+    // the size of tbe final gzip block will always be <= 64K.  This is preferred over the previous method,
+    // which would attempt to compress up to 64K bytes, and if the resulting compressed block was too large,
+    // try compressing fewer input bytes (aka "downshifting').  The problem with downshifting is that
+    // getFilePointer might return an inaccurate value.
+    // I assume (AW 29-Oct-2013) that there is no value in using hardware-assisted deflater for no-compression mode,
+    // so just use JDK standard.
+    private final Deflater noCompressionDeflater = new Deflater(Deflater.NO_COMPRESSION, true);
+    private final CRC32 crc32 = new CRC32();
+    private File file = null;
+    private long mBlockAddress = 0;
+
+
+    // Really a local variable, but allocate once to reduce GC burden.
+    private final byte[] singleByteArray = new byte[1];
+
+    /**
+     * Uses the default compression level, which is 5 unless changed by setDefaultCompressionLevel.
+     * Note: this constructor uses the default {@link DeflaterFactory}, see {@link #getDefaultDeflaterFactory()}.
+     * Use {@link #BlockCompressedOutputStream(File, int, DeflaterFactory)} to specify a custom factory.
+     */
+    public BlockCompressedOutputStream(final String filename) {
+        this(filename, defaultCompressionLevel);
+    }
+
+    /**
+     * Uses the default compression level, which is 5 unless changed by setDefaultCompressionLevel.
+     * Note: this constructor uses the default {@link DeflaterFactory}, see {@link #getDefaultDeflaterFactory()}.
+     * Use {@link #BlockCompressedOutputStream(File, int, DeflaterFactory)} to specify a custom factory.
+     */
+    public BlockCompressedOutputStream(final File file) {
+        this(file, defaultCompressionLevel);
+    }
+
+    /**
+     * Prepare to compress at the given compression level
+     * Note: this constructor uses the default {@link DeflaterFactory}, see {@link #getDefaultDeflaterFactory()}.
+     * @param compressionLevel 1 <= compressionLevel <= 9
+     */
+    public BlockCompressedOutputStream(final String filename, final int compressionLevel) {
+        this(new File(filename), compressionLevel);
+    }
+
+    /**
+     * Prepare to compress at the given compression level
+     * @param compressionLevel 1 <= compressionLevel <= 9
+     * Note: this constructor uses the default {@link DeflaterFactory}, see {@link #getDefaultDeflaterFactory()}.
+     * Use {@link #BlockCompressedOutputStream(File, int, DeflaterFactory)} to specify a custom factory.
+     */
+    public BlockCompressedOutputStream(final File file, final int compressionLevel) {
+        this(file, compressionLevel, defaultDeflaterFactory);
+    }
+
+    /**
+     * Prepare to compress at the given compression level
+     * @param compressionLevel 1 <= compressionLevel <= 9
+     * @param deflaterFactory custom factory to create deflaters (overrides the default)
+     */
+    public BlockCompressedOutputStream(final File file, final int compressionLevel, final DeflaterFactory deflaterFactory) {
+        this.file = file;
+        codec = new BinaryCodec(file, true);
+        deflater = deflaterFactory.makeDeflater(compressionLevel, true);
+        log.debug("Using deflater: " + deflater.getClass().getSimpleName());
+    }
+
+    /**
+     * Uses the default compression level, which is 5 unless changed by setDefaultCompressionLevel.
+     * Note: this constructor uses the default {@link DeflaterFactory}, see {@link #getDefaultDeflaterFactory()}.
+     * Use {@link #BlockCompressedOutputStream(OutputStream, File, int, DeflaterFactory)} to specify a custom factory.
+     *
+     * @param file may be null
+     */
+    public BlockCompressedOutputStream(final OutputStream os, final File file) {
+        this(os, file, defaultCompressionLevel);
+    }
+
+    /**
+     * Note: this constructor uses the default {@link DeflaterFactory}, see {@link #getDefaultDeflaterFactory()}.
+     * Use {@link #BlockCompressedOutputStream(OutputStream, File, int, DeflaterFactory)} to specify a custom factory.
+     */
+    public BlockCompressedOutputStream(final OutputStream os, final File file, final int compressionLevel) {
+        this(os, file, compressionLevel, defaultDeflaterFactory);
+    }
+
+    /**
+     * Creates the output stream.
+     * @param os output stream to create a BlockCompressedOutputStream from
+     * @param file file to which to write the output or null if not available
+     * @param compressionLevel the compression level (0-9)
+     * @param deflaterFactory custom factory to create deflaters (overrides the default)
+     */
+    public BlockCompressedOutputStream(final OutputStream os, final File file, final int compressionLevel, final DeflaterFactory deflaterFactory) {
+        this.file = file;
+        codec = new BinaryCodec(os);
+        if (file != null) {
+            codec.setOutputFileName(file.getAbsolutePath());
+        }
+        deflater = deflaterFactory.makeDeflater(compressionLevel, true);
+        log.debug("Using deflater: " + deflater.getClass().getSimpleName());
+    }
+
+    /**
+     *
+     * @param location May be null.  Used for error messages, and for checking file termination.
+     * @param output May or may not already be a BlockCompressedOutputStream.
+     * @return A BlockCompressedOutputStream, either by wrapping the given OutputStream, or by casting if it already
+     *         is a BCOS.
+     */
+    public static BlockCompressedOutputStream maybeBgzfWrapOutputStream(final File location, OutputStream output) {
+        if (!(output instanceof BlockCompressedOutputStream)) {
+           return new BlockCompressedOutputStream(output, location);
+        } else {
+           return (BlockCompressedOutputStream)output;
+        }
+    }
+
+    /**
+     * Writes b.length bytes from the specified byte array to this output stream. The general contract for write(b)
+     * is that it should have exactly the same effect as the call write(b, 0, b.length).
+     * @param bytes the data
+     */
+    @Override
+    public void write(final byte[] bytes) throws IOException {
+        write(bytes, 0, bytes.length);
+    }
+
+    /**
+     * Writes len bytes from the specified byte array starting at offset off to this output stream. The general
+     * contract for write(b, off, len) is that some of the bytes in the array b are written to the output stream in order;
+     * element b[off] is the first byte written and b[off+len-1] is the last byte written by this operation.
+     *
+     * @param bytes the data
+     * @param startIndex the start offset in the data
+     * @param numBytes the number of bytes to write
+     */
+    @Override
+    public void write(final byte[] bytes, int startIndex, int numBytes) throws IOException {
+        assert(numUncompressedBytes < uncompressedBuffer.length);
+        while (numBytes > 0) {
+            final int bytesToWrite = Math.min(uncompressedBuffer.length - numUncompressedBytes, numBytes);
+            System.arraycopy(bytes, startIndex, uncompressedBuffer, numUncompressedBytes, bytesToWrite);
+            numUncompressedBytes += bytesToWrite;
+            startIndex += bytesToWrite;
+            numBytes -= bytesToWrite;
+            assert(numBytes >= 0);
+            if (numUncompressedBytes == uncompressedBuffer.length) {
+                deflateBlock();
+            }
+        }
+    }
+
+    /**
+     * WARNING: flush() affects the output format, because it causes the current contents of uncompressedBuffer
+     * to be compressed and written, even if it isn't full.  Unless you know what you're doing, don't call flush().
+     * Instead, call close(), which will flush any unwritten data before closing the underlying stream.
+     *
+     */
+    @Override
+    public void flush() throws IOException {
+        while (numUncompressedBytes > 0) {
+            deflateBlock();
+        }
+        codec.getOutputStream().flush();
+    }
+
+    /**
+     * close() must be called in order to flush any remaining buffered bytes.  An unclosed file will likely be
+     * defective.
+     *
+     */
+    @Override
+    public void close() throws IOException {
+        flush();
+        // For debugging...
+        // if (numberOfThrottleBacks > 0) {
+        //     System.err.println("In BlockCompressedOutputStream, had to throttle back " + numberOfThrottleBacks +
+        //                        " times for file " + codec.getOutputFileName());
+        // }
+        codec.writeBytes(BlockCompressedStreamConstants.EMPTY_GZIP_BLOCK);
+        codec.close();
+        // Can't re-open something that is not a regular file, e.g. a named pipe or an output stream
+        if (this.file == null || !this.file.isFile()) return;
+        if (BlockCompressedInputStream.checkTermination(this.file) !=
+                BlockCompressedInputStream.FileTermination.HAS_TERMINATOR_BLOCK) {
+            throw new IOException("Terminator block not found after closing BGZF file " + this.file);
+        }
+    }
+
+    /**
+     * Writes the specified byte to this output stream. The general contract for write is that one byte is written
+     * to the output stream. The byte to be written is the eight low-order bits of the argument b.
+     * The 24 high-order bits of b are ignored.
+     * @param bite the byte to write; only the eight low-order bits are used
+     * @throws IOException if an I/O error occurs while writing to the underlying stream
+     */
+    public void write(final int bite) throws IOException {
+        singleByteArray[0] = (byte)bite;
+        write(singleByteArray);
+    }
+
+    /** Encodes the current position as a BGZF virtual file pointer.
+     * The upper 48 bits are the byte offset of the current block in the compressed stream.
+     * The lower 16 bits are the byte offset into the uncompressed data within that block.
+     */
+    public long getFilePointer(){
+        return BlockCompressedFilePointerUtil.makeFilePointer(mBlockAddress, numUncompressedBytes);
+    }
+
+    @Override
+    public long getPosition() {
+        return getFilePointer();
+    }
+
+    /**
+     * Attempts to write the data in uncompressedBuffer to the underlying stream as a gzip block.
+     * If the compressed block would exceed the maximum allowed size, the data is re-deflated at
+     * NO_COMPRESSION level, which is guaranteed to fit (see the comment on noCompressionDeflater
+     * above for why this replaced the older "downshifting" approach).
+     * @return size of gzip block that was written.
+     */
+    private int deflateBlock() {
+        if (numUncompressedBytes == 0) {
+            return 0;
+        }
+        final int bytesToCompress = numUncompressedBytes;
+        // Compress the input
+        deflater.reset();
+        deflater.setInput(uncompressedBuffer, 0, bytesToCompress);
+        deflater.finish();
+        int compressedSize = deflater.deflate(compressedBuffer, 0, compressedBuffer.length);
+
+        // If it didn't all fit in compressedBuffer.length, set compression level to NO_COMPRESSION
+        // and try again.  This should always fit.
+        if (!deflater.finished()) {
+            noCompressionDeflater.reset();
+            noCompressionDeflater.setInput(uncompressedBuffer, 0, bytesToCompress);
+            noCompressionDeflater.finish();
+            compressedSize = noCompressionDeflater.deflate(compressedBuffer, 0, compressedBuffer.length);
+            if (!noCompressionDeflater.finished()) {
+                throw new IllegalStateException("unpossible");
+            }
+        }
+        // Data compressed small enough, so write it out.
+        crc32.reset();
+        crc32.update(uncompressedBuffer, 0, bytesToCompress);
+
+        final int totalBlockSize = writeGzipBlock(compressedSize, bytesToCompress, crc32.getValue());
+        assert(bytesToCompress <= numUncompressedBytes);
+
+        // Clear out from uncompressedBuffer the data that was written
+        if (bytesToCompress == numUncompressedBytes) {
+            numUncompressedBytes = 0;
+        } else {
+            System.arraycopy(uncompressedBuffer, bytesToCompress, uncompressedBuffer, 0,
+                    numUncompressedBytes - bytesToCompress);
+            numUncompressedBytes -= bytesToCompress;
+        }
+        mBlockAddress += totalBlockSize;
+        return totalBlockSize;
+    }
+
+    /**
+     * Writes the entire gzip block, assuming the compressed data is stored in compressedBuffer
+     * @return  size of gzip block that was written.
+     */
+    private int writeGzipBlock(final int compressedSize, final int uncompressedSize, final long crc) {
+        // Init gzip header
+        codec.writeByte(BlockCompressedStreamConstants.GZIP_ID1);
+        codec.writeByte(BlockCompressedStreamConstants.GZIP_ID2);
+        codec.writeByte(BlockCompressedStreamConstants.GZIP_CM_DEFLATE);
+        codec.writeByte(BlockCompressedStreamConstants.GZIP_FLG);
+        codec.writeInt(0); // Modification time
+        codec.writeByte(BlockCompressedStreamConstants.GZIP_XFL);
+        codec.writeByte(BlockCompressedStreamConstants.GZIP_OS_UNKNOWN);
+        codec.writeShort(BlockCompressedStreamConstants.GZIP_XLEN);
+        codec.writeByte(BlockCompressedStreamConstants.BGZF_ID1);
+        codec.writeByte(BlockCompressedStreamConstants.BGZF_ID2);
+        codec.writeShort(BlockCompressedStreamConstants.BGZF_LEN);
+        final int totalBlockSize = compressedSize + BlockCompressedStreamConstants.BLOCK_HEADER_LENGTH +
+                BlockCompressedStreamConstants.BLOCK_FOOTER_LENGTH;
+
+        // I don't know why we store block size - 1, but that is what the spec says
+        codec.writeShort((short)(totalBlockSize - 1));
+        codec.writeBytes(compressedBuffer, 0, compressedSize);
+        codec.writeInt((int)crc);
+        codec.writeInt(uncompressedSize);
+        return totalBlockSize;
+    }
+}
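
A small sketch of writing BGZF data with BlockCompressedOutputStream above and inspecting virtual file pointers; the output path and class name are illustrative:

    import htsjdk.samtools.util.BlockCompressedOutputStream;

    import java.io.File;
    import java.nio.charset.StandardCharsets;

    public class BgzfWriteSketch {
        public static void main(final String[] args) throws Exception {
            final File out = new File("example.bgzf");  // illustrative output path
            final BlockCompressedOutputStream bcos = new BlockCompressedOutputStream(out);

            final long start = bcos.getFilePointer();
            bcos.write("hello bgzf\n".getBytes(StandardCharsets.US_ASCII));
            final long end = bcos.getFilePointer();

            // close() flushes the last gzip block and appends the empty terminator block
            // that the close() implementation above verifies.
            bcos.close();

            // Each value packs the compressed block address (upper 48 bits) and the offset
            // within the uncompressed block (lower 16 bits).
            System.out.println("virtual file pointers: " + start + " .. " + end);
        }
    }
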
diff --git a/src/java/htsjdk/samtools/util/BlockCompressedStreamConstants.java b/src/main/java/htsjdk/samtools/util/BlockCompressedStreamConstants.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/BlockCompressedStreamConstants.java
rename to src/main/java/htsjdk/samtools/util/BlockCompressedStreamConstants.java
diff --git a/src/java/htsjdk/samtools/util/BlockGunzipper.java b/src/main/java/htsjdk/samtools/util/BlockGunzipper.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/BlockGunzipper.java
rename to src/main/java/htsjdk/samtools/util/BlockGunzipper.java
diff --git a/src/java/htsjdk/samtools/util/BufferedLineReader.java b/src/main/java/htsjdk/samtools/util/BufferedLineReader.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/BufferedLineReader.java
rename to src/main/java/htsjdk/samtools/util/BufferedLineReader.java
diff --git a/src/java/htsjdk/samtools/util/CigarUtil.java b/src/main/java/htsjdk/samtools/util/CigarUtil.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/CigarUtil.java
rename to src/main/java/htsjdk/samtools/util/CigarUtil.java
diff --git a/src/java/htsjdk/samtools/util/CloseableIterator.java b/src/main/java/htsjdk/samtools/util/CloseableIterator.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/CloseableIterator.java
rename to src/main/java/htsjdk/samtools/util/CloseableIterator.java
diff --git a/src/java/htsjdk/samtools/util/CloserUtil.java b/src/main/java/htsjdk/samtools/util/CloserUtil.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/CloserUtil.java
rename to src/main/java/htsjdk/samtools/util/CloserUtil.java
diff --git a/src/java/htsjdk/samtools/util/CodeUtil.java b/src/main/java/htsjdk/samtools/util/CodeUtil.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/CodeUtil.java
rename to src/main/java/htsjdk/samtools/util/CodeUtil.java
diff --git a/src/java/htsjdk/samtools/util/CollectionUtil.java b/src/main/java/htsjdk/samtools/util/CollectionUtil.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/CollectionUtil.java
rename to src/main/java/htsjdk/samtools/util/CollectionUtil.java
diff --git a/src/java/htsjdk/samtools/util/ComparableTuple.java b/src/main/java/htsjdk/samtools/util/ComparableTuple.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/ComparableTuple.java
rename to src/main/java/htsjdk/samtools/util/ComparableTuple.java
diff --git a/src/java/htsjdk/samtools/util/CoordMath.java b/src/main/java/htsjdk/samtools/util/CoordMath.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/CoordMath.java
rename to src/main/java/htsjdk/samtools/util/CoordMath.java
diff --git a/src/java/htsjdk/samtools/util/CoordSpanInputSteam.java b/src/main/java/htsjdk/samtools/util/CoordSpanInputSteam.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/CoordSpanInputSteam.java
rename to src/main/java/htsjdk/samtools/util/CoordSpanInputSteam.java
diff --git a/src/java/htsjdk/samtools/util/DateParser.java b/src/main/java/htsjdk/samtools/util/DateParser.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/DateParser.java
rename to src/main/java/htsjdk/samtools/util/DateParser.java
diff --git a/src/java/htsjdk/samtools/util/DelegatingIterator.java b/src/main/java/htsjdk/samtools/util/DelegatingIterator.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/DelegatingIterator.java
rename to src/main/java/htsjdk/samtools/util/DelegatingIterator.java
diff --git a/src/java/htsjdk/samtools/util/DiskBackedQueue.java b/src/main/java/htsjdk/samtools/util/DiskBackedQueue.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/DiskBackedQueue.java
rename to src/main/java/htsjdk/samtools/util/DiskBackedQueue.java
diff --git a/src/java/htsjdk/samtools/util/FastLineReader.java b/src/main/java/htsjdk/samtools/util/FastLineReader.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/FastLineReader.java
rename to src/main/java/htsjdk/samtools/util/FastLineReader.java
diff --git a/src/java/htsjdk/samtools/util/FastqQualityFormat.java b/src/main/java/htsjdk/samtools/util/FastqQualityFormat.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/FastqQualityFormat.java
rename to src/main/java/htsjdk/samtools/util/FastqQualityFormat.java
diff --git a/src/java/htsjdk/samtools/util/FileAppendStreamLRUCache.java b/src/main/java/htsjdk/samtools/util/FileAppendStreamLRUCache.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/FileAppendStreamLRUCache.java
rename to src/main/java/htsjdk/samtools/util/FileAppendStreamLRUCache.java
diff --git a/src/java/htsjdk/samtools/util/FormatUtil.java b/src/main/java/htsjdk/samtools/util/FormatUtil.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/FormatUtil.java
rename to src/main/java/htsjdk/samtools/util/FormatUtil.java
diff --git a/src/main/java/htsjdk/samtools/util/Histogram.java b/src/main/java/htsjdk/samtools/util/Histogram.java
new file mode 100644
index 0000000..19cbc1d
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/util/Histogram.java
@@ -0,0 +1,567 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+
+package htsjdk.samtools.util;
+
+import java.io.Serializable;
+import java.util.*;
+
+import static java.lang.Math.*;
+
+/**
+ * Class for computing and accessing histogram type data.  Stored internally in
+ * a sorted Map so that keys can be iterated in order.
+ *
+ * @author Tim Fennell
+ */
+public final class Histogram<K extends Comparable> implements Serializable {
+    private static final long serialVersionUID = 1L;
+    private String binLabel   = "BIN";
+    private String valueLabel = "VALUE";
+    private final NavigableMap<K, Bin<K>> map;
+
+    /** Constructs a new Histogram with default bin and value labels. */
+    public Histogram() {
+        this.map = new TreeMap<>();
+    }
+
+    /** Constructs a new Histogram with supplied bin and value labels. */
+    public Histogram(final String binLabel, final String valueLabel) {
+        this();
+        this.binLabel = binLabel;
+        this.valueLabel = valueLabel;
+    }
+
+    /** Constructs a new Histogram that'll use the supplied comparator to sort keys. */
+    public Histogram(final Comparator<? super K> comparator) {
+        this.map = new TreeMap<>(comparator);
+    }
+
+    /** Constructor that takes labels for the bin and values and a comparator to sort the bins. */
+    public Histogram(final String binLabel, final String valueLabel, final Comparator<? super K> comparator) {
+        this(comparator);
+        this.binLabel = binLabel;
+        this.valueLabel = valueLabel;
+    }
+
+    /** Copy constructor for a histogram. */
+    public Histogram(final Histogram<K> in) {
+        this.map = new TreeMap<>(in.map);
+        this.binLabel = in.binLabel;
+        this.valueLabel = in.valueLabel;
+    }
+
+    /** Represents a bin in the Histogram. */
+    public static class Bin<K extends Comparable> implements Serializable {
+        private static final long serialVersionUID = 1L;
+        private final K id;
+        private double value = 0;
+
+        /** Constructs a new bin with the given ID. */
+        private Bin(final K id) { this.id = id; }
+
+        /** Gets the ID of this bin. */
+        public K getId() { return id; }
+
+        /** Gets the value in the bin. */
+        public double getValue() { return value; }
+
+        /** Returns the String format for the value in the bin. */
+        @Override
+        public String toString() { return String.valueOf(this.value); }
+
+        /** Checks the equality of the bin by ID and value. */
+        @Override
+        public boolean equals(final Object o) {
+            if (this == o) return true;
+            if (o == null || getClass() != o.getClass()) return false;
+
+            final Bin<K> bin = (Bin<K>) o;
+
+            if (Double.compare(bin.value, value) != 0) return false;
+            if (!id.equals(bin.id)) return false;
+
+            return true;
+        }
+
+        @Override
+        public int hashCode() {
+            int result;
+            final long temp;
+            result = id.hashCode();
+            temp = value != +0.0d ? Double.doubleToLongBits(value) : 0L;
+            result = 31 * result + (int) (temp ^ (temp >>> 32));
+            return result;
+        }
+
+        public double getIdValue() {
+            if (id instanceof Number) {
+                return ((Number) id).doubleValue();
+            } else {
+                throw new UnsupportedOperationException("getIdValue only supported for Histogram<? extends Number>");
+            }
+        }
+    }
+
+    /** Prefill the histogram with the supplied set of bins. */
+    public void prefillBins(final K... ids) {
+        for (final K id : ids) {
+            map.put(id, new Bin<>(id));
+        }
+    }
+
+    /** Increments the value in the designated bin by 1. */
+    public void increment(final K id) {
+        increment(id, 1d);
+    }
+
+    /** Increments the value in the designated bin by the supplied increment. */
+    public void increment(final K id, final double increment) {
+        Bin<K> bin = map.get(id);
+        if (bin == null) {
+            bin = new Bin<>(id);
+            map.put(id, bin);
+        }
+
+        bin.value += increment;
+    }
+
+    public String getBinLabel() { return binLabel; }
+    public void setBinLabel(final String binLabel) { this.binLabel = binLabel; }
+
+    public String getValueLabel() { return valueLabel; }
+    public void setValueLabel(final String valueLabel) { this.valueLabel = valueLabel; }
+
+    /** Checks that the labels and values in the two histograms are identical. */
+    @Override
+    public boolean equals(final Object o) {
+        if (o == this) {
+            return true;
+        }
+        return o != null &&
+                (o instanceof Histogram) &&
+                ((Histogram) o).binLabel.equals(this.binLabel) &&
+                ((Histogram) o).valueLabel.equals(this.valueLabel) &&
+                ((Histogram) o).map.equals(this.map);
+    }
+
+    @Override
+    public String toString() {
+        return map.toString();
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(binLabel, valueLabel, map);
+    }
+
+    /**
+     * Assuming that the key type for the histogram is a Number type, returns the mean of
+     * all the items added to the histogram.
+     */
+    public double getMean() {
+        // Could use simply getSum() / getCount(), but that would require iterating over the
+        // values() set twice, which seems inefficient given how simple the computation is.
+        double product=0, totalCount=0;
+        for (final Bin<K> bin : map.values()) {
+            final double idValue = bin.getIdValue();
+            final double count   = bin.getValue();
+
+            product += idValue * count;
+            totalCount += count;
+        }
+
+        return product / totalCount;
+    }
+
+    /**
+     * Returns the sum of the products of the histogram bin ids and the number of entries in each bin.
+     * Note: This is only supported if this histogram stores instances of Number.
+     */
+    public double getSum() {
+        double total = 0;
+        for (final Bin<K> bin : map.values()) {
+            total += bin.getValue() * bin.getIdValue();
+        }
+
+        return total;
+    }
+
+    /**
+     * Returns the sum of the number of entries in each bin.
+     */
+    public double getSumOfValues() {
+        double total = 0;
+        for (final Bin<K> bin : map.values()) {
+            total += bin.getValue();
+        }
+
+        return total;
+    }
+
+    public double getStandardDeviation() {
+        final double mean = getMean();
+
+        double count = 0;
+        double total = 0;
+
+        for (final Bin<K> bin : map.values()) {
+            final double localCount = bin.getValue();
+            final double value = bin.getIdValue();
+
+            count += localCount;
+            total += localCount * pow(value - mean, 2);
+        }
+
+        return Math.sqrt(total / (count-1));
+    }
+
+    /**
+     * Calculates the mean bin size
+     */
+    public double getMeanBinSize() {
+        return (getSumOfValues() / size());
+    }
+
+    /**
+     * Returns the size of this histogram.
+     */
+    public int size() {
+        return map.size();
+    }
+
+    /**
+     * Returns the comparator used to order the keys in this histogram, or
+     * {@code null} if this histogram uses the {@linkplain Comparable
+     * natural ordering} of its keys.
+     *
+     * @return the comparator used to order the keys in this histogram,
+     *         or {@code null} if this histogram uses the natural ordering
+     *         of its keys
+     */
+    public Comparator<? super K> comparator() {
+        return map.comparator();
+    }
+
+    /**
+     * Calculates the median bin size
+     */
+    public double getMedianBinSize() {
+        if (size() == 0) {
+            return 0;
+        }
+
+        final List<Double> binValues = new ArrayList<>();
+        for (final Bin<K> bin : values()) {
+            binValues.add(bin.getValue());
+        }
+        Collections.sort(binValues);
+
+        final int midPoint = binValues.size() / 2;
+        double median = binValues.get(midPoint);
+        if (binValues.size() % 2 == 0) {
+            median = (median + binValues.get(midPoint-1)) / 2;
+        }
+
+        return median;
+    }
+
+    /**
+     * Returns a {@link Collection} view of the values contained in this histogram.
+     * The collection's iterator returns the values in ascending order
+     * of the corresponding keys.
+     */
+    public Collection<Bin<K>> values() {
+        return map.values();
+    }
+
+    /**
+     * Calculates the standard deviation of the bin size
+     */
+    public double getStandardDeviationBinSize(final double mean) {
+        double total = 0;
+        for(final Bin<K> bin : values()) {
+            total += Math.pow(bin.getValue() - mean, 2);
+        }
+        return Math.sqrt(total / (Math.max(1,values().size()-1)));
+    }
+
+    /**
+     * Gets the bin in which the given percentile falls.
+     *
+     * @param percentile a value between 0 and 1
+     * @return the id of the bin in which the given percentile falls
+     */
+    public double getPercentile(final double percentile) {
+        if (percentile <= 0) throw new IllegalArgumentException("Cannot query percentiles of 0 or below");
+        if (percentile >= 1) throw new IllegalArgumentException("Cannot query percentiles of 1 or above");
+
+        double total = getCount();
+        double sofar = 0;
+        for (Bin<K> bin : values()) {
+            sofar += bin.getValue();
+            if (sofar / total >= percentile) return bin.getIdValue();
+        }
+
+        throw new IllegalStateException("Could not find percentile: " + percentile);
+    }
+
+    /**
+     * Returns the cumulative probability of observing a value <= v when sampling the
+     * distribution represented by this histogram.
+     * @throws UnsupportedOperationException if this histogram does not store instances of Number
+     */
+    public double getCumulativeProbability(final double v) {
+        double count = 0;
+        double total = 0;
+
+        for (final Bin<K> bin : values()) {
+            final double binValue = bin.getIdValue();
+            if (binValue <= v) count += bin.getValue();
+            total += bin.getValue();
+        }
+
+        return count / total;
+    }
+
+    public double getMedian() {
+        double total = 0;
+        double count = getCount();
+
+        // Base cases
+        if (count == 0) return 0;
+        if (count == 1) return values().iterator().next().getIdValue();
+
+        final double midLow, midHigh;
+        if (count % 2 == 0) {
+            midLow = count / 2;
+            midHigh = midLow + 1;
+        }
+        else {
+            midLow = Math.ceil(count / 2);
+            midHigh = midLow;
+        }
+
+        Double midLowValue  = null;
+        Double midHighValue = null;
+        for (final Bin<K> bin : values()) {
+            total += bin.getValue();
+            if (midLowValue  == null && total >= midLow)  midLowValue  = bin.getIdValue();
+            if (midHighValue == null && total >= midHigh) midHighValue = bin.getIdValue();
+            if (midLowValue != null && midHighValue != null) break;
+        }
+
+        return (midLowValue + midHighValue) / 2;
+    }
+
+    /** Gets the median absolute deviation of the distribution. */
+    public double getMedianAbsoluteDeviation() {
+        final double median = getMedian();
+        final Histogram<Double> deviations = new Histogram<>();
+        for (final Bin<K> bin : values()) {
+            final double dev = abs(bin.getIdValue() - median);
+            deviations.increment(dev, bin.getValue());
+        }
+
+        return deviations.getMedian();
+    }
+
+    /**
+     * Returns a value that is intended to estimate the standard deviation of the distribution, if the distribution is
+     * essentially normal, by using the median absolute deviation to remove the effect of
+     * erroneous massive outliers.
+     */
+    public double estimateSdViaMad() {
+        return 1.4826 * getMedianAbsoluteDeviation();
+    }
+
+    /** Returns id of the Bin that's the mode of the distribution (i.e. the largest bin).
+     * @throws UnsupportedOperationException if this histogram does not store instances of Number
+     */
+    public double getMode() {
+        return getModeBin().getIdValue();
+    }
+
+    /** Returns the Bin that's the mode of the distribution (i.e. the largest bin). */
+    private Bin<K> getModeBin() {
+        Bin<K> modeBin = null;
+
+        for (final Bin<K> bin : values()) {
+            if (modeBin == null || modeBin.value < bin.value) {
+                modeBin = bin;
+            }
+        }
+
+        return modeBin;
+    }
+
+
+    /**
+     * Returns the smallest key in the histogram, as a double.
+     * @throws UnsupportedOperationException if this histogram does not store instances of Number
+     */
+    public double getMin() {
+        return map.firstEntry().getValue().getIdValue();
+    }
+
+    /**
+     * Returns the largest key in the histogram, as a double.
+     * @throws UnsupportedOperationException if this histogram does not store instances of Number
+     */
+    public double getMax() {
+        return map.lastEntry().getValue().getIdValue();
+    }
+
+    public double getCount() {
+        double count = 0;
+        for (final Bin<K> bin : values()) {
+            count += bin.value;
+        }
+
+        return count;
+    }
+
+    /** Gets the geometric mean of the distribution. */
+    public double getGeometricMean() {
+        double total = 0;
+        double count = 0;
+        for (final Bin<K> bin : values()) {
+            total += bin.value * log(bin.getIdValue());
+            count += bin.value;
+        }
+
+        return exp(total / count);
+    }
+
+    /**
+     * Trims the histogram when the bins in the tail of the distribution contain fewer than mode/tailLimit items
+     */
+    public void trimByTailLimit(final int tailLimit) {
+        if (isEmpty()) {
+            return;
+        }
+
+        final Bin<K> modeBin = getModeBin();
+        final double mode = modeBin.getIdValue();
+        final double sizeOfModeBin = modeBin.getValue();
+        final double minimumBinSize = sizeOfModeBin/tailLimit;
+        Bin<K> lastBin = null;
+
+        final List<K> binsToKeep = new ArrayList<>();
+        for (Bin<K> bin : values()) {
+            double binId = ((Number)bin.getId()).doubleValue();
+
+            if (binId <= mode) {
+                binsToKeep.add(bin.getId());
+            }
+            else if ((lastBin != null && ((Number)lastBin.getId()).doubleValue() != binId - 1) || bin.getValue() < minimumBinSize) {
+                break;
+            }
+            else {
+                binsToKeep.add(bin.getId());
+            }
+            lastBin = bin;
+        }
+
+        final Object[] keys = keySet().toArray();
+        for (Object binId : keys) {
+            if (!binsToKeep.contains(binId)) {
+                remove(binId);
+            }
+        }
+    }
+
+    private Bin<K> remove(final Object key) {
+        return map.remove(key);
+    }
+
+    /**
+     * Returns true if this histogram has no data in it, false otherwise.
+     */
+    public boolean isEmpty() {
+        return map.isEmpty();
+    }
+
+    /**
+     * Trims the histogram so that only bins <= width are kept.
+     */
+    public void trimByWidth(final int width) {
+        final Iterator<K> it = map.descendingKeySet().iterator();
+        while (it.hasNext()) {
+
+            if (((Number)it.next()).doubleValue() > width) {
+                it.remove();
+            } else break;
+        }
+    }
+
+    /**
+     * Immutable method that divides the current Histogram by an input Histogram and generates a new one.
+     * Throws an exception if the bins don't match up exactly.
+     * @param divisorHistogram the histogram to divide this one by, bin by bin
+     * @return a new Histogram in which each bin holds this bin's value divided by the corresponding divisor bin's value
+     * @throws IllegalArgumentException if the keySet of this histogram is not equal to the keySet of the given divisorHistogram
+     */
+    public Histogram<K> divideByHistogram(final Histogram<K> divisorHistogram) {
+        final Histogram<K> output = new Histogram<K>();
+        if (!this.keySet().equals(divisorHistogram.keySet())) throw new IllegalArgumentException("Attempting to divide Histograms with non-identical bins");
+        for (final K key : this.keySet()){
+            final Bin<K> dividend = this.get(key);
+            final Bin<K> divisor = divisorHistogram.get(key);
+            output.increment(key, dividend.getValue()/divisor.getValue());
+        }
+        return output;
+    }
+
+    /**
+     * Mutable method that adds the contents of another Histogram into the current one, bin by bin.
+     * @param addHistogram the histogram whose bin values are added to this one
+     */
+    public void addHistogram(final Histogram<K> addHistogram) {
+        for (final K key : addHistogram.keySet()){
+            this.increment(key, addHistogram.get(key).getValue());
+        }
+    }
+
+    /**
+     * Retrieves the bin associated with the given key.
+     */
+    public Bin<K> get(final K key) {
+        return map.get(key);
+    }
+
+    /**
+     * Returns the set of keys for this histogram.
+     */
+    public Set<K> keySet() {
+        return map.keySet();
+    }
+
+    /**
+     * Return whether this histogram contains the given key.
+     */
+    public boolean containsKey(final K key){
+        return map.containsKey(key);
+    }
+}
diff --git a/src/java/htsjdk/samtools/util/HttpUtils.java b/src/main/java/htsjdk/samtools/util/HttpUtils.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/HttpUtils.java
rename to src/main/java/htsjdk/samtools/util/HttpUtils.java
diff --git a/src/java/htsjdk/samtools/util/IOUtil.java b/src/main/java/htsjdk/samtools/util/IOUtil.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/IOUtil.java
rename to src/main/java/htsjdk/samtools/util/IOUtil.java
diff --git a/src/java/htsjdk/samtools/util/Interval.java b/src/main/java/htsjdk/samtools/util/Interval.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/Interval.java
rename to src/main/java/htsjdk/samtools/util/Interval.java
diff --git a/src/java/htsjdk/samtools/util/IntervalList.java b/src/main/java/htsjdk/samtools/util/IntervalList.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/IntervalList.java
rename to src/main/java/htsjdk/samtools/util/IntervalList.java
diff --git a/src/java/htsjdk/samtools/util/IntervalListReferenceSequenceMask.java b/src/main/java/htsjdk/samtools/util/IntervalListReferenceSequenceMask.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/IntervalListReferenceSequenceMask.java
rename to src/main/java/htsjdk/samtools/util/IntervalListReferenceSequenceMask.java
diff --git a/src/java/htsjdk/samtools/util/IntervalTree.java b/src/main/java/htsjdk/samtools/util/IntervalTree.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/IntervalTree.java
rename to src/main/java/htsjdk/samtools/util/IntervalTree.java
diff --git a/src/java/htsjdk/samtools/util/IntervalTreeMap.java b/src/main/java/htsjdk/samtools/util/IntervalTreeMap.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/IntervalTreeMap.java
rename to src/main/java/htsjdk/samtools/util/IntervalTreeMap.java
diff --git a/src/java/htsjdk/samtools/util/IntervalUtil.java b/src/main/java/htsjdk/samtools/util/IntervalUtil.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/IntervalUtil.java
rename to src/main/java/htsjdk/samtools/util/IntervalUtil.java
diff --git a/src/java/htsjdk/samtools/util/Iso8601Date.java b/src/main/java/htsjdk/samtools/util/Iso8601Date.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/Iso8601Date.java
rename to src/main/java/htsjdk/samtools/util/Iso8601Date.java
diff --git a/src/java/htsjdk/samtools/util/IterableAdapter.java b/src/main/java/htsjdk/samtools/util/IterableAdapter.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/IterableAdapter.java
rename to src/main/java/htsjdk/samtools/util/IterableAdapter.java
diff --git a/src/java/htsjdk/samtools/util/IterableOnceIterator.java b/src/main/java/htsjdk/samtools/util/IterableOnceIterator.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/IterableOnceIterator.java
rename to src/main/java/htsjdk/samtools/util/IterableOnceIterator.java
diff --git a/src/java/htsjdk/samtools/util/Iterables.java b/src/main/java/htsjdk/samtools/util/Iterables.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/Iterables.java
rename to src/main/java/htsjdk/samtools/util/Iterables.java
diff --git a/src/java/htsjdk/samtools/util/Lazy.java b/src/main/java/htsjdk/samtools/util/Lazy.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/Lazy.java
rename to src/main/java/htsjdk/samtools/util/Lazy.java
diff --git a/src/java/htsjdk/samtools/util/LineReader.java b/src/main/java/htsjdk/samtools/util/LineReader.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/LineReader.java
rename to src/main/java/htsjdk/samtools/util/LineReader.java
diff --git a/src/java/htsjdk/samtools/util/ListMap.java b/src/main/java/htsjdk/samtools/util/ListMap.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/ListMap.java
rename to src/main/java/htsjdk/samtools/util/ListMap.java
diff --git a/src/java/htsjdk/samtools/util/Locatable.java b/src/main/java/htsjdk/samtools/util/Locatable.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/Locatable.java
rename to src/main/java/htsjdk/samtools/util/Locatable.java
diff --git a/src/java/htsjdk/samtools/util/LocationAware.java b/src/main/java/htsjdk/samtools/util/LocationAware.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/LocationAware.java
rename to src/main/java/htsjdk/samtools/util/LocationAware.java
diff --git a/src/java/htsjdk/samtools/util/Locus.java b/src/main/java/htsjdk/samtools/util/Locus.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/Locus.java
rename to src/main/java/htsjdk/samtools/util/Locus.java
diff --git a/src/java/htsjdk/samtools/util/LocusComparator.java b/src/main/java/htsjdk/samtools/util/LocusComparator.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/LocusComparator.java
rename to src/main/java/htsjdk/samtools/util/LocusComparator.java
diff --git a/src/java/htsjdk/samtools/util/LocusImpl.java b/src/main/java/htsjdk/samtools/util/LocusImpl.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/LocusImpl.java
rename to src/main/java/htsjdk/samtools/util/LocusImpl.java
diff --git a/src/java/htsjdk/samtools/util/Log.java b/src/main/java/htsjdk/samtools/util/Log.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/Log.java
rename to src/main/java/htsjdk/samtools/util/Log.java
diff --git a/src/java/htsjdk/samtools/util/Md5CalculatingInputStream.java b/src/main/java/htsjdk/samtools/util/Md5CalculatingInputStream.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/Md5CalculatingInputStream.java
rename to src/main/java/htsjdk/samtools/util/Md5CalculatingInputStream.java
diff --git a/src/java/htsjdk/samtools/util/Md5CalculatingOutputStream.java b/src/main/java/htsjdk/samtools/util/Md5CalculatingOutputStream.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/Md5CalculatingOutputStream.java
rename to src/main/java/htsjdk/samtools/util/Md5CalculatingOutputStream.java
diff --git a/src/java/htsjdk/samtools/util/MergingIterator.java b/src/main/java/htsjdk/samtools/util/MergingIterator.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/MergingIterator.java
rename to src/main/java/htsjdk/samtools/util/MergingIterator.java
diff --git a/src/java/htsjdk/samtools/util/Murmur3.java b/src/main/java/htsjdk/samtools/util/Murmur3.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/Murmur3.java
rename to src/main/java/htsjdk/samtools/util/Murmur3.java
diff --git a/src/main/java/htsjdk/samtools/util/OverlapDetector.java b/src/main/java/htsjdk/samtools/util/OverlapDetector.java
new file mode 100644
index 0000000..ba177b7
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/util/OverlapDetector.java
@@ -0,0 +1,193 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2010 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools.util;
+
+import java.util.*;
+
+/**
+ * Utility class to efficiently do in-memory overlap detection between a large
+ * set of mapping-like objects and one or more candidate mappings.
+ *
+ * You can use it for example to detect all locatables overlapping a given set of locatables:
+ * <pre>{@code
+ *    OverlapDetector<Locatable> detector = OverlapDetector.create(locatables);
+ *    Set<Locatable> overlaps = detector.getOverlaps(query);
+ *
+ *    boolean anyOverlap = detector.overlapsAny(query); //faster API for checking presence of any overlap
+ * }</pre>
+ */
+public class OverlapDetector<T> {
+    private final Map<Object, IntervalTree<Set<T>>> cache = new HashMap<>();
+    private final int lhsBuffer;
+    private final int rhsBuffer;
+
+    /**
+     * Constructs an overlap detector.
+     * @param lhsBuffer the amount by which to "trim" coordinates of mappings on the left
+     *                  hand side when calculating overlaps
+     * @param rhsBuffer the amount by which to "trim" coordinates of mappings on the right
+     *                  hand side when calculating overlaps
+     */
+    public OverlapDetector(int lhsBuffer, int rhsBuffer) {
+        this.lhsBuffer = lhsBuffer;
+        this.rhsBuffer = rhsBuffer;
+    }
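+
+    // Illustrative effect of the buffers (hypothetical values): with lhsBuffer = 10 and rhsBuffer = 0,
+    // an added interval spanning [100, 200] is matched as if it were [110, 190], so a query covering
+    // only bases 100-105 would not be reported as overlapping it.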
+
+    /**
+     * Creates a new OverlapDetector with no trim and the given set of intervals.
+     */
+    public static <T extends Locatable> OverlapDetector<T> create(final List<T> intervals) {
+        final OverlapDetector<T> detector = new OverlapDetector<>(0, 0);
+        detector.addAll(intervals, intervals);
+        return detector;
+    }
+
+    /** Adds a Locatable to the set of Locatables against which to match candidates. */
+    public void addLhs(final T object, final Locatable interval) {
+        if (object == null) {
+            throw new IllegalArgumentException("null object");
+        }
+        if (interval == null) {
+            throw new IllegalArgumentException("null interval");
+        }
+        final String seqId = interval.getContig();
+
+        IntervalTree<Set<T>> tree = this.cache.get(seqId);
+        if (tree == null) {
+            tree = new IntervalTree<>();
+            this.cache.put(seqId, tree);
+        }
+
+        final int start = interval.getStart() + this.lhsBuffer;
+        final int end   = interval.getEnd()   - this.lhsBuffer;
+
+        final Set<T> objects = new HashSet<>(1);
+        objects.add(object);
+        if (start <= end) {  // Don't put in sequences that have no overlappable bases
+            final Set<T> alreadyThere = tree.put(start, end, objects);
+            if (alreadyThere != null) {
+                alreadyThere.add(object);
+                tree.put(start, end, alreadyThere);
+            }
+        }
+    }
+
+    /**
+     * Adds all items to the overlap detector.
+     *
+     * The order of the lists matters only in the sense that it needs to be the same for the intervals
+     * and the corresponding objects.
+     */
+    public void addAll(final List<T> objects, final List<? extends Locatable> intervals) {
+        if (objects == null) {
+            throw new IllegalArgumentException("null objects");
+        }
+        if (intervals == null) {
+            throw new IllegalArgumentException("null intervals");
+        }
+        if (objects.size() != intervals.size()) {
+            throw new IllegalArgumentException("Objects and intervals must be the same size but were " + objects.size() + " and " + intervals.size());
+        }
+
+        for (int i=0; i<objects.size(); ++i) {
+            addLhs(objects.get(i), intervals.get(i));
+        }
+    }
+
+    /**
+     * Gets all the objects that could be returned by the overlap detector.
+     */
+    public Set<T> getAll() {
+        final Set<T> all = new HashSet<>();
+        for (final IntervalTree<Set<T>> tree : this.cache.values()) {
+            for (IntervalTree.Node<Set<T>> node : tree) {
+                all.addAll(node.getValue());
+            }
+        }
+        return all;
+    }
+
+    /**
+     * Returns true iff the given locatable overlaps any locatable in this detector.
+     *
+     * This is a performance shortcut API functionally equivalent to:
+     * <pre>{@code
+     *      ! getOverlaps(locatable).isEmpty()
+     * }</pre>
+     */
+    public boolean overlapsAny(final Locatable locatable) {
+        if (locatable == null) {
+            throw new IllegalArgumentException("null locatable");
+        }
+        final String seqId = locatable.getContig();
+        final IntervalTree<Set<T>> tree = this.cache.get(seqId);
+        if (tree == null) {
+            return false;
+        }
+        final int start = locatable.getStart() + this.rhsBuffer;
+        final int end   = locatable.getEnd()   - this.rhsBuffer;
+
+        if (start > end) {
+            return false;
+        }
+
+        final Iterator<IntervalTree.Node<Set<T>>> it = tree.overlappers(start, end);
+        while (it.hasNext()) {
+            final IntervalTree.Node<Set<T>> node = it.next();
+            if (!node.getValue().isEmpty()) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    /**
+     * Gets the Set of objects that overlap the provided locatable.
+     * The returned set may not be modifiable.
+     */
+    public Set<T> getOverlaps(final Locatable locatable)  {
+        if (locatable == null) {
+            throw new IllegalArgumentException("null locatable");
+        }
+        final String seqId = locatable.getContig();
+        final IntervalTree<Set<T>> tree = this.cache.get(seqId);
+        if (tree == null) {
+            return Collections.emptySet();
+        }
+        final int start = locatable.getStart() + this.rhsBuffer;
+        final int end   = locatable.getEnd()   - this.rhsBuffer;
+
+        if (start > end) {
+            return Collections.emptySet();
+        }
+
+        final Set<T> matches = new HashSet<>();
+        final Iterator<IntervalTree.Node<Set<T>>> it = tree.overlappers(start, end);
+        while (it.hasNext()) {
+            final IntervalTree.Node<Set<T>> node = it.next();
+            matches.addAll(node.getValue());
+        }
+        return matches;
+    }
+}
diff --git a/src/java/htsjdk/samtools/util/PeekIterator.java b/src/main/java/htsjdk/samtools/util/PeekIterator.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/PeekIterator.java
rename to src/main/java/htsjdk/samtools/util/PeekIterator.java
diff --git a/src/java/htsjdk/samtools/util/PeekableIterator.java b/src/main/java/htsjdk/samtools/util/PeekableIterator.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/PeekableIterator.java
rename to src/main/java/htsjdk/samtools/util/PeekableIterator.java
diff --git a/src/java/htsjdk/samtools/util/ProcessExecutor.java b/src/main/java/htsjdk/samtools/util/ProcessExecutor.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/ProcessExecutor.java
rename to src/main/java/htsjdk/samtools/util/ProcessExecutor.java
diff --git a/src/java/htsjdk/samtools/util/ProgressLogger.java b/src/main/java/htsjdk/samtools/util/ProgressLogger.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/ProgressLogger.java
rename to src/main/java/htsjdk/samtools/util/ProgressLogger.java
diff --git a/src/java/htsjdk/samtools/util/ProgressLoggerInterface.java b/src/main/java/htsjdk/samtools/util/ProgressLoggerInterface.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/ProgressLoggerInterface.java
rename to src/main/java/htsjdk/samtools/util/ProgressLoggerInterface.java
diff --git a/src/java/htsjdk/samtools/util/QualityEncodingDetector.java b/src/main/java/htsjdk/samtools/util/QualityEncodingDetector.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/QualityEncodingDetector.java
rename to src/main/java/htsjdk/samtools/util/QualityEncodingDetector.java
diff --git a/src/java/htsjdk/samtools/util/QualityUtil.java b/src/main/java/htsjdk/samtools/util/QualityUtil.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/QualityUtil.java
rename to src/main/java/htsjdk/samtools/util/QualityUtil.java
diff --git a/src/java/htsjdk/samtools/util/ReferenceSequenceMask.java b/src/main/java/htsjdk/samtools/util/ReferenceSequenceMask.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/ReferenceSequenceMask.java
rename to src/main/java/htsjdk/samtools/util/ReferenceSequenceMask.java
diff --git a/src/java/htsjdk/samtools/util/RelativeIso8601Date.java b/src/main/java/htsjdk/samtools/util/RelativeIso8601Date.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/RelativeIso8601Date.java
rename to src/main/java/htsjdk/samtools/util/RelativeIso8601Date.java
diff --git a/src/java/htsjdk/samtools/util/ResourceLimitedMap.java b/src/main/java/htsjdk/samtools/util/ResourceLimitedMap.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/ResourceLimitedMap.java
rename to src/main/java/htsjdk/samtools/util/ResourceLimitedMap.java
diff --git a/src/java/htsjdk/samtools/util/ResourceLimitedMapFunctor.java b/src/main/java/htsjdk/samtools/util/ResourceLimitedMapFunctor.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/ResourceLimitedMapFunctor.java
rename to src/main/java/htsjdk/samtools/util/ResourceLimitedMapFunctor.java
diff --git a/src/java/htsjdk/samtools/util/RuntimeEOFException.java b/src/main/java/htsjdk/samtools/util/RuntimeEOFException.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/RuntimeEOFException.java
rename to src/main/java/htsjdk/samtools/util/RuntimeEOFException.java
diff --git a/src/java/htsjdk/samtools/util/RuntimeIOException.java b/src/main/java/htsjdk/samtools/util/RuntimeIOException.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/RuntimeIOException.java
rename to src/main/java/htsjdk/samtools/util/RuntimeIOException.java
diff --git a/src/java/htsjdk/samtools/util/RuntimeScriptException.java b/src/main/java/htsjdk/samtools/util/RuntimeScriptException.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/RuntimeScriptException.java
rename to src/main/java/htsjdk/samtools/util/RuntimeScriptException.java
diff --git a/src/main/java/htsjdk/samtools/util/SamLocusIterator.java b/src/main/java/htsjdk/samtools/util/SamLocusIterator.java
new file mode 100644
index 0000000..466d0d6
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/util/SamLocusIterator.java
@@ -0,0 +1,679 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2010 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools.util;
+
+import htsjdk.samtools.*;
+import htsjdk.samtools.filter.*;
+
+import java.util.*;
+
+/**
+ * Iterator that traverses a SAM file, accumulating information on a per-locus basis.
+ * Optionally takes a target interval list, in which case the loci returned are the ones covered by
+ * the interval list.  If no target interval list is given, whatever loci are covered by the input reads are returned.
+ * By default, duplicate reads and non-primary alignments are filtered out.  Filtering may be changed
+ * via setSamFilters().
+ *
+ * @author alecw at broadinstitute.org
+ */
+public class SamLocusIterator implements Iterable<SamLocusIterator.LocusInfo>, CloseableIterator<SamLocusIterator.LocusInfo> {
+    private static final Log LOG = Log.getInstance(SamLocusIterator.class);
+
+    /**
+     * Holds a SAMRecord plus the zero-based offset into that SAMRecord's bases and quality scores that corresponds
+     * to the base and quality at the genomic position described by the containing LocusInfo.
+     */
+    public static class RecordAndOffset {
+        private final SAMRecord record;
+        private final int offset;
+
+        public RecordAndOffset(final SAMRecord record, final int offset) {
+            this.offset = offset;
+            this.record = record;
+        }
+
+        /** Zero-based offset into the read corresponding to the current position in LocusInfo */
+        public int getOffset() { return offset; }
+        public SAMRecord getRecord() { return record; }
+        public byte getReadBase() { return record.getReadBases()[offset]; }
+        public byte getBaseQuality() { return record.getBaseQualities()[offset]; }
+    }
+
+    /**
+     * The unit of iteration.  Holds information about the locus (the SAMSequenceRecord and 1-based position
+     * on the reference), plus a List of RecordAndOffset objects, one for each read that overlaps the locus;
+     * two more Lists of RecordAndOffset objects hold the reads that overlap the locus with insertions and
+     * deletions, respectively.
+     */
+    public static final class LocusInfo implements Locus {
+        private final SAMSequenceRecord referenceSequence;
+        private final int position;
+        private final List<RecordAndOffset> recordAndOffsets = new ArrayList<RecordAndOffset>(100);
+        private List<RecordAndOffset> deletedInRecord = null;
+        private List<RecordAndOffset> insertedInRecord = null;
+
+        LocusInfo(final SAMSequenceRecord referenceSequence, final int position) {
+            this.referenceSequence = referenceSequence;
+            this.position = position;
+        }
+
+        /** Accumulate info for one read at the locus. */
+        public void add(final SAMRecord read, final int position) {
+            recordAndOffsets.add(new RecordAndOffset(read, position));
+        }
+
+        /** Accumulate info for one read with a deletion */
+        public void addDeleted(final SAMRecord read, int previousPosition) {
+            if (deletedInRecord == null) {
+                deletedInRecord = new ArrayList<>();
+            }
+            deletedInRecord.add(new RecordAndOffset(read, previousPosition));
+        }
+
+        /**
+         * Accumulate info for one read with an insertion.
+         * For this locus, the reads carrying the insertion are also included in recordAndOffsets.
+         */
+        public void addInserted(final SAMRecord read, int firstPosition) {
+            if (insertedInRecord == null) {
+                insertedInRecord = new ArrayList<>();
+            }
+            insertedInRecord.add(new RecordAndOffset(read, firstPosition));
+        }
+
+        public int getSequenceIndex() { return referenceSequence.getSequenceIndex(); }
+
+        /** @return 1-based reference position */
+        public int getPosition() { return position; }
+        public List<RecordAndOffset> getRecordAndPositions() { return Collections.unmodifiableList(recordAndOffsets); }
+        public String getSequenceName() { return referenceSequence.getSequenceName(); }
+        @Override public String toString() { return referenceSequence.getSequenceName() + ":" + position; }
+        public int getSequenceLength() {return referenceSequence.getSequenceLength();}
+        public List<RecordAndOffset> getDeletedInRecord() {
+            return (deletedInRecord == null) ? Collections.emptyList() : Collections.unmodifiableList(deletedInRecord);
+        }
+        public List<RecordAndOffset> getInsertedInRecord() {
+            return (insertedInRecord == null) ? Collections.emptyList() : Collections.unmodifiableList(insertedInRecord);
+        }
+
+        /**
+         * @return <code>true</code> if all the RecordAndOffset lists are empty;
+         * <code>false</code> if at least one of them has records
+         */
+        public boolean isEmpty() {
+            return recordAndOffsets.isEmpty() &&
+                    (deletedInRecord == null || deletedInRecord.isEmpty()) &&
+                    (insertedInRecord == null || insertedInRecord.isEmpty());
+        }
+    }
+
+
+    private final SamReader samReader;
+    private final ReferenceSequenceMask referenceSequenceMask;
+    private PeekableIterator<SAMRecord> samIterator;
+    private List<SamRecordFilter> samFilters = Arrays.asList(new SecondaryOrSupplementaryFilter(),
+            new DuplicateReadFilter());
+    private final List<Interval> intervals;
+    private final boolean useIndex;
+
+    /**
+     * LocusInfos on this list are ready to be returned by iterator.  All reads that overlap
+     * the locus have been accumulated before the LocusInfo is moved into this list.
+     */
+    private final ArrayList<LocusInfo> complete = new ArrayList<LocusInfo>(100);
+
+    /**
+     * LocusInfos for which accumulation is in progress.  When {@link #accumulateSamRecord(SAMRecord)} is called
+     * the state of this list is guaranteed to be either:
+     *   a) Empty, or
+     *   b) That the element at index 0 corresponds to the same genomic locus as the first aligned base
+     *      in the read being accumulated
+     *
+     * Before each new read is accumulated the accumulator is examined and:
+     *   i) any LocusInfos at positions earlier than the read start are moved to {@link #complete}
+     *   ii) any uncovered positions between the last LocusInfo and the first aligned base of the new read
+     *       have LocusInfos created and added to {@link #complete} if we are emitting uncovered loci
+     */
+    private final ArrayList<LocusInfo> accumulator = new ArrayList<LocusInfo>(100);
+
+    private int qualityScoreCutoff = Integer.MIN_VALUE;
+    private int mappingQualityScoreCutoff = Integer.MIN_VALUE;
+    private boolean includeNonPfReads = true;
+
+    /**
+     * If true, emit a LocusInfo for every locus in the target map, or if no target map,
+     * emit a LocusInfo for every locus in the reference sequence.
+     * If false, emit a LocusInfo only if a locus has coverage.
+     */
+    private boolean emitUncoveredLoci = true;
+
+    /**
+     * If set, this will cap the number of reads we accumulate for any given position.
+     * Note that if we hit the maximum threshold at the first position in the accumulation queue,
+     * then we throw further reads overlapping that position completely away (including for subsequent positions).
+     * This is a useful feature if one wants to minimize the memory footprint in files with a few massively large pileups,
+     * but it must be pointed out that it could cause major bias because of the non-random nature with which the cap is
+     * applied (the first maxReadsToAccumulatePerLocus reads are kept and all subsequent ones are dropped).
+     */
+    private int maxReadsToAccumulatePerLocus = Integer.MAX_VALUE;
+
+    // Set to true when we have enforced the accumulation limit for the first time
+    private boolean enforcedAccumulationLimit = false;
+
+    /**
+     * If true, include indels in the LocusInfo
+     */
+    private boolean includeIndels = false;
+
+
+    // When there is a target mask, these members remember the last locus for which a LocusInfo has been
+    // returned, so that any uncovered locus in the target mask can be covered by a 0-coverage LocusInfo
+    private int lastReferenceSequence = 0;
+    private int lastPosition = 0;
+
+    // Set to true when past all aligned reads in input SAM file
+    private boolean finishedAlignedReads = false;
+
+    private final LocusComparator<Locus> locusComparator = new LocusComparator<Locus>();
+
+
+    /**
+     * Prepare to iterate through the given SAM records, skipping non-primary alignments.  Do not use
+     * BAM index even if available.
+     */
+    public SamLocusIterator(final SamReader samReader) {
+        this(samReader, null);
+    }
+
+    /**
+     * Prepare to iterate through the given SAM records, skipping non-primary alignments.  Uses the
+     * BAM index if one is available.
+     *
+     * @param intervalList Either the list of desired intervals, or null.  Note that if an intervalList is
+     *                     passed in that is not coordinate sorted, it will eventually be coordinate sorted by this class.
+     */
+    public SamLocusIterator(final SamReader samReader, final IntervalList intervalList) {
+        this(samReader, intervalList, samReader.hasIndex());
+    }
+
+    /**
+     * Prepare to iterate through the given SAM records, skipping non-primary alignments
+     *
+     * @param samReader    must be coordinate sorted
+     * @param intervalList Either the list of desired intervals, or null.  Note that if an intervalList is
+     *                     passed in that is not coordinate sorted, it will eventually be coordinate sorted by this class.
+     * @param useIndex     If true, do indexed lookup to improve performance.  Not relevant if intervalList == null.
+     *                     It is no longer the case that useIndex==true can make performance worse.  It should always perform at least
+     *                     as well as useIndex==false, and generally will be much faster.
+     */
+    public SamLocusIterator(final SamReader samReader, final IntervalList intervalList, final boolean useIndex) {
+        if (samReader.getFileHeader().getSortOrder() == null || samReader.getFileHeader().getSortOrder() == SAMFileHeader.SortOrder.unsorted) {
+            LOG.warn("SamLocusIterator constructed with samReader that has SortOrder == unsorted.  ", "" +
+                    "Assuming SAM is coordinate sorted, but exceptions may occur if it is not.");
+        } else if (samReader.getFileHeader().getSortOrder() != SAMFileHeader.SortOrder.coordinate) {
+            throw new SAMException("SamLocusIterator cannot operate on a SAM file that is not coordinate sorted.");
+        }
+        this.samReader = samReader;
+        this.useIndex = useIndex;
+        if (intervalList != null) {
+            intervals = intervalList.uniqued().getIntervals();
+            this.referenceSequenceMask = new IntervalListReferenceSequenceMask(intervalList);
+        } else {
+            intervals = null;
+            this.referenceSequenceMask = new WholeGenomeReferenceSequenceMask(samReader.getFileHeader());
+        }
+    }
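+
+    // Minimal usage sketch (assumes an already-open, coordinate-sorted SamReader named "reader";
+    // the name is hypothetical and not part of this class):
+    //
+    //   final SamLocusIterator sli = new SamLocusIterator(reader);
+    //   for (final SamLocusIterator.LocusInfo locus : sli) {
+    //       final int depth = locus.getRecordAndPositions().size();
+    //       System.out.println(locus.getSequenceName() + ":" + locus.getPosition() + " depth=" + depth);
+    //   }
+    //   sli.close();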
+
+    public Iterator<LocusInfo> iterator() {
+        if (samIterator != null) {
+            throw new IllegalStateException("Cannot call iterator() more than once on SamLocusIterator");
+        }
+        CloseableIterator<SAMRecord> tempIterator;
+        if (intervals != null) {
+            tempIterator = new SamRecordIntervalIteratorFactory().makeSamRecordIntervalIterator(samReader, intervals, useIndex);
+        } else {
+            tempIterator = samReader.iterator();
+        }
+        if (samFilters != null) {
+            tempIterator = new FilteringSamIterator(tempIterator, new AggregateFilter(samFilters));
+        }
+        samIterator = new PeekableIterator<SAMRecord>(tempIterator);
+        return this;
+    }
+
+    public void close() {
+        this.samIterator.close();
+    }
+
+    private boolean samHasMore() {
+        return !finishedAlignedReads && (samIterator.peek() != null);
+    }
+
+    /**
+     * Returns true if there are more LocusInfo objects that can be returned, due to any of the following reasons:
+     *   1) there are more aligned reads in the SAM file
+     *   2) there are LocusInfos in some stage of accumulation
+     *   3) there are loci in the target mask that have yet to be accumulated (even if there are no reads covering them)
+     */
+    public boolean hasNext() {
+        if (this.samIterator == null) {
+            iterator();
+        }
+
+        while (complete.isEmpty() && ((!accumulator.isEmpty()) || samHasMore() || hasRemainingMaskBases())) {
+            final LocusInfo locusInfo = next();
+            if (locusInfo != null) {
+                complete.add(0, locusInfo);
+            }
+        }
+        return !complete.isEmpty();
+    }
+
+    /**
+     * Returns true if there are more bases at which the locus iterator must emit LocusInfos, i.e.
+     * the iterator is set up to emit uncovered loci and there are loci in the set to be emitted that
+     * lie beyond the last locus emitted so far, so we can guarantee those loci will be emitted.
+     */
+    private boolean hasRemainingMaskBases() {
+        // If there are more sequences in the mask, by definition some of them must have marked bases.
+        // Otherwise, if we're in the last sequence but not yet at the last marked position,
+        // there is also more in the mask.
+        if (!emitUncoveredLoci) {
+            // If not emitting uncovered loci, this check is irrelevant
+            return false;
+        }
+        return (lastReferenceSequence < referenceSequenceMask.getMaxSequenceIndex() ||
+                (lastReferenceSequence == referenceSequenceMask.getMaxSequenceIndex() &&
+                        lastPosition < referenceSequenceMask.nextPosition(lastReferenceSequence, lastPosition)));
+    }
+
+    /**
+     * Returns the next LocusInfo.  If hasNext() returns true, next() is guaranteed not to return null.
+     */
+    public LocusInfo next() {
+
+        // if we don't have any completed entries to return, try and make some!
+        while (complete.isEmpty() && samHasMore()) {
+            final SAMRecord rec = samIterator.peek();
+
+            // There might be unmapped reads mixed in with the mapped ones, but when a read
+            // is encountered with no reference index it means that all the mapped reads have been seen.
+            if (rec.getReferenceIndex() == -1) {
+                this.finishedAlignedReads = true;
+                continue;
+
+            }
+            // Skip over an unaligned read that has been forced to be sorted with the aligned reads
+            if (rec.getReadUnmappedFlag()
+                    || rec.getMappingQuality() < this.mappingQualityScoreCutoff
+                    || (!this.includeNonPfReads && rec.getReadFailsVendorQualityCheckFlag())) {
+                samIterator.next();
+                continue;
+            }
+
+            int start = rec.getAlignmentStart();
+            // only if we are including indels and the record does not start in the first base of the reference
+            // the stop locus to populate the queue is not the same if the record starts with an insertion
+            if (includeIndels && start != 1 && startWithInsertion(rec.getCigar())) {
+                // the start to populate is one less
+                start--;
+            }
+            final Locus alignmentStart = new LocusImpl(rec.getReferenceIndex(), start);
+            // emit everything that is before the start of the current read, because we know no more
+            // coverage will be accumulated for those loci.
+            while (!accumulator.isEmpty() && locusComparator.compare(accumulator.get(0), alignmentStart) < 0) {
+                final LocusInfo first = accumulator.get(0);
+                populateCompleteQueue(alignmentStart);
+                if (!complete.isEmpty()) {
+                    return complete.remove(0);
+                }
+                if (!accumulator.isEmpty() && first == accumulator.get(0)) {
+                    throw new SAMException("Stuck in infinite loop");
+                }
+            }
+
+            // at this point, either the accumulator list is empty or the head should
+            // be the same position as the first base of the read (or insertion if first)
+            if (!accumulator.isEmpty()) {
+                if (accumulator.get(0).getSequenceIndex() != rec.getReferenceIndex() ||
+                        accumulator.get(0).position != start) {
+                    throw new IllegalStateException("accumulator should be empty or aligned with current SAMRecord");
+                }
+            }
+
+            // Store the loci for the read in the accumulator
+            if (!surpassedAccumulationThreshold()) {
+                accumulateSamRecord(rec);
+                // Store the indels if requested
+                if (includeIndels) {
+                    accumulateIndels(rec);
+                }
+            }
+            samIterator.next();
+        }
+
+        final Locus endLocus = new LocusImpl(Integer.MAX_VALUE, Integer.MAX_VALUE);
+        // if we have nothing to return to the user, and we're at the end of the SAM iterator,
+        // push everything into the complete queue
+        if (complete.isEmpty() && !samHasMore()) {
+            while (!accumulator.isEmpty()) {
+                populateCompleteQueue(endLocus);
+                if (!complete.isEmpty()) {
+                    return complete.remove(0);
+                }
+            }
+        }
+
+        // if there are completed entries, return those
+        if (!complete.isEmpty()) {
+            return complete.remove(0);
+        } else if (emitUncoveredLoci) {
+            final Locus afterLastMaskPositionLocus = new LocusImpl(referenceSequenceMask.getMaxSequenceIndex(),
+                    referenceSequenceMask.getMaxPosition() + 1);
+            // In this case... we're past the last read from SAM so see if we can
+            // fill out any more (zero coverage) entries from the mask
+            return createNextUncoveredLocusInfo(afterLastMaskPositionLocus);
+        } else {
+            return null;
+        }
+    }
+
+    /**
+     * @return true if we have surpassed the maximum accumulation threshold for the first locus in the accumulator, false otherwise
+     */
+    private boolean surpassedAccumulationThreshold() {
+        final boolean surpassesThreshold = !accumulator.isEmpty() && accumulator.get(0).recordAndOffsets.size() >= maxReadsToAccumulatePerLocus;
+        if (surpassesThreshold && !enforcedAccumulationLimit) {
+            LOG.warn("We have encountered greater than " + maxReadsToAccumulatePerLocus + " reads at position " + accumulator.get(0).toString() + " and will ignore the remaining reads at this position.  Note that further warnings will be suppressed.");
+            enforcedAccumulationLimit = true;
+        }
+        return surpassesThreshold;
+    }
+
+    /**
+     * Checks if the cigar starts with an insertion, ignoring leading operators that do not consume reference bases.
+     * @param cigar the cigar
+     * @return <code>true</code> if the first operator that either consumes reference bases or is an insertion is an insertion; <code>false</code> otherwise
+     */
+    private static boolean startWithInsertion(final Cigar cigar) {
+        for (final CigarElement element : cigar.getCigarElements()) {
+            if (element.getOperator()==CigarOperator.I) return true;
+            if (!element.getOperator().consumesReferenceBases()) continue;
+            break;
+        }
+        return false;
+    }
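+
+    // Illustrative behaviour of startWithInsertion (hypothetical CIGARs): "2S3I5M" yields true, because
+    // the soft clip consumes no reference bases and the next element is an insertion; "5M3I" yields false,
+    // because the first element that consumes reference bases is the 5M match.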
+
+    /**
+     * Capture the loci covered by the given SAMRecord in the LocusInfos in the accumulator,
+     * creating new LocusInfos as needed.
+     */
+    private void accumulateSamRecord(final SAMRecord rec) {
+
+        // get the accumulator offset
+        int accOffset = getAccumulatorOffset(rec);
+
+        final int minQuality = getQualityScoreCutoff();
+        final boolean dontCheckQualities = minQuality == 0;
+        final byte[] baseQualities = dontCheckQualities ? null : rec.getBaseQualities();
+
+        // interpret the CIGAR string and add the base info
+        for (final AlignmentBlock alignmentBlock : rec.getAlignmentBlocks()) {
+            final int readStart = alignmentBlock.getReadStart();
+            final int refStart = alignmentBlock.getReferenceStart();
+            final int blockLength = alignmentBlock.getLength();
+
+            for (int i = 0; i < blockLength; ++i) {
+                // 0-based offset into the read of the current base
+                final int readOffset = readStart + i - 1;
+
+                // if the quality score cutoff is met, accumulate the base info
+                if (dontCheckQualities || baseQualities.length == 0 || baseQualities[readOffset] >= minQuality) {
+                    // 0-based offset from the aligned position of the first base in the read to the aligned position of the current base.
+                    final int refOffset = refStart + i - accOffset;
+                    accumulator.get(refOffset).add(rec, readOffset);
+                }
+            }
+        }
+    }
+
+    /**
+     * Requires that the accumulator for the record has previously been filled by
+     * {@link #accumulateSamRecord(htsjdk.samtools.SAMRecord)}.
+     * Adds the record's indels to the LocusInfos; the quality threshold does not affect insertions/deletions.
+     */
+    private void accumulateIndels(final SAMRecord rec) {
+        // get the cigar elements
+        final List<CigarElement> cigar = rec.getCigar().getCigarElements();
+        // 0-based offset into the read of the current base
+        int readBase = 0;
+        // 0-based offset for the reference of the current base
+        // the accumulator could have the previous position because an indel is accumulating
+        int refBase = rec.getAlignmentStart() - getAccumulatorOffset(rec);
+        // iterate over the cigar element
+        for (int elementIndex = 0; elementIndex < cigar.size(); elementIndex++) {
+            final CigarElement e = cigar.get(elementIndex);
+            final CigarOperator operator = e.getOperator();
+            if (operator.equals(CigarOperator.I)) {
+                System.err.println("");
+                // insertions are included in the previous base
+                accumulator.get(refBase - 1).addInserted(rec, readBase);
+                readBase += e.getLength();
+            } else if (operator.equals(CigarOperator.D)) {
+                // accumulate for each position that spans the deletion
+                for (int i = 0; i < e.getLength(); i++) {
+                    // the offset is the one for the previous base
+                    accumulator.get(refBase + i).addDeleted(rec, readBase - 1);
+                }
+                refBase += e.getLength();
+            } else {
+                if (operator.consumesReadBases()) readBase += e.getLength();
+                if (operator.consumesReferenceBases()) refBase += e.getLength();
+            }
+        }
+    }
+
+    /**
+     * Create the next relevant zero-coverage LocusInfo
+     *
+     * @param stopBeforeLocus don't go up to this sequence and position
+     * @return a zero-coverage LocusInfo, or null if there is none before the stopBefore locus
+     */
+    private LocusInfo createNextUncoveredLocusInfo(final Locus stopBeforeLocus) {
+        while (lastReferenceSequence <= stopBeforeLocus.getSequenceIndex() &&
+                lastReferenceSequence <= referenceSequenceMask.getMaxSequenceIndex()) {
+
+            if (lastReferenceSequence == stopBeforeLocus.getSequenceIndex() &&
+                    lastPosition + 1 >= stopBeforeLocus.getPosition()) {
+                return null;
+            }
+
+            final int nextbit = referenceSequenceMask.nextPosition(lastReferenceSequence, lastPosition);
+
+            // try the next reference sequence
+            if (nextbit == -1) {
+                // No more in this reference sequence
+                if (lastReferenceSequence == stopBeforeLocus.getSequenceIndex()) {
+                    lastPosition = stopBeforeLocus.getPosition();
+                    return null;
+                }
+                lastReferenceSequence++;
+                lastPosition = 0;
+            } else if (lastReferenceSequence < stopBeforeLocus.getSequenceIndex() || nextbit < stopBeforeLocus.getPosition()) {
+                lastPosition = nextbit;
+                return new LocusInfo(getReferenceSequence(lastReferenceSequence), lastPosition);
+            } else if (nextbit >= stopBeforeLocus.getPosition()) {
+                return null;
+            }
+        }
+
+        return null;
+    }
+
+    /**
+     * Pop the first entry from the LocusInfo accumulator into the complete queue.  In addition,
+     * check the ReferenceSequenceMask and if there are intervening mask positions between the last popped base and the one
+     * about to be popped, put those on the complete queue as well.
+     * Note that a single call to this method may not empty the accumulator completely, or even
+     * empty it at all, because it may just put a zero-coverage LocusInfo into the complete queue.
+     */
+    private void populateCompleteQueue(final Locus stopBeforeLocus) {
+        // Because of gapped alignments, it is possible to create LocusInfos with no reads associated with them.
+        // Skip over these if not including indels.
+        while (!accumulator.isEmpty() && accumulator.get(0).isEmpty() &&
+                locusComparator.compare(accumulator.get(0), stopBeforeLocus) < 0) {
+            accumulator.remove(0);
+        }
+        if (accumulator.isEmpty()) {
+            return;
+        }
+        final LocusInfo locusInfo = accumulator.get(0);
+        if (locusComparator.compare(stopBeforeLocus, locusInfo) <= 0) {
+            return;
+        }
+
+        // If necessary, emit a zero-coverage LocusInfo
+        if (emitUncoveredLoci) {
+            final LocusInfo zeroCoverage = createNextUncoveredLocusInfo(locusInfo);
+            if (zeroCoverage != null) {
+                complete.add(zeroCoverage);
+                return;
+            }
+        }
+
+        // At this point we know we're going to process the LocusInfo, so remove it from the accumulator.
+        accumulator.remove(0);
+
+        // fill in any gaps based on our genome mask
+        final int sequenceIndex = locusInfo.getSequenceIndex();
+
+
+        // only add to the complete queue if it's in the mask (or we have no mask!)
+        if (referenceSequenceMask.get(locusInfo.getSequenceIndex(), locusInfo.getPosition())) {
+            complete.add(locusInfo);
+        }
+
+        lastReferenceSequence = sequenceIndex;
+        lastPosition = locusInfo.getPosition();
+    }
+
+    /**
+     * Ensure that the queue is populated and get the accumulator offset for the current record
+     */
+    private int getAccumulatorOffset(SAMRecord rec) {
+        final SAMSequenceRecord ref = getReferenceSequence(rec.getReferenceIndex());
+        final int alignmentStart = rec.getAlignmentStart();
+        final int alignmentEnd = rec.getAlignmentEnd();
+        final int alignmentLength = alignmentEnd - alignmentStart;
+        // get the offset for an insertion if we are tracking them
+        final int insOffset = (includeIndels && startWithInsertion(rec.getCigar())) ? 1 : 0;
+        // if there is an insertion in the first base and it is not tracked in the accumulator, add it
+        if (insOffset == 1 && accumulator.isEmpty()) {
+            accumulator.add(new LocusInfo(ref, alignmentStart - 1));
+        }
+
+        // Ensure there are LocusInfos up to and including this position
+        for (int i = accumulator.size(); i <= alignmentLength + insOffset; ++i) {
+            accumulator.add(new LocusInfo(ref, alignmentStart + i - insOffset));
+        }
+        return alignmentStart - insOffset;
+    }
+
+    private SAMSequenceRecord getReferenceSequence(final int referenceSequenceIndex) {
+        return samReader.getFileHeader().getSequence(referenceSequenceIndex);
+    }
+
+    public void remove() {
+        throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!");
+    }
+
+    // --------------------------------------------------------------------------------------------
+    // Helper methods below this point...
+    // --------------------------------------------------------------------------------------------
+
+    /**
+     * Controls which, if any, SAMRecords are filtered.  By default duplicate reads and non-primary alignments
+     * are filtered out.  The list of filters passed here replaces any existing filters.
+     *
+     * @param samFilters list of filters, or null if no filtering is desired.
+     */
+    public void setSamFilters(final List<SamRecordFilter> samFilters) {
+        this.samFilters = samFilters;
+    }
+
+    public int getQualityScoreCutoff() {
+        return qualityScoreCutoff;
+    }
+
+    public void setQualityScoreCutoff(final int qualityScoreCutoff) {
+        this.qualityScoreCutoff = qualityScoreCutoff;
+    }
+
+    public int getMappingQualityScoreCutoff() {
+        return mappingQualityScoreCutoff;
+    }
+
+    public void setMappingQualityScoreCutoff(final int mappingQualityScoreCutoff) {
+        this.mappingQualityScoreCutoff = mappingQualityScoreCutoff;
+    }
+
+    public boolean isIncludeNonPfReads() {
+        return includeNonPfReads;
+    }
+
+    public void setIncludeNonPfReads(final boolean includeNonPfReads) {
+        this.includeNonPfReads = includeNonPfReads;
+    }
+
+    public boolean isEmitUncoveredLoci() {
+        return emitUncoveredLoci;
+    }
+
+    public void setEmitUncoveredLoci(final boolean emitUncoveredLoci) {
+        this.emitUncoveredLoci = emitUncoveredLoci;
+    }
+
+    public int getMaxReadsToAccumulatePerLocus() {
+        return maxReadsToAccumulatePerLocus;
+    }
+
+    /**
+     * If set, this will cap the number of reads we accumulate for any given position.
+     * As is pointed out above, setting this could cause major bias because of the non-random nature with which the
+     * cap is applied (the first maxReadsToAccumulatePerLocus reads are kept and all subsequent ones are dropped).
+     */
+    public void setMaxReadsToAccumulatePerLocus(final int maxReadsToAccumulatePerLocus) {
+        this.maxReadsToAccumulatePerLocus = maxReadsToAccumulatePerLocus;
+    }
+
+    public boolean isIncludeIndels() {
+        return includeIndels;
+    }
+
+    public void setIncludeIndels(final boolean includeIndels) {
+        this.includeIndels = includeIndels;
+    }
+
+}
+
diff --git a/src/java/htsjdk/samtools/util/SamRecordIntervalIteratorFactory.java b/src/main/java/htsjdk/samtools/util/SamRecordIntervalIteratorFactory.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/SamRecordIntervalIteratorFactory.java
rename to src/main/java/htsjdk/samtools/util/SamRecordIntervalIteratorFactory.java
diff --git a/src/java/htsjdk/samtools/util/SamRecordTrackingBuffer.java b/src/main/java/htsjdk/samtools/util/SamRecordTrackingBuffer.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/SamRecordTrackingBuffer.java
rename to src/main/java/htsjdk/samtools/util/SamRecordTrackingBuffer.java
diff --git a/src/java/htsjdk/samtools/util/SamRecordWithOrdinal.java b/src/main/java/htsjdk/samtools/util/SamRecordWithOrdinal.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/SamRecordWithOrdinal.java
rename to src/main/java/htsjdk/samtools/util/SamRecordWithOrdinal.java
diff --git a/src/java/htsjdk/samtools/util/SequenceUtil.java b/src/main/java/htsjdk/samtools/util/SequenceUtil.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/SequenceUtil.java
rename to src/main/java/htsjdk/samtools/util/SequenceUtil.java
diff --git a/src/java/htsjdk/samtools/util/SnappyLoader.java b/src/main/java/htsjdk/samtools/util/SnappyLoader.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/SnappyLoader.java
rename to src/main/java/htsjdk/samtools/util/SnappyLoader.java
diff --git a/src/java/htsjdk/samtools/util/SolexaQualityConverter.java b/src/main/java/htsjdk/samtools/util/SolexaQualityConverter.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/SolexaQualityConverter.java
rename to src/main/java/htsjdk/samtools/util/SolexaQualityConverter.java
diff --git a/src/main/java/htsjdk/samtools/util/SortingCollection.java b/src/main/java/htsjdk/samtools/util/SortingCollection.java
new file mode 100644
index 0000000..6babd4e
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/util/SortingCollection.java
@@ -0,0 +1,514 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools.util;
+
+import htsjdk.samtools.Defaults;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.Serializable;
+import java.lang.reflect.Array;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.List;
+import java.util.NoSuchElementException;
+import java.util.TreeSet;
+
+/**
+ * Collection to which many records can be added.  After all records are added, the collection can be
+ * iterated, and the records will be returned in order defined by the comparator.  Records may be spilled
+ * to a temporary directory if there are more records added than will fit in memory.  As a result of this,
+ * the objects returned may not be identical to the objects added to the collection, but they should be
+ * equal as determined by the codec used to write them to disk and read them back.
+ *
+ * When iterating over the collection, the number of file handles required is numRecordsInCollection/maxRecordsInRam.
+ * If this becomes a limiting factor, a file handle cache could be added.
+ *
+ * If Snappy DLL is available and snappy.disable system property is not set to true, then Snappy is used
+ * to compress temporary files.
+ */
+public class SortingCollection<T> implements Iterable<T> {
+
+    /**
+     * Client must implement this interface, which defines the way in which records are written to and
+     * read from file.
+     */
+    public interface Codec<T> extends Cloneable {
+        /**
+         * Where to write encoded output
+         * @param os the output stream to which encoded records will be written
+         */
+        void setOutputStream(OutputStream os);
+
+        /**
+         * Where to read encoded input from
+         * @param is the input stream from which encoded records will be read
+         */
+        void setInputStream(InputStream is);
+        /**
+         * Write object to output stream
+         * @param val what to write
+         */
+        void encode(T val);
+
+        /**
+         * Read the next record from the input stream and convert it into a Java object.
+         * @return null if there are no more records.  Should throw an exception if EOF is encountered in the middle of
+         * a record.
+         */
+        T decode();
+
+        /**
+         * Must return a cloned copy of the codec that can be used independently of
+         * the original instance.  This is required so that multiple codecs can exist simultaneously,
+         * each reading a separate file.
+         */
+        Codec<T> clone();
+    }
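+
+    // Minimal usage sketch (assumes a StringCodec implementing the Codec interface above and a
+    // newInstance(...) factory method, neither of which is shown in this excerpt; names are hypothetical):
+    //
+    //   final SortingCollection<String> sorter = SortingCollection.newInstance(
+    //           String.class, new StringCodec(), String::compareTo, 500000, new File("/tmp"));
+    //   sorter.add("zebra");
+    //   sorter.add("aardvark");
+    //   sorter.doneAdding();
+    //   for (final String s : sorter) {
+    //       // records come back in comparator order, spilling to /tmp if they did not fit in RAM
+    //   }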
+
+    /** Directories where files of sorted records go. */
+    private final File[] tmpDirs;
+
+    /** The minimum amount of space free on a temp filesystem to write a file there. */
+    private final long TMP_SPACE_FREE = IOUtil.FIVE_GBS;
+
+    /**
+     * Used to write records to file, and used as a prototype to create codecs for reading.
+     */
+    private final SortingCollection.Codec<T> codec;
+
+    /**
+     * For sorting, both when spilling records to file, and merge sorting.
+     */
+    private final Comparator<T> comparator;
+    private final int maxRecordsInRam;
+    private int numRecordsInRam = 0;
+    private T[] ramRecords;
+    private boolean iterationStarted = false;
+    private boolean doneAdding = false;
+
+    /**
+     * Set to true when all temp files have been cleaned up
+     */
+    private boolean cleanedUp = false;
+
+    /**
+     * List of files in tmpDir containing sorted records
+     */
+    private final List<File> files = new ArrayList<File>();
+
+    private boolean destructiveIteration = true;
+
+    private TempStreamFactory tempStreamFactory = new TempStreamFactory();
+
+    /**
+     * Prepare to accumulate records to be sorted
+     * @param componentType Class of the record to be sorted.  Necessary because of Java generic lameness.
+     * @param codec For writing records to file and reading them back into RAM
+     * @param comparator Defines output sort order
+     * @param maxRecordsInRam how many records to accumulate before spilling to disk
+     * @param tmpDir Where to write files of records that will not fit in RAM
+     */
+    private SortingCollection(final Class<T> componentType, final SortingCollection.Codec<T> codec,
+                             final Comparator<T> comparator, final int maxRecordsInRam, final File... tmpDir) {
+        if (maxRecordsInRam <= 0) {
+            throw new IllegalArgumentException("maxRecordsInRam must be > 0");
+        }
+
+        if (tmpDir == null || tmpDir.length == 0) {
+            throw new IllegalArgumentException("At least one temp directory must be provided.");
+        }
+
+        this.tmpDirs = tmpDir;
+        this.codec = codec;
+        this.comparator = comparator;
+        this.maxRecordsInRam = maxRecordsInRam;
+        this.ramRecords = (T[])Array.newInstance(componentType, maxRecordsInRam);
+    }
+
+    public void add(final T rec) {
+        if (doneAdding) {
+            throw new IllegalStateException("Cannot add after calling doneAdding()");
+        }
+        if (iterationStarted) {
+            throw new IllegalStateException("Cannot add after calling iterator()");
+        }
+        if (numRecordsInRam == maxRecordsInRam) {
+            spillToDisk();
+        }
+        ramRecords[numRecordsInRam++] = rec;
+    }
+
+    /**
+     * This method can be called after the caller is done adding to the collection, in order to possibly
+     * free up memory.  If iterator() is called immediately after the caller is done adding, this is not
+     * necessary, because iterator() triggers the same freeing.
+     */
+    public void doneAdding() {
+        if (this.cleanedUp) {
+            throw new IllegalStateException("Cannot call doneAdding() after cleanup() was called.");
+        }
+        if (doneAdding) {
+            return;
+        }
+
+        doneAdding = true;
+
+        if (this.files.isEmpty()) {
+            return;
+        }
+
+        if (this.numRecordsInRam > 0) {
+            spillToDisk();
+        }
+
+        // Facilitate GC
+        this.ramRecords = null;
+    }
+
+    /**
+     * @return True if this collection is allowed to discard data during iteration in order to reduce memory
+     * footprint, precluding a second iteration over the collection.
+     */
+    public boolean isDestructiveIteration() {
+        return destructiveIteration;
+    }
+
+    /**
+     * Tell this collection that it is allowed to discard data during iteration in order to reduce memory footprint,
+     * precluding a second iteration.  This is true by default.
+     */
+    public void setDestructiveIteration(boolean destructiveIteration) {
+        this.destructiveIteration = destructiveIteration;
+    }
+
+    /**
+     * Sort the records in memory, write them to a file, and clear the buffer of records in memory.
+     */
+    private void spillToDisk() {
+        try {
+            Arrays.sort(this.ramRecords, 0, this.numRecordsInRam, this.comparator);
+            final File f = newTempFile();
+            OutputStream os = null;
+            try {
+                os = tempStreamFactory.wrapTempOutputStream(new FileOutputStream(f), Defaults.BUFFER_SIZE);
+                this.codec.setOutputStream(os);
+                for (int i = 0; i < this.numRecordsInRam; ++i) {
+                    this.codec.encode(ramRecords[i]);
+                    // Facilitate GC
+                    this.ramRecords[i] = null;
+                }
+
+                os.flush();
+            } catch (RuntimeIOException ex) {
+                throw new RuntimeIOException("Problem writing temporary file " + f.getAbsolutePath() +
+                        ".  Try setting TMP_DIR to a file system with lots of space.", ex);
+            } finally {
+                if (os != null) {
+                    os.close();
+                }
+            }
+
+            this.numRecordsInRam = 0;
+            this.files.add(f);
+
+        }
+        catch (IOException e) {
+            throw new RuntimeIOException(e);
+        }
+    }
+
+    /**
+     * Creates a new tmp file on one of the available temp filesystems, registers it for deletion
+     * on JVM exit and then returns it.
+     */
+    private File newTempFile() throws IOException {
+        return IOUtil.newTempFile("sortingcollection.", ".tmp", this.tmpDirs, TMP_SPACE_FREE);
+    }
+
+    /**
+     * Prepare to iterate through the records in order.  This method may be called more than once,
+     * but add() may not be called after this method has been called.
+     */
+    public CloseableIterator<T> iterator() {
+        if (this.cleanedUp) {
+            throw new IllegalStateException("Cannot call iterator() after cleanup() was called.");
+        }
+        doneAdding();
+
+        this.iterationStarted = true;
+        if (this.files.isEmpty()) {
+            return new InMemoryIterator();
+        } else {
+            return new MergingIterator();
+        }
+    }
+
+    /**
+     * Delete any temporary files.  After this method is called, iterator() may not be called.
+     */
+    public void cleanup() {
+        this.iterationStarted = true;
+        this.cleanedUp = true;
+
+        IOUtil.deleteFiles(this.files);
+    }
+
+    /**
+     * Syntactic sugar around the ctor, to save some typing of type parameters
+     *
+     * @param componentType Class of the record to be sorted.  Necessary because of Java generic lameness.
+     * @param codec For writing records to file and reading them back into RAM
+     * @param comparator Defines output sort order
+     * @param maxRecordsInRAM how many records to accumulate in memory before spilling to disk
+     * @param tmpDir Where to write files of records that will not fit in RAM
+     */
+    public static <T> SortingCollection<T> newInstance(final Class<T> componentType,
+                                                       final SortingCollection.Codec<T> codec,
+                                                       final Comparator<T> comparator,
+                                                       final int maxRecordsInRAM,
+                                                       final File... tmpDir) {
+        return new SortingCollection<T>(componentType, codec, comparator, maxRecordsInRAM, tmpDir);
+
+    }
+
+    /**
+     * Syntactic sugar around the ctor, to save some typing of type parameters
+     *
+     * @param componentType Class of the record to be sorted.  Necessary because of Java generic lameness.
+     * @param codec For writing records to file and reading them back into RAM
+     * @param comparator Defines output sort order
+     * @param maxRecordsInRAM how many records to accumulate in memory before spilling to disk
+     * @param tmpDirs Where to write files of records that will not fit in RAM
+     */
+    public static <T> SortingCollection<T> newInstance(final Class<T> componentType,
+                                                       final SortingCollection.Codec<T> codec,
+                                                       final Comparator<T> comparator,
+                                                       final int maxRecordsInRAM,
+                                                       final Collection<File> tmpDirs) {
+        return new SortingCollection<T>(componentType,
+                                        codec,
+                                        comparator,
+                                        maxRecordsInRAM,
+                                        tmpDirs.toArray(new File[tmpDirs.size()]));
+
+    }
+
+
+    /**
+     * Syntactic sugar around the ctor, to save some typing of type parameters.  Writes files to java.io.tmpdir
+     *
+     * @param componentType Class of the record to be sorted.  Necessary because of Java generic lameness.
+     * @param codec For writing records to file and reading them back into RAM
+     * @param comparator Defines output sort order
+     * @param maxRecordsInRAM how many records to accumulate in memory before spilling to disk
+     */
+    public static <T> SortingCollection<T> newInstance(final Class<T> componentType,
+                                                       final SortingCollection.Codec<T> codec,
+                                                       final Comparator<T> comparator,
+                                                       final int maxRecordsInRAM) {
+
+        final File tmpDir = new File(System.getProperty("java.io.tmpdir"));
+        return new SortingCollection<T>(componentType, codec, comparator, maxRecordsInRAM, tmpDir);
+    }
+
+    /**
+     * For iteration when the number of records added is less than the threshold for spilling to disk.
+     */
+    class InMemoryIterator implements CloseableIterator<T> {
+        private int iterationIndex = 0;
+
+        InMemoryIterator() {
+            Arrays.sort(SortingCollection.this.ramRecords,
+                        0,
+                        SortingCollection.this.numRecordsInRam,
+                        SortingCollection.this.comparator);
+        }
+
+        public void close() {
+            // nothing to do
+        }
+
+        public boolean hasNext() {
+            return this.iterationIndex < SortingCollection.this.numRecordsInRam;
+        }
+
+        public T next() {
+            if (!hasNext()) {
+                throw new NoSuchElementException();
+            }
+            T ret = SortingCollection.this.ramRecords[iterationIndex];
+            if (destructiveIteration) SortingCollection.this.ramRecords[iterationIndex] = null;
+            ++iterationIndex;
+            return ret;
+        }
+
+        public void remove() {
+            throw new UnsupportedOperationException();
+        }
+    }
+
+    /**
+     * For iteration when spilling to disk has occurred.
+     * Each file has records in sort order within the file.
+     * This iterator automatically closes when it iterates to the end, but if iteration stops early
+     * it is a good idea to call close().
+     *
+     * Algorithm: MergingIterator maintains a priority queue (implemented here with a TreeSet) of PeekFileRecordIterators.
+     * Each PeekFileRecordIterator iterates through a file in which the records are sorted.
+     * The PeekFileRecordIterator comparator used by the priority queue peeks at the next record from
+     * each file, so the first element in the priority queue is the iterator whose next record sorts earliest.
+     * To get the next record, the first PeekFileRecordIterator is popped from the priority queue,
+     * the record is obtained from that iterator, and then, if that iterator is not empty, it is pushed back into
+     * the priority queue.  Because it now has a different record as its next element, it may land at a
+     * different position in the priority queue.
+     */
+    class MergingIterator implements CloseableIterator<T> {
+        private final TreeSet<PeekFileRecordIterator> queue;
+
+        MergingIterator() {
+            this.queue = new TreeSet<PeekFileRecordIterator>(new PeekFileRecordIteratorComparator());
+            int n = 0;
+            for (final File f : SortingCollection.this.files) {
+                final FileRecordIterator it = new FileRecordIterator(f);
+                if (it.hasNext()) {
+                    this.queue.add(new PeekFileRecordIterator(it, n++));
+                }
+                else {
+                    it.close();
+                }
+            }
+        }
+
+        public boolean hasNext() {
+            return !this.queue.isEmpty();
+        }
+
+        public T next() {
+            if (!hasNext()) {
+                throw new NoSuchElementException();
+            }
+
+            final PeekFileRecordIterator fileIterator = queue.pollFirst();
+            final T ret = fileIterator.next();
+            if (fileIterator.hasNext()) {
+                this.queue.add(fileIterator);
+            }
+            else {
+                ((CloseableIterator<T>)fileIterator.getUnderlyingIterator()).close();
+            }
+
+            return ret;
+        }
+
+        public void remove() {
+            throw new UnsupportedOperationException();
+        }
+
+        public void close() {
+            while (!this.queue.isEmpty()) {
+                final PeekFileRecordIterator it = this.queue.pollFirst();
+                ((CloseableIterator<T>)it.getUnderlyingIterator()).close();
+            }
+        }
+    }
+
+    /**
+     * Read a file of records in the format defined by the codec.
+     */
+    class FileRecordIterator implements CloseableIterator<T> {
+        private final File file;
+        private final FileInputStream is;
+        private final Codec<T> codec;
+        private T currentRecord = null;
+
+        FileRecordIterator(final File file) {
+            this.file = file;
+            try {
+                this.is = new FileInputStream(file);
+                this.codec = SortingCollection.this.codec.clone();
+                this.codec.setInputStream(tempStreamFactory.wrapTempInputStream(this.is, Defaults.BUFFER_SIZE));
+                advance();
+            }
+            catch (FileNotFoundException e) {
+                throw new RuntimeIOException(e);
+            }
+        }
+
+        public boolean hasNext() {
+            return this.currentRecord != null;
+        }
+
+        public T next() {
+            if (!hasNext()) {
+                throw new NoSuchElementException();
+            }
+            final T ret = this.currentRecord;
+            advance();
+            return ret;
+        }
+
+        public void remove() {
+            throw new UnsupportedOperationException();
+        }
+
+        private void advance() {
+            this.currentRecord = this.codec.decode();
+        }
+
+        public void close() {
+            CloserUtil.close(this.is);
+        }
+    }
+
+
+    /**
+     * Just a typedef
+     */
+    class PeekFileRecordIterator extends PeekIterator<T> {
+        final int n; // A serial number used for tie-breaking in the sort
+        PeekFileRecordIterator(final Iterator<T> underlyingIterator, final int n) {
+            super(underlyingIterator);
+            this.n = n;
+        }
+    }
+
+    class PeekFileRecordIteratorComparator implements Comparator<PeekFileRecordIterator>, Serializable {
+        private static final long serialVersionUID = 1L;
+
+        public int compare(final PeekFileRecordIterator lhs, final PeekFileRecordIterator rhs) {
+            final int result = comparator.compare(lhs.peek(), rhs.peek());
+            if (result == 0) return lhs.n - rhs.n;
+            else return result;
+        }
+    }
+}
diff --git a/src/java/htsjdk/samtools/util/SortingLongCollection.java b/src/main/java/htsjdk/samtools/util/SortingLongCollection.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/SortingLongCollection.java
rename to src/main/java/htsjdk/samtools/util/SortingLongCollection.java
diff --git a/src/java/htsjdk/samtools/util/StopWatch.java b/src/main/java/htsjdk/samtools/util/StopWatch.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/StopWatch.java
rename to src/main/java/htsjdk/samtools/util/StopWatch.java
diff --git a/src/java/htsjdk/samtools/util/StringLineReader.java b/src/main/java/htsjdk/samtools/util/StringLineReader.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/StringLineReader.java
rename to src/main/java/htsjdk/samtools/util/StringLineReader.java
diff --git a/src/java/htsjdk/samtools/util/StringUtil.java b/src/main/java/htsjdk/samtools/util/StringUtil.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/StringUtil.java
rename to src/main/java/htsjdk/samtools/util/StringUtil.java
diff --git a/src/java/htsjdk/samtools/util/TempStreamFactory.java b/src/main/java/htsjdk/samtools/util/TempStreamFactory.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/TempStreamFactory.java
rename to src/main/java/htsjdk/samtools/util/TempStreamFactory.java
diff --git a/src/java/htsjdk/samtools/util/TestUtil.java b/src/main/java/htsjdk/samtools/util/TestUtil.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/TestUtil.java
rename to src/main/java/htsjdk/samtools/util/TestUtil.java
diff --git a/src/java/htsjdk/samtools/util/TrimmingUtil.java b/src/main/java/htsjdk/samtools/util/TrimmingUtil.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/TrimmingUtil.java
rename to src/main/java/htsjdk/samtools/util/TrimmingUtil.java
diff --git a/src/java/htsjdk/samtools/util/Tuple.java b/src/main/java/htsjdk/samtools/util/Tuple.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/Tuple.java
rename to src/main/java/htsjdk/samtools/util/Tuple.java
diff --git a/src/java/htsjdk/samtools/util/WholeGenomeReferenceSequenceMask.java b/src/main/java/htsjdk/samtools/util/WholeGenomeReferenceSequenceMask.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/WholeGenomeReferenceSequenceMask.java
rename to src/main/java/htsjdk/samtools/util/WholeGenomeReferenceSequenceMask.java
diff --git a/src/java/htsjdk/samtools/util/ftp/FTPClient.java b/src/main/java/htsjdk/samtools/util/ftp/FTPClient.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/ftp/FTPClient.java
rename to src/main/java/htsjdk/samtools/util/ftp/FTPClient.java
diff --git a/src/java/htsjdk/samtools/util/ftp/FTPReply.java b/src/main/java/htsjdk/samtools/util/ftp/FTPReply.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/ftp/FTPReply.java
rename to src/main/java/htsjdk/samtools/util/ftp/FTPReply.java
diff --git a/src/java/htsjdk/samtools/util/ftp/FTPStream.java b/src/main/java/htsjdk/samtools/util/ftp/FTPStream.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/ftp/FTPStream.java
rename to src/main/java/htsjdk/samtools/util/ftp/FTPStream.java
diff --git a/src/java/htsjdk/samtools/util/ftp/FTPUtils.java b/src/main/java/htsjdk/samtools/util/ftp/FTPUtils.java
similarity index 100%
rename from src/java/htsjdk/samtools/util/ftp/FTPUtils.java
rename to src/main/java/htsjdk/samtools/util/ftp/FTPUtils.java
diff --git a/src/main/java/htsjdk/samtools/util/zip/DeflaterFactory.java b/src/main/java/htsjdk/samtools/util/zip/DeflaterFactory.java
new file mode 100644
index 0000000..c15e7c6
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/util/zip/DeflaterFactory.java
@@ -0,0 +1,48 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2013 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools.util.zip;
+
+import htsjdk.samtools.util.BlockCompressedOutputStream;
+import java.util.zip.Deflater;
+
+/**
+ * Factory for {@link Deflater} objects used by {@link BlockCompressedOutputStream}.
+ * This class may be extended to provide alternative deflaters (e.g., for improved performance).
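+ *
+ * An illustrative sketch of a hypothetical subclass (not part of htsjdk) that only logs before
+ * delegating to the JDK deflater:
+ * <pre>{@code
+ * public class LoggingDeflaterFactory extends DeflaterFactory {
+ *     public Deflater makeDeflater(final int compressionLevel, final boolean nowrap) {
+ *         System.err.println("Creating Deflater with compression level " + compressionLevel);
+ *         return new Deflater(compressionLevel, nowrap);
+ *     }
+ * }
+ * }</pre>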
+ */
+public class DeflaterFactory {
+
+    public DeflaterFactory() {
+        //Note: made explicit constructor to make searching for references easier
+    }
+
+    /**
+     * Returns a deflater object that will be used when writing BAM files.
+     * Subclasses may override to provide their own deflater implementation.
+     * @param compressionLevel the compression level (0-9)
+     * @param nowrap if true then use GZIP compatible compression
+     */
+    public Deflater makeDeflater(final int compressionLevel, final boolean nowrap) {
+        return new Deflater(compressionLevel, nowrap);
+    }
+}
diff --git a/src/java/htsjdk/tribble/AbstractFeatureCodec.java b/src/main/java/htsjdk/tribble/AbstractFeatureCodec.java
similarity index 100%
rename from src/java/htsjdk/tribble/AbstractFeatureCodec.java
rename to src/main/java/htsjdk/tribble/AbstractFeatureCodec.java
diff --git a/src/java/htsjdk/tribble/AbstractFeatureReader.java b/src/main/java/htsjdk/tribble/AbstractFeatureReader.java
similarity index 100%
rename from src/java/htsjdk/tribble/AbstractFeatureReader.java
rename to src/main/java/htsjdk/tribble/AbstractFeatureReader.java
diff --git a/src/main/java/htsjdk/tribble/AsciiFeatureCodec.java b/src/main/java/htsjdk/tribble/AsciiFeatureCodec.java
new file mode 100644
index 0000000..141b764
--- /dev/null
+++ b/src/main/java/htsjdk/tribble/AsciiFeatureCodec.java
@@ -0,0 +1,92 @@
+/*
+ * Copyright (c) 2007-2010 by The Broad Institute, Inc. and the Massachusetts Institute of Technology.
+ * All Rights Reserved.
+ *
+ * This software is licensed under the terms of the GNU Lesser General Public License (LGPL), Version 2.1 which
+ * is available at http://www.opensource.org/licenses/lgpl-2.1.php.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS." THE BROAD AND MIT MAKE NO REPRESENTATIONS OR WARRANTIES OF
+ * ANY KIND CONCERNING THE SOFTWARE, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT
+ * OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE.  IN NO EVENT SHALL THE BROAD OR MIT, OR THEIR
+ * RESPECTIVE TRUSTEES, DIRECTORS, OFFICERS, EMPLOYEES, AND AFFILIATES BE LIABLE FOR ANY DAMAGES OF
+ * ANY KIND, INCLUDING, WITHOUT LIMITATION, INCIDENTAL OR CONSEQUENTIAL DAMAGES, ECONOMIC
+ * DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER THE BROAD OR MIT SHALL
+ * BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE
+ * FOREGOING.
+ */
+
+package htsjdk.tribble;
+
+import htsjdk.samtools.util.CloserUtil;
+import htsjdk.samtools.util.LocationAware;
+import htsjdk.tribble.readers.*;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+/**
+ * A convenience base class for codecs that want to read in features from ASCII lines.
+ * <p/>
+ * This class overrides the general stream-based decode methods and instead presents
+ * Strings to the decode(String) and readHeader(LineIterator) functions.
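+ * <p/>
+ * An illustrative sketch of a hypothetical subclass (not part of htsjdk) that decodes tab-separated
+ * "contig start end" lines; it assumes SimpleFeature(String, int, int) and a header-less format:
+ * <pre>{@code
+ * public class SimpleIntervalCodec extends AsciiFeatureCodec<SimpleFeature> {
+ *     public SimpleIntervalCodec() { super(SimpleFeature.class); }
+ *
+ *     public SimpleFeature decode(final String line) {
+ *         if (line.isEmpty() || line.startsWith("#")) return null;   // skip blank and comment lines
+ *         final String[] fields = line.split("\t");
+ *         return new SimpleFeature(fields[0], Integer.parseInt(fields[1]), Integer.parseInt(fields[2]));
+ *     }
+ *
+ *     public Object readActualHeader(final LineIterator reader) { return null; }   // no header
+ *
+ *     public boolean canDecode(final String path) { return path.endsWith(".intervals"); }
+ * }
+ * }</pre>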
+ *
+ * @param <T> The feature type this codec reads
+ */
+public abstract class AsciiFeatureCodec<T extends Feature> extends AbstractFeatureCodec<T, LineIterator> {
+    protected AsciiFeatureCodec(final Class<T> myClass) {
+        super(myClass);
+    }
+    
+    @Override
+    public void close(final LineIterator lineIterator) {
+        CloserUtil.close(lineIterator);
+    }
+
+    @Override
+    public boolean isDone(final LineIterator lineIterator) {
+        return !lineIterator.hasNext();
+    }
+
+    @Override
+    public LocationAware makeIndexableSourceFromStream(final InputStream bufferedInputStream) {
+        final PositionalBufferedStream pbs;
+        if (bufferedInputStream instanceof PositionalBufferedStream) {
+            pbs = (PositionalBufferedStream) bufferedInputStream;
+        } else {
+            pbs = new PositionalBufferedStream(bufferedInputStream);
+        }
+        return new AsciiLineReaderIterator(new AsciiLineReader(pbs));
+    }
+
+    @Override
+    public LineIterator makeSourceFromStream(final InputStream bufferedInputStream) {
+        return new LineIteratorImpl(new SynchronousLineReader(bufferedInputStream));
+    }
+
+    /** 
+     * Convenience method.  Decoding in ASCII files operates line-by-line, so this obviates the need to call
+     * {@link htsjdk.tribble.readers.LineIterator#next()} in implementing classes and, instead, has them implement
+     * {@link AsciiFeatureCodec#decode(String)}.
+     */
+    @Override
+    public T decode(final LineIterator lineIterator) {
+        return decode(lineIterator.next());
+    }
+
+    /** @see AsciiFeatureCodec#decode(htsjdk.tribble.readers.LineIterator) */
+    public abstract T decode(String s);
+
+    @Override
+    public FeatureCodecHeader readHeader(final LineIterator lineIterator) throws IOException {
+        // TODO: Track header end here, rather than assuming there isn't one.
+        return new FeatureCodecHeader(readActualHeader(lineIterator), FeatureCodecHeader.NO_HEADER_END);
+    }
+
+    /**
+     * Read and return the header, or null if there is no header.
+     *
+     * @return the actual header data in the file, or null if none is available
+     */
+    abstract public Object readActualHeader(final LineIterator reader);
+}
diff --git a/src/java/htsjdk/tribble/BinaryFeatureCodec.java b/src/main/java/htsjdk/tribble/BinaryFeatureCodec.java
similarity index 100%
rename from src/java/htsjdk/tribble/BinaryFeatureCodec.java
rename to src/main/java/htsjdk/tribble/BinaryFeatureCodec.java
diff --git a/src/java/htsjdk/tribble/CloseableTribbleIterator.java b/src/main/java/htsjdk/tribble/CloseableTribbleIterator.java
similarity index 100%
rename from src/java/htsjdk/tribble/CloseableTribbleIterator.java
rename to src/main/java/htsjdk/tribble/CloseableTribbleIterator.java
diff --git a/src/java/htsjdk/tribble/Feature.java b/src/main/java/htsjdk/tribble/Feature.java
similarity index 100%
rename from src/java/htsjdk/tribble/Feature.java
rename to src/main/java/htsjdk/tribble/Feature.java
diff --git a/src/java/htsjdk/tribble/FeatureCodec.java b/src/main/java/htsjdk/tribble/FeatureCodec.java
similarity index 100%
rename from src/java/htsjdk/tribble/FeatureCodec.java
rename to src/main/java/htsjdk/tribble/FeatureCodec.java
diff --git a/src/java/htsjdk/tribble/FeatureCodecHeader.java b/src/main/java/htsjdk/tribble/FeatureCodecHeader.java
similarity index 100%
rename from src/java/htsjdk/tribble/FeatureCodecHeader.java
rename to src/main/java/htsjdk/tribble/FeatureCodecHeader.java
diff --git a/src/java/htsjdk/tribble/FeatureReader.java b/src/main/java/htsjdk/tribble/FeatureReader.java
similarity index 100%
rename from src/java/htsjdk/tribble/FeatureReader.java
rename to src/main/java/htsjdk/tribble/FeatureReader.java
diff --git a/src/java/htsjdk/tribble/NameAwareCodec.java b/src/main/java/htsjdk/tribble/NameAwareCodec.java
similarity index 100%
rename from src/java/htsjdk/tribble/NameAwareCodec.java
rename to src/main/java/htsjdk/tribble/NameAwareCodec.java
diff --git a/src/java/htsjdk/tribble/SimpleFeature.java b/src/main/java/htsjdk/tribble/SimpleFeature.java
similarity index 100%
rename from src/java/htsjdk/tribble/SimpleFeature.java
rename to src/main/java/htsjdk/tribble/SimpleFeature.java
diff --git a/src/main/java/htsjdk/tribble/TabixFeatureReader.java b/src/main/java/htsjdk/tribble/TabixFeatureReader.java
new file mode 100644
index 0000000..5d90295
--- /dev/null
+++ b/src/main/java/htsjdk/tribble/TabixFeatureReader.java
@@ -0,0 +1,217 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2013 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.tribble;
+
+import htsjdk.samtools.util.BlockCompressedInputStream;
+import htsjdk.samtools.util.RuntimeIOException;
+import htsjdk.tribble.readers.*;
+import htsjdk.tribble.util.ParsingUtils;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * @author Jim Robinson
+ * @since 2/11/12
+ */
+public class TabixFeatureReader<T extends Feature, SOURCE> extends AbstractFeatureReader<T, SOURCE> {
+
+    TabixReader tabixReader;
+    List<String> sequenceNames;
+
+    /**
+     *
+     * @param featureFile - path to a feature file. Can be a local file, http url, or ftp url
+     * @param codec - codec used to decode features from each line of the file
+     * @throws IOException if the feature file or its tabix index cannot be opened or read
+     */
+    public TabixFeatureReader(final String featureFile, final AsciiFeatureCodec codec) throws IOException {
+        super(featureFile, codec);
+        tabixReader = new TabixReader(featureFile);
+        sequenceNames = new ArrayList<String>(tabixReader.getChromosomes());
+        readHeader();
+    }
+
+    /**
+     *
+     * @param featureFile - path to a feature file. Can be a local file, http url, or ftp url
+     * @param indexFile - path to the index file.
+     * @param codec - codec used to decode features from each line of the file
+     * @throws IOException if the feature file or its tabix index cannot be opened or read
+     */
+    public TabixFeatureReader(final String featureFile, final String indexFile, final AsciiFeatureCodec codec) throws IOException {
+        super(featureFile, codec);
+        tabixReader = new TabixReader(featureFile, indexFile);
+        sequenceNames = new ArrayList<String>(tabixReader.getChromosomes());
+        readHeader();
+    }
+
+
+    /**
+     * Read the header, storing it in the inherited header field.
+     *
+     * @throws IOException if we can't open the file
+     */
+    private void readHeader() throws IOException {
+        SOURCE source = null;
+        try {
+            source = codec.makeSourceFromStream(new PositionalBufferedStream(new BlockCompressedInputStream(ParsingUtils.openInputStream(path))));
+            header = codec.readHeader(source);
+        } catch (Exception e) {
+            throw new TribbleException.MalformedFeatureFile("Unable to parse header with error: " + e.getMessage(), path, e);
+        } finally {
+            if (source != null) {
+                codec.close(source);
+            }
+        }
+    }
+
+    @Override
+    public boolean hasIndex(){
+        return true;
+    }
+
+
+    public List<String> getSequenceNames() {
+        return sequenceNames;
+    }
+
+    /**
+     * Return an iterator over all features overlapping the given interval.
+     *
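+     * For example (illustrative sketch; hypothetical file names, any matching AsciiFeatureCodec
+     * such as BEDCodec can be used, and exception handling is omitted):
+     * <pre>{@code
+     * TabixFeatureReader<BEDFeature, LineIterator> reader =
+     *     new TabixFeatureReader<>("features.bed.gz", "features.bed.gz.tbi", new BEDCodec());
+     * for (BEDFeature f : reader.query("chr1", 100000, 200000)) {
+     *     // use the feature
+     * }
+     * reader.close();
+     * }</pre>
+     *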
+     * @param chr the name of the sequence (chromosome/contig) to query
+     * @param start the start of the query interval
+     * @param end the end of the query interval
+     * @return an iterator over the features overlapping the interval; empty if the sequence is not in the index
+     * @throws IOException if the underlying data cannot be read
+     */
+    public CloseableTribbleIterator<T> query(final String chr, final int start, final int end) throws IOException {
+        final List<String> mp = getSequenceNames();
+        if (mp == null) throw new TribbleException.TabixReaderFailure("Unable to find sequence named " + chr +
+                " in the tabix index. ", path);
+        if (!mp.contains(chr)) {
+            return new EmptyIterator<T>();
+        }
+        final TabixIteratorLineReader lineReader = new TabixIteratorLineReader(tabixReader.query(tabixReader.chr2tid(chr), start - 1, end));
+        return new FeatureIterator<T>(lineReader, start - 1, end);
+    }
+
+    public CloseableTribbleIterator<T> iterator() throws IOException {
+        final InputStream is = new BlockCompressedInputStream(ParsingUtils.openInputStream(path));
+        final PositionalBufferedStream stream = new PositionalBufferedStream(is);
+        final LineReader reader = new SynchronousLineReader(stream);
+        return new FeatureIterator<T>(reader, 0, Integer.MAX_VALUE);
+    }
+
+    public void close() throws IOException {
+        tabixReader.close();
+    }
+
+
+    class FeatureIterator<T extends Feature> implements CloseableTribbleIterator<T> {
+        private T currentRecord;
+        private LineReader lineReader;
+        private int start;
+        private int end;
+
+        public FeatureIterator(final LineReader lineReader, final int start, final int end) throws IOException {
+            this.lineReader = lineReader;
+            this.start = start;
+            this.end = end;
+            readNextRecord();
+        }
+
+
+        /**
+         * Advance to the next record in the query interval.
+         *
+         * @throws IOException
+         */
+        protected void readNextRecord() throws IOException {
+            currentRecord = null;
+            String nextLine;
+            while (currentRecord == null && (nextLine = lineReader.readLine()) != null) {
+                final Feature f;
+                try {
+                    f = ((AsciiFeatureCodec)codec).decode(nextLine);
+                    if (f == null) {
+                        continue;   // Skip
+                    }
+                    if (f.getStart() > end) {
+                        return;    // Done
+                    }
+                    if (f.getEnd() <= start) {
+                        continue;   // Skip
+                    }
+
+                    currentRecord = (T) f;
+
+                } catch (TribbleException e) {
+                    e.setSource(path);
+                    throw e;
+                } catch (NumberFormatException e) {
+                    String error = "Error parsing line: " + nextLine;
+                    throw new TribbleException.MalformedFeatureFile(error, path, e);
+                }
+
+
+            }
+        }
+
+
+        public boolean hasNext() {
+            return currentRecord != null;
+        }
+
+        public T next() {
+            T ret = currentRecord;
+            try {
+                readNextRecord();
+            } catch (IOException e) {
+                throw new RuntimeIOException("Unable to read the next record, the last record was at " +
+                        ret.getContig() + ":" + ret.getStart() + "-" + ret.getEnd(), e);
+            }
+            return ret;
+
+        }
+
+        public void remove() {
+            throw new UnsupportedOperationException("Remove is not supported in Iterators");
+        }
+
+        public void close() {
+            lineReader.close();
+        }
+
+        public Iterator<T> iterator() {
+            return this;
+        }
+    }
+
+
+}
diff --git a/src/java/htsjdk/tribble/Tribble.java b/src/main/java/htsjdk/tribble/Tribble.java
similarity index 100%
rename from src/java/htsjdk/tribble/Tribble.java
rename to src/main/java/htsjdk/tribble/Tribble.java
diff --git a/src/java/htsjdk/tribble/TribbleException.java b/src/main/java/htsjdk/tribble/TribbleException.java
similarity index 100%
rename from src/java/htsjdk/tribble/TribbleException.java
rename to src/main/java/htsjdk/tribble/TribbleException.java
diff --git a/src/java/htsjdk/tribble/TribbleIndexedFeatureReader.java b/src/main/java/htsjdk/tribble/TribbleIndexedFeatureReader.java
similarity index 100%
rename from src/java/htsjdk/tribble/TribbleIndexedFeatureReader.java
rename to src/main/java/htsjdk/tribble/TribbleIndexedFeatureReader.java
diff --git a/src/java/htsjdk/tribble/annotation/Strand.java b/src/main/java/htsjdk/tribble/annotation/Strand.java
similarity index 100%
rename from src/java/htsjdk/tribble/annotation/Strand.java
rename to src/main/java/htsjdk/tribble/annotation/Strand.java
diff --git a/src/java/htsjdk/tribble/bed/BEDCodec.java b/src/main/java/htsjdk/tribble/bed/BEDCodec.java
similarity index 100%
rename from src/java/htsjdk/tribble/bed/BEDCodec.java
rename to src/main/java/htsjdk/tribble/bed/BEDCodec.java
diff --git a/src/java/htsjdk/tribble/bed/BEDFeature.java b/src/main/java/htsjdk/tribble/bed/BEDFeature.java
similarity index 100%
rename from src/java/htsjdk/tribble/bed/BEDFeature.java
rename to src/main/java/htsjdk/tribble/bed/BEDFeature.java
diff --git a/src/java/htsjdk/tribble/bed/FullBEDFeature.java b/src/main/java/htsjdk/tribble/bed/FullBEDFeature.java
similarity index 100%
rename from src/java/htsjdk/tribble/bed/FullBEDFeature.java
rename to src/main/java/htsjdk/tribble/bed/FullBEDFeature.java
diff --git a/src/java/htsjdk/tribble/bed/SimpleBEDFeature.java b/src/main/java/htsjdk/tribble/bed/SimpleBEDFeature.java
similarity index 100%
rename from src/java/htsjdk/tribble/bed/SimpleBEDFeature.java
rename to src/main/java/htsjdk/tribble/bed/SimpleBEDFeature.java
diff --git a/src/java/htsjdk/tribble/example/CountRecords.java b/src/main/java/htsjdk/tribble/example/CountRecords.java
similarity index 100%
rename from src/java/htsjdk/tribble/example/CountRecords.java
rename to src/main/java/htsjdk/tribble/example/CountRecords.java
diff --git a/src/java/htsjdk/tribble/example/ExampleBinaryCodec.java b/src/main/java/htsjdk/tribble/example/ExampleBinaryCodec.java
similarity index 100%
rename from src/java/htsjdk/tribble/example/ExampleBinaryCodec.java
rename to src/main/java/htsjdk/tribble/example/ExampleBinaryCodec.java
diff --git a/src/java/htsjdk/tribble/example/IndexToTable.java b/src/main/java/htsjdk/tribble/example/IndexToTable.java
similarity index 100%
rename from src/java/htsjdk/tribble/example/IndexToTable.java
rename to src/main/java/htsjdk/tribble/example/IndexToTable.java
diff --git a/src/java/htsjdk/tribble/example/IndicesAreEqual.java b/src/main/java/htsjdk/tribble/example/IndicesAreEqual.java
similarity index 100%
rename from src/java/htsjdk/tribble/example/IndicesAreEqual.java
rename to src/main/java/htsjdk/tribble/example/IndicesAreEqual.java
diff --git a/src/java/htsjdk/tribble/example/ProfileIndexReading.java b/src/main/java/htsjdk/tribble/example/ProfileIndexReading.java
similarity index 100%
rename from src/java/htsjdk/tribble/example/ProfileIndexReading.java
rename to src/main/java/htsjdk/tribble/example/ProfileIndexReading.java
diff --git a/src/java/htsjdk/tribble/exception/CodecLineParsingException.java b/src/main/java/htsjdk/tribble/exception/CodecLineParsingException.java
similarity index 100%
rename from src/java/htsjdk/tribble/exception/CodecLineParsingException.java
rename to src/main/java/htsjdk/tribble/exception/CodecLineParsingException.java
diff --git a/src/java/htsjdk/tribble/exception/UnsortedFileException.java b/src/main/java/htsjdk/tribble/exception/UnsortedFileException.java
similarity index 100%
rename from src/java/htsjdk/tribble/exception/UnsortedFileException.java
rename to src/main/java/htsjdk/tribble/exception/UnsortedFileException.java
diff --git a/src/java/htsjdk/tribble/gelitext/DiploidGenotype.java b/src/main/java/htsjdk/tribble/gelitext/DiploidGenotype.java
similarity index 100%
rename from src/java/htsjdk/tribble/gelitext/DiploidGenotype.java
rename to src/main/java/htsjdk/tribble/gelitext/DiploidGenotype.java
diff --git a/src/java/htsjdk/tribble/gelitext/GeliTextCodec.java b/src/main/java/htsjdk/tribble/gelitext/GeliTextCodec.java
similarity index 100%
rename from src/java/htsjdk/tribble/gelitext/GeliTextCodec.java
rename to src/main/java/htsjdk/tribble/gelitext/GeliTextCodec.java
diff --git a/src/java/htsjdk/tribble/gelitext/GeliTextFeature.java b/src/main/java/htsjdk/tribble/gelitext/GeliTextFeature.java
similarity index 100%
rename from src/java/htsjdk/tribble/gelitext/GeliTextFeature.java
rename to src/main/java/htsjdk/tribble/gelitext/GeliTextFeature.java
diff --git a/src/java/htsjdk/tribble/index/AbstractIndex.java b/src/main/java/htsjdk/tribble/index/AbstractIndex.java
similarity index 100%
rename from src/java/htsjdk/tribble/index/AbstractIndex.java
rename to src/main/java/htsjdk/tribble/index/AbstractIndex.java
diff --git a/src/java/htsjdk/tribble/index/Block.java b/src/main/java/htsjdk/tribble/index/Block.java
similarity index 100%
rename from src/java/htsjdk/tribble/index/Block.java
rename to src/main/java/htsjdk/tribble/index/Block.java
diff --git a/src/java/htsjdk/tribble/index/ChrIndex.java b/src/main/java/htsjdk/tribble/index/ChrIndex.java
similarity index 100%
rename from src/java/htsjdk/tribble/index/ChrIndex.java
rename to src/main/java/htsjdk/tribble/index/ChrIndex.java
diff --git a/src/java/htsjdk/tribble/index/DynamicIndexCreator.java b/src/main/java/htsjdk/tribble/index/DynamicIndexCreator.java
similarity index 100%
rename from src/java/htsjdk/tribble/index/DynamicIndexCreator.java
rename to src/main/java/htsjdk/tribble/index/DynamicIndexCreator.java
diff --git a/src/java/htsjdk/tribble/index/Index.java b/src/main/java/htsjdk/tribble/index/Index.java
similarity index 100%
rename from src/java/htsjdk/tribble/index/Index.java
rename to src/main/java/htsjdk/tribble/index/Index.java
diff --git a/src/java/htsjdk/tribble/index/IndexCreator.java b/src/main/java/htsjdk/tribble/index/IndexCreator.java
similarity index 100%
rename from src/java/htsjdk/tribble/index/IndexCreator.java
rename to src/main/java/htsjdk/tribble/index/IndexCreator.java
diff --git a/src/java/htsjdk/tribble/index/IndexFactory.java b/src/main/java/htsjdk/tribble/index/IndexFactory.java
similarity index 100%
rename from src/java/htsjdk/tribble/index/IndexFactory.java
rename to src/main/java/htsjdk/tribble/index/IndexFactory.java
diff --git a/src/java/htsjdk/tribble/index/MutableIndex.java b/src/main/java/htsjdk/tribble/index/MutableIndex.java
similarity index 100%
rename from src/java/htsjdk/tribble/index/MutableIndex.java
rename to src/main/java/htsjdk/tribble/index/MutableIndex.java
diff --git a/src/java/htsjdk/tribble/index/TribbleIndexCreator.java b/src/main/java/htsjdk/tribble/index/TribbleIndexCreator.java
similarity index 100%
rename from src/java/htsjdk/tribble/index/TribbleIndexCreator.java
rename to src/main/java/htsjdk/tribble/index/TribbleIndexCreator.java
diff --git a/src/java/htsjdk/tribble/index/interval/Interval.java b/src/main/java/htsjdk/tribble/index/interval/Interval.java
similarity index 100%
rename from src/java/htsjdk/tribble/index/interval/Interval.java
rename to src/main/java/htsjdk/tribble/index/interval/Interval.java
diff --git a/src/java/htsjdk/tribble/index/interval/IntervalIndexCreator.java b/src/main/java/htsjdk/tribble/index/interval/IntervalIndexCreator.java
similarity index 100%
rename from src/java/htsjdk/tribble/index/interval/IntervalIndexCreator.java
rename to src/main/java/htsjdk/tribble/index/interval/IntervalIndexCreator.java
diff --git a/src/java/htsjdk/tribble/index/interval/IntervalTree.java b/src/main/java/htsjdk/tribble/index/interval/IntervalTree.java
similarity index 100%
rename from src/java/htsjdk/tribble/index/interval/IntervalTree.java
rename to src/main/java/htsjdk/tribble/index/interval/IntervalTree.java
diff --git a/src/java/htsjdk/tribble/index/interval/IntervalTreeIndex.java b/src/main/java/htsjdk/tribble/index/interval/IntervalTreeIndex.java
similarity index 100%
rename from src/java/htsjdk/tribble/index/interval/IntervalTreeIndex.java
rename to src/main/java/htsjdk/tribble/index/interval/IntervalTreeIndex.java
diff --git a/src/java/htsjdk/tribble/index/linear/LinearIndex.java b/src/main/java/htsjdk/tribble/index/linear/LinearIndex.java
similarity index 100%
rename from src/java/htsjdk/tribble/index/linear/LinearIndex.java
rename to src/main/java/htsjdk/tribble/index/linear/LinearIndex.java
diff --git a/src/java/htsjdk/tribble/index/linear/LinearIndexCreator.java b/src/main/java/htsjdk/tribble/index/linear/LinearIndexCreator.java
similarity index 100%
rename from src/java/htsjdk/tribble/index/linear/LinearIndexCreator.java
rename to src/main/java/htsjdk/tribble/index/linear/LinearIndexCreator.java
diff --git a/src/java/htsjdk/tribble/index/tabix/TabixFormat.java b/src/main/java/htsjdk/tribble/index/tabix/TabixFormat.java
similarity index 100%
rename from src/java/htsjdk/tribble/index/tabix/TabixFormat.java
rename to src/main/java/htsjdk/tribble/index/tabix/TabixFormat.java
diff --git a/src/java/htsjdk/tribble/index/tabix/TabixIndex.java b/src/main/java/htsjdk/tribble/index/tabix/TabixIndex.java
similarity index 100%
rename from src/java/htsjdk/tribble/index/tabix/TabixIndex.java
rename to src/main/java/htsjdk/tribble/index/tabix/TabixIndex.java
diff --git a/src/java/htsjdk/tribble/index/tabix/TabixIndexCreator.java b/src/main/java/htsjdk/tribble/index/tabix/TabixIndexCreator.java
similarity index 100%
rename from src/java/htsjdk/tribble/index/tabix/TabixIndexCreator.java
rename to src/main/java/htsjdk/tribble/index/tabix/TabixIndexCreator.java
diff --git a/src/java/htsjdk/tribble/readers/AsciiLineReader.java b/src/main/java/htsjdk/tribble/readers/AsciiLineReader.java
similarity index 100%
rename from src/java/htsjdk/tribble/readers/AsciiLineReader.java
rename to src/main/java/htsjdk/tribble/readers/AsciiLineReader.java
diff --git a/src/java/htsjdk/tribble/readers/AsciiLineReaderIterator.java b/src/main/java/htsjdk/tribble/readers/AsciiLineReaderIterator.java
similarity index 100%
rename from src/java/htsjdk/tribble/readers/AsciiLineReaderIterator.java
rename to src/main/java/htsjdk/tribble/readers/AsciiLineReaderIterator.java
diff --git a/src/java/htsjdk/tribble/readers/LineIterator.java b/src/main/java/htsjdk/tribble/readers/LineIterator.java
similarity index 100%
rename from src/java/htsjdk/tribble/readers/LineIterator.java
rename to src/main/java/htsjdk/tribble/readers/LineIterator.java
diff --git a/src/java/htsjdk/tribble/readers/LineIteratorImpl.java b/src/main/java/htsjdk/tribble/readers/LineIteratorImpl.java
similarity index 100%
rename from src/java/htsjdk/tribble/readers/LineIteratorImpl.java
rename to src/main/java/htsjdk/tribble/readers/LineIteratorImpl.java
diff --git a/src/java/htsjdk/tribble/readers/LineReader.java b/src/main/java/htsjdk/tribble/readers/LineReader.java
similarity index 100%
rename from src/java/htsjdk/tribble/readers/LineReader.java
rename to src/main/java/htsjdk/tribble/readers/LineReader.java
diff --git a/src/main/java/htsjdk/tribble/readers/LineReaderUtil.java b/src/main/java/htsjdk/tribble/readers/LineReaderUtil.java
new file mode 100644
index 0000000..83a0545
--- /dev/null
+++ b/src/main/java/htsjdk/tribble/readers/LineReaderUtil.java
@@ -0,0 +1,56 @@
+package htsjdk.tribble.readers;
+
+import java.io.InputStream;
+import java.io.StringReader;
+
+/**
+ * A collection of factories for generating {@link LineReader}s.
+ *
+ * @deprecated use {@link SynchronousLineReader} directly.
+ * @author mccowan
+ */
+@Deprecated
+public class LineReaderUtil {
+    @Deprecated
+    public enum LineReaderOption {
+        ASYNCHRONOUS,   //Note: the asynchronous option has no effect - this class does not provide asynchronous reading anymore
+        SYNCHRONOUS
+    }
+
+    /**
+     * Creates a line reader from the given stream.
+     * @deprecated use <code>new SynchronousLineReader(stream);</code>
+     */
+    @Deprecated
+    public static LineReader fromBufferedStream(final InputStream stream) {
+        return new SynchronousLineReader(stream);
+    }
+
+    /**
+     * Creates a line reader from the given string reader.
+     * @deprecated use <code>new SynchronousLineReader(stringReader);</code>
+     */
+    @Deprecated
+    public static LineReader fromStringReader(final StringReader stringReader) {
+        return new SynchronousLineReader(stringReader);
+    }
+
+    /**
+     * Creates a line reader from the given string reader.
+     * @deprecated Asynchronous mode is not going to be supported. Use <code>new SynchronousLineReader(stringReader);</code>
+     */
+    @Deprecated
+    public static LineReader fromStringReader(final StringReader stringReader, final Object ignored) {
+        return new SynchronousLineReader(stringReader);
+    }
+
+    /**
+     * Convenience factory for composing a LineReader from an InputStream.
+     * @deprecated Asynchronous mode is not going to be supported. Use <code>new SynchronousLineReader(bufferedStream);</code>
+     */
+    @Deprecated
+    public static LineReader fromBufferedStream(final InputStream bufferedStream, final Object ignored) {
+        return new SynchronousLineReader(bufferedStream);
+    }
+
+}
diff --git a/src/java/htsjdk/tribble/readers/LongLineBufferedReader.java b/src/main/java/htsjdk/tribble/readers/LongLineBufferedReader.java
similarity index 100%
rename from src/java/htsjdk/tribble/readers/LongLineBufferedReader.java
rename to src/main/java/htsjdk/tribble/readers/LongLineBufferedReader.java
diff --git a/src/java/htsjdk/tribble/readers/Positional.java b/src/main/java/htsjdk/tribble/readers/Positional.java
similarity index 100%
rename from src/java/htsjdk/tribble/readers/Positional.java
rename to src/main/java/htsjdk/tribble/readers/Positional.java
diff --git a/src/java/htsjdk/tribble/readers/PositionalBufferedStream.java b/src/main/java/htsjdk/tribble/readers/PositionalBufferedStream.java
similarity index 100%
rename from src/java/htsjdk/tribble/readers/PositionalBufferedStream.java
rename to src/main/java/htsjdk/tribble/readers/PositionalBufferedStream.java
diff --git a/src/main/java/htsjdk/tribble/readers/SynchronousLineReader.java b/src/main/java/htsjdk/tribble/readers/SynchronousLineReader.java
new file mode 100644
index 0000000..5de8b22
--- /dev/null
+++ b/src/main/java/htsjdk/tribble/readers/SynchronousLineReader.java
@@ -0,0 +1,61 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2016 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.tribble.readers;
+
+import htsjdk.samtools.util.CloserUtil;
+import htsjdk.samtools.util.RuntimeIOException;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.Reader;
+
+/**
+ * Implementation of {@link LineReader} that reads lines directly from the underlying stream or reader.
+ */
+public final class SynchronousLineReader implements LineReader {
+    private final LongLineBufferedReader longLineBufferedReader;
+
+    public SynchronousLineReader(final InputStream stream) {
+        this(new InputStreamReader(stream));
+    }
+
+    public SynchronousLineReader(final Reader reader) {
+        this.longLineBufferedReader = new LongLineBufferedReader(reader);
+    }
+
+    @Override
+    public String readLine() {
+        try {
+            return longLineBufferedReader.readLine();
+        } catch (final IOException e) {
+            throw new RuntimeIOException(e);
+        }
+    }
+
+    @Override
+    public void close() {
+        CloserUtil.close(longLineBufferedReader);
+    }
+}
\ No newline at end of file
diff --git a/src/java/htsjdk/tribble/readers/TabixIteratorLineReader.java b/src/main/java/htsjdk/tribble/readers/TabixIteratorLineReader.java
similarity index 100%
rename from src/java/htsjdk/tribble/readers/TabixIteratorLineReader.java
rename to src/main/java/htsjdk/tribble/readers/TabixIteratorLineReader.java
diff --git a/src/java/htsjdk/tribble/readers/TabixReader.java b/src/main/java/htsjdk/tribble/readers/TabixReader.java
similarity index 100%
rename from src/java/htsjdk/tribble/readers/TabixReader.java
rename to src/main/java/htsjdk/tribble/readers/TabixReader.java
diff --git a/src/java/htsjdk/tribble/readme.txt b/src/main/java/htsjdk/tribble/readme.txt
similarity index 100%
rename from src/java/htsjdk/tribble/readme.txt
rename to src/main/java/htsjdk/tribble/readme.txt
diff --git a/src/java/htsjdk/tribble/util/FTPHelper.java b/src/main/java/htsjdk/tribble/util/FTPHelper.java
similarity index 100%
rename from src/java/htsjdk/tribble/util/FTPHelper.java
rename to src/main/java/htsjdk/tribble/util/FTPHelper.java
diff --git a/src/java/htsjdk/tribble/util/HTTPHelper.java b/src/main/java/htsjdk/tribble/util/HTTPHelper.java
similarity index 100%
rename from src/java/htsjdk/tribble/util/HTTPHelper.java
rename to src/main/java/htsjdk/tribble/util/HTTPHelper.java
diff --git a/src/java/htsjdk/tribble/util/LittleEndianInputStream.java b/src/main/java/htsjdk/tribble/util/LittleEndianInputStream.java
similarity index 100%
rename from src/java/htsjdk/tribble/util/LittleEndianInputStream.java
rename to src/main/java/htsjdk/tribble/util/LittleEndianInputStream.java
diff --git a/src/java/htsjdk/tribble/util/LittleEndianOutputStream.java b/src/main/java/htsjdk/tribble/util/LittleEndianOutputStream.java
similarity index 100%
rename from src/java/htsjdk/tribble/util/LittleEndianOutputStream.java
rename to src/main/java/htsjdk/tribble/util/LittleEndianOutputStream.java
diff --git a/src/java/htsjdk/tribble/util/MathUtils.java b/src/main/java/htsjdk/tribble/util/MathUtils.java
similarity index 100%
rename from src/java/htsjdk/tribble/util/MathUtils.java
rename to src/main/java/htsjdk/tribble/util/MathUtils.java
diff --git a/src/java/htsjdk/tribble/util/ParsingUtils.java b/src/main/java/htsjdk/tribble/util/ParsingUtils.java
similarity index 100%
rename from src/java/htsjdk/tribble/util/ParsingUtils.java
rename to src/main/java/htsjdk/tribble/util/ParsingUtils.java
diff --git a/src/java/htsjdk/tribble/util/RemoteURLHelper.java b/src/main/java/htsjdk/tribble/util/RemoteURLHelper.java
similarity index 100%
rename from src/java/htsjdk/tribble/util/RemoteURLHelper.java
rename to src/main/java/htsjdk/tribble/util/RemoteURLHelper.java
diff --git a/src/java/htsjdk/tribble/util/TabixUtils.java b/src/main/java/htsjdk/tribble/util/TabixUtils.java
similarity index 100%
rename from src/java/htsjdk/tribble/util/TabixUtils.java
rename to src/main/java/htsjdk/tribble/util/TabixUtils.java
diff --git a/src/java/htsjdk/tribble/util/URLHelper.java b/src/main/java/htsjdk/tribble/util/URLHelper.java
similarity index 100%
rename from src/java/htsjdk/tribble/util/URLHelper.java
rename to src/main/java/htsjdk/tribble/util/URLHelper.java
diff --git a/src/java/htsjdk/tribble/util/ftp/.gitignore b/src/main/java/htsjdk/tribble/util/ftp/.gitignore
similarity index 100%
rename from src/java/htsjdk/tribble/util/ftp/.gitignore
rename to src/main/java/htsjdk/tribble/util/ftp/.gitignore
diff --git a/src/java/htsjdk/tribble/util/popgen/HardyWeinbergCalculation.java b/src/main/java/htsjdk/tribble/util/popgen/HardyWeinbergCalculation.java
similarity index 100%
rename from src/java/htsjdk/tribble/util/popgen/HardyWeinbergCalculation.java
rename to src/main/java/htsjdk/tribble/util/popgen/HardyWeinbergCalculation.java
diff --git a/src/main/java/htsjdk/variant/bcf2/BCF2Codec.java b/src/main/java/htsjdk/variant/bcf2/BCF2Codec.java
new file mode 100644
index 0000000..a945764
--- /dev/null
+++ b/src/main/java/htsjdk/variant/bcf2/BCF2Codec.java
@@ -0,0 +1,491 @@
+/*
+* Copyright (c) 2012 The Broad Institute
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package htsjdk.variant.bcf2;
+
+import htsjdk.tribble.BinaryFeatureCodec;
+import htsjdk.tribble.Feature;
+import htsjdk.tribble.FeatureCodecHeader;
+import htsjdk.tribble.TribbleException;
+import htsjdk.tribble.readers.*;
+import htsjdk.variant.utils.GeneralUtils;
+import htsjdk.variant.variantcontext.Allele;
+import htsjdk.variant.variantcontext.GenotypeBuilder;
+import htsjdk.variant.variantcontext.LazyGenotypesContext;
+import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.variantcontext.VariantContextBuilder;
+import htsjdk.variant.variantcontext.VariantContextUtils;
+import htsjdk.variant.vcf.VCFCodec;
+import htsjdk.variant.vcf.VCFCompoundHeaderLine;
+import htsjdk.variant.vcf.VCFConstants;
+import htsjdk.variant.vcf.VCFContigHeaderLine;
+import htsjdk.variant.vcf.VCFHeader;
+import htsjdk.variant.vcf.VCFHeaderLineType;
+
+import java.io.ByteArrayInputStream;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Decode BCF2 files
+ */
+public final class BCF2Codec extends BinaryFeatureCodec<VariantContext> {
+    private final static int ALLOWED_MAJOR_VERSION = 2;
+    private final static int MIN_MINOR_VERSION = 1;
+
+    private BCFVersion bcfVersion = null;
+
+    private VCFHeader header = null;
+
+    /**
+     * Maps offsets (encoded in BCF) into contig names (from header) for the CHROM field
+     */
+    private final ArrayList<String> contigNames = new ArrayList<String>();
+
+    /**
+     * Maps offsets (encoded in BCF) into the dictionary strings built from the VCF header
+     *
+     * Initialized when processing the header
+     */
+    private ArrayList<String> dictionary;
+
+    /**
+     * Our decoder that reads low-level objects from the BCF2 records
+     */
+    private final BCF2Decoder decoder = new BCF2Decoder();
+
+    /**
+     * Provides some sanity checking on the header
+     */
+    private final static int MAX_HEADER_SIZE = 0x08000000;
+
+    /**
+     * Genotype field decoders that are initialized when the header is read
+     */
+    private BCF2GenotypeFieldDecoders gtFieldDecoders = null;
+
+    /**
+     * A cached array of GenotypeBuilders for efficient genotype decoding.
+     *
+     * Caching it allows us to avoid recreating this intermediate data
+     * structure each time we decode genotypes
+     */
+    private GenotypeBuilder[] builders = null;
+
+    // for error handling
+    private int recordNo = 0;
+    private int pos = 0;
+
+
+    // ----------------------------------------------------------------------
+    //
+    // Feature codec interface functions
+    //
+    // ----------------------------------------------------------------------
+
+    @Override
+    public Feature decodeLoc( final PositionalBufferedStream inputStream ) {
+        return decode(inputStream);
+    }
+
+    @Override
+    public VariantContext decode( final PositionalBufferedStream inputStream ) {
+        try {
+            recordNo++;
+            final VariantContextBuilder builder = new VariantContextBuilder();
+
+            final int sitesBlockSize = decoder.readBlockSize(inputStream);
+            final int genotypeBlockSize = decoder.readBlockSize(inputStream);
+
+            decoder.readNextBlock(sitesBlockSize, inputStream);
+            decodeSiteLoc(builder);
+            final SitesInfoForDecoding info = decodeSitesExtendedInfo(builder);
+
+            decoder.readNextBlock(genotypeBlockSize, inputStream);
+            createLazyGenotypesDecoder(info, builder);
+            return builder.fullyDecoded(true).make();
+        } catch ( IOException e ) {
+            throw new TribbleException("Failed to read BCF file", e);
+        }
+    }
+
+    @Override
+    public Class<VariantContext> getFeatureType() {
+        return VariantContext.class;
+    }
+
+    @Override
+    public FeatureCodecHeader readHeader( final PositionalBufferedStream inputStream ) {
+        try {
+            // note that this reads the magic as well, and so does double duty
+            bcfVersion = BCFVersion.readBCFVersion(inputStream);
+            if ( bcfVersion == null )
+                error("Input stream does not contain a BCF encoded file; BCF magic header info not found");
+
+            if ( bcfVersion.getMajorVersion() != ALLOWED_MAJOR_VERSION )
+                error("BCF2Codec can only process BCF2 files, this file has major version " + bcfVersion.getMajorVersion());
+            if ( bcfVersion.getMinorVersion() < MIN_MINOR_VERSION )
+                error("BCF2Codec can only process BCF2 files with minor version >= " + MIN_MINOR_VERSION + " but this file has minor version " + bcfVersion.getMinorVersion());
+
+            if ( GeneralUtils.DEBUG_MODE_ENABLED ) {
+                System.err.println("Parsing data stream with BCF version " + bcfVersion);
+            }
+
+            final int headerSizeInBytes = BCF2Type.INT32.read(inputStream);
+
+            if ( headerSizeInBytes <= 0 || headerSizeInBytes > MAX_HEADER_SIZE) // no bigger than 128 MB (0x08000000 bytes)
+                error("BCF2 header has invalid length: " + headerSizeInBytes + " must be > 0 and <= "+ MAX_HEADER_SIZE);
+
+            final byte[] headerBytes = new byte[headerSizeInBytes];
+            if ( inputStream.read(headerBytes) != headerSizeInBytes )
+                error("Couldn't read all of the bytes specified in the header length = " + headerSizeInBytes);
+
+            final PositionalBufferedStream bps = new PositionalBufferedStream(new ByteArrayInputStream(headerBytes));
+            final LineIterator lineIterator = new LineIteratorImpl(new SynchronousLineReader(bps));
+            final VCFCodec headerParser = new VCFCodec();
+            this.header = (VCFHeader) headerParser.readActualHeader(lineIterator);
+            bps.close();
+        } catch ( IOException e ) {
+            throw new TribbleException("I/O error while reading BCF2 header");
+        }
+
+        // create the contig offset -> name mapping
+        if ( ! header.getContigLines().isEmpty() ) {
+            contigNames.clear();
+            for ( final VCFContigHeaderLine contig : header.getContigLines()) {
+                if ( contig.getID() == null || contig.getID().equals("") )
+                    error("found a contig with an invalid ID " + contig);
+                contigNames.add(contig.getID());
+            }
+        } else {
+            error("Didn't find any contig lines in BCF2 file header");
+        }
+
+        // create the string dictionary
+        dictionary = parseDictionary(header);
+
+        // prepare the genotype field decoders
+        gtFieldDecoders = new BCF2GenotypeFieldDecoders(header);
+
+        // create and initialize the genotype builder array
+        final int nSamples = header.getNGenotypeSamples();
+        builders = new GenotypeBuilder[nSamples];
+        for ( int i = 0; i < nSamples; i++ ) {
+            builders[i] = new GenotypeBuilder(header.getGenotypeSamples().get(i));
+        }
+
+        // position right before next line (would be right before first real record byte at end of header)
+        return new FeatureCodecHeader(header, inputStream.getPosition());
+    }
+
+    @Override
+    public boolean canDecode( final String path ) {
+        FileInputStream fis = null;
+        try {
+            fis = new FileInputStream(path);
+            final BCFVersion version = BCFVersion.readBCFVersion(fis);
+            return version != null && version.getMajorVersion() == ALLOWED_MAJOR_VERSION;
+        } catch ( FileNotFoundException e ) {
+            return false;
+        } catch ( IOException e ) {
+            return false;
+        } finally {
+            try {
+                if ( fis != null ) fis.close();
+            } catch ( IOException e ) {
+                // do nothing
+            }
+        }
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // implicit block
+    //
+    // The first four values of each BCF record are inline, untyped encoded data:
+    //
+    // 4 byte integer chrom offset
+    // 4 byte integer start
+    // 4 byte integer ref length
+    // 4 byte float qual
+    //
+    // --------------------------------------------------------------------------------
+
+    /**
+     * Decode the site location (CHROM, POS, and reference length) from this class's decoder
+     * and store it in the builder
+     *
+     * @param builder the builder in which to store the decoded location
+     */
+    private final void decodeSiteLoc(final VariantContextBuilder builder) throws IOException {
+        final int contigOffset = decoder.decodeInt(BCF2Type.INT32);
+        final String contig = lookupContigName(contigOffset);
+        builder.chr(contig);
+
+        this.pos = decoder.decodeInt(BCF2Type.INT32) + 1; // GATK is one based, BCF2 is zero-based
+        final int refLength = decoder.decodeInt(BCF2Type.INT32);
+        builder.start((long)pos);
+        builder.stop((long)(pos + refLength - 1)); // minus one because GATK has closed intervals but BCF2 is open
+    }
+
+    /**
+     * Decode the remaining site-level data (QUAL, ID, alleles, FILTER, and INFO) from this class's decoder
+     *
+     * @param builder the builder in which to store the decoded values
+     * @return the site info needed to decode the genotypes block
+     */
+    private final SitesInfoForDecoding decodeSitesExtendedInfo(final VariantContextBuilder builder) throws IOException {
+        final Object qual = decoder.decodeSingleValue(BCF2Type.FLOAT);
+        if ( qual != null ) {
+            builder.log10PError(((Double)qual) / -10.0);
+        }
+
+        final int nAlleleInfo = decoder.decodeInt(BCF2Type.INT32);
+        final int nFormatSamples = decoder.decodeInt(BCF2Type.INT32);
+        final int nAlleles = nAlleleInfo >> 16;
+        final int nInfo = nAlleleInfo & 0x0000FFFF;
+        final int nFormatFields = nFormatSamples >> 24;
+        final int nSamples = nFormatSamples & 0x00FFFFFF; // lower 24 bits hold the sample count
+
+        if ( header.getNGenotypeSamples() != nSamples )
+            error("Reading BCF2 files with different numbers of samples per record " +
+                    "is not currently supported.  Saw " + header.getNGenotypeSamples() +
+                    " samples in header but have a record with " + nSamples + " samples");
+
+        decodeID(builder);
+        final List<Allele> alleles = decodeAlleles(builder, pos, nAlleles);
+        decodeFilter(builder);
+        decodeInfo(builder, nInfo);
+
+        final SitesInfoForDecoding info = new SitesInfoForDecoding(nFormatFields, nSamples, alleles);
+        if ( ! info.isValid() )
+            error("Sites info is malformed: " + info);
+        return info;
+    }
+
+    protected final static class SitesInfoForDecoding {
+        final int nFormatFields;
+        final int nSamples;
+        final List<Allele> alleles;
+
+        private SitesInfoForDecoding(final int nFormatFields, final int nSamples, final List<Allele> alleles) {
+            this.nFormatFields = nFormatFields;
+            this.nSamples = nSamples;
+            this.alleles = alleles;
+        }
+
+        public boolean isValid() {
+            return nFormatFields >= 0 &&
+                    nSamples >= 0 &&
+                    alleles != null && ! alleles.isEmpty() && alleles.get(0).isReference();
+        }
+
+        @Override
+        public String toString() {
+            return String.format("nFormatFields = %d, nSamples = %d, alleles = %s", nFormatFields, nSamples, alleles);
+        }
+    }
+
+    /**
+     * Decode the id field in this BCF2 file and store it in the builder
+     * @param builder
+     */
+    private void decodeID( final VariantContextBuilder builder ) throws IOException {
+        final String id = (String)decoder.decodeTypedValue();
+
+        if ( id == null )
+            builder.noID();
+        else
+            builder.id(id);
+    }
+
+    /**
+     * Decode the alleles from this BCF2 file and put the results in builder
+     * @param builder
+     * @param pos
+     * @param nAlleles
+     * @return the alleles
+     */
+    private List<Allele> decodeAlleles( final VariantContextBuilder builder, final int pos, final int nAlleles ) throws IOException {
+        // TODO -- probably need inline decoder for efficiency here (no sense in going bytes -> string -> vector -> bytes)
+        List<Allele> alleles = new ArrayList<Allele>(nAlleles);
+        String ref = null;
+
+        for ( int i = 0; i < nAlleles; i++ ) {
+            final String alleleBases = (String)decoder.decodeTypedValue();
+
+            final boolean isRef = i == 0;
+            final Allele allele = Allele.create(alleleBases, isRef);
+            if ( isRef ) ref = alleleBases;
+
+            alleles.add(allele);
+        }
+        assert ref != null;
+
+        builder.alleles(alleles);
+
+        assert !ref.isEmpty();
+
+        return alleles;
+    }
+
+    /**
+     * Decode the filter field of this BCF2 file and store the result in the builder
+     * @param builder
+     */
+    private void decodeFilter( final VariantContextBuilder builder ) throws IOException {
+        final Object value = decoder.decodeTypedValue();
+
+        if ( value == null )
+            builder.unfiltered();
+        else {
+            if ( value instanceof Integer ) {
+                // fast path for single integer result
+                final String filterString = getDictionaryString((Integer)value);
+                if ( VCFConstants.PASSES_FILTERS_v4.equals(filterString))
+                    builder.passFilters();
+                else
+                    builder.filter(filterString);
+            } else {
+                for ( final int offset : (List<Integer>)value )
+                    builder.filter(getDictionaryString(offset));
+            }
+        }
+    }
+
+    /**
+     * Loop over the info field key / value pairs in this BCF2 file and decode them into the builder
+     *
+     * @param builder
+     * @param numInfoFields
+     */
+    private void decodeInfo( final VariantContextBuilder builder, final int numInfoFields ) throws IOException {
+        if ( numInfoFields == 0 )
+            // fast path, don't bother doing any work if there are no fields
+            return;
+
+        final Map<String, Object> infoFieldEntries = new HashMap<String, Object>(numInfoFields);
+        for ( int i = 0; i < numInfoFields; i++ ) {
+            final String key = getDictionaryString();
+            Object value = decoder.decodeTypedValue();
+            final VCFCompoundHeaderLine metaData = VariantContextUtils.getMetaDataForField(header, key);
+            if ( metaData.getType() == VCFHeaderLineType.Flag ) value = true; // special case for flags
+            infoFieldEntries.put(key, value);
+        }
+
+        builder.attributes(infoFieldEntries);
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Decoding Genotypes
+    //
+    // --------------------------------------------------------------------------------
+
+    /**
+     * Create the lazy loader for the genotypes data, and store it in the builder
+     * so that the VC will be able to decode on demand the genotypes data
+     *
+     * @param siteInfo
+     * @param builder
+     */
+    private void createLazyGenotypesDecoder( final SitesInfoForDecoding siteInfo,
+                                             final VariantContextBuilder builder ) {
+        if (siteInfo.nSamples > 0) {
+            final LazyGenotypesContext.LazyParser lazyParser =
+                    new BCF2LazyGenotypesDecoder(this, siteInfo.alleles, siteInfo.nSamples, siteInfo.nFormatFields, builders);
+
+            final LazyData lazyData = new LazyData(header, siteInfo.nFormatFields, decoder.getRecordBytes());
+            final LazyGenotypesContext lazy = new LazyGenotypesContext(lazyParser, lazyData, header.getNGenotypeSamples());
+
+            // did we resort the sample names?  If so, we need to load the genotype data
+            if ( !header.samplesWereAlreadySorted() )
+                lazy.decode();
+
+            builder.genotypesNoValidation(lazy);
+        }
+    }
+
+    public static class LazyData {
+        final public VCFHeader header;
+        final public int nGenotypeFields;
+        final public byte[] bytes;
+
+        public LazyData(final VCFHeader header, final int nGenotypeFields, final byte[] bytes) {
+            this.header = header;
+            this.nGenotypeFields = nGenotypeFields;
+            this.bytes = bytes;
+        }
+    }
+
+    private final String getDictionaryString() throws IOException {
+        return getDictionaryString((Integer) decoder.decodeTypedValue());
+    }
+
+    protected final String getDictionaryString(final int offset) {
+        return dictionary.get(offset);
+    }
+
+    /**
+     * Translate the contig offset as encoded in the BCF file into the actual contig
+     * name taken from the header's contig lines
+     *
+     * @param contigOffset the offset of the contig in the header
+     * @return the contig name
+     */
+    private final String lookupContigName( final int contigOffset ) {
+        return contigNames.get(contigOffset);
+    }
+
+    private final ArrayList<String> parseDictionary(final VCFHeader header) {
+        final ArrayList<String> dict = BCF2Utils.makeDictionary(header);
+
+        // if we got here we never found a dictionary, or there are no elements in the dictionary
+        if ( dict.isEmpty() )
+            error("Dictionary header element was absent or empty");
+
+        return dict;
+    }
+
+    /**
+     * @return the VCFHeader we found in this BCF2 file
+     */
+    protected VCFHeader getHeader() {
+        return header;
+    }
+
+    protected BCF2GenotypeFieldDecoders.Decoder getGenotypeFieldDecoder(final String field) {
+        return gtFieldDecoders.getDecoder(field);
+    }
+
+    private void error(final String message) throws RuntimeException {
+        throw new TribbleException(String.format("%s, at record %d with position %d:", message, recordNo, pos));
+    }
+}
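
For readers following along, the new codec above plugs into Tribble's feature-reader machinery the same way the existing VCF codec does. Below is a minimal usage sketch, not part of this commit; the file name example.bcf is a placeholder, and it assumes AbstractFeatureReader will accept the binary codec without an index (pass true instead of false if an index is required).

    import htsjdk.tribble.AbstractFeatureReader;
    import htsjdk.tribble.readers.PositionalBufferedStream;
    import htsjdk.variant.bcf2.BCF2Codec;
    import htsjdk.variant.variantcontext.VariantContext;

    public class BCF2CodecSketch {
        public static void main(final String[] args) throws Exception {
            // "example.bcf" is a made-up path; BCF2Codec reads the header first,
            // then streams one VariantContext per record.
            try (final AbstractFeatureReader<VariantContext, PositionalBufferedStream> reader =
                         AbstractFeatureReader.getFeatureReader("example.bcf", new BCF2Codec(), false)) {
                for (final VariantContext vc : reader.iterator()) {
                    System.out.println(vc.getContig() + ":" + vc.getStart() + " " + vc.getAlleles());
                }
            }
        }
    }
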
diff --git a/src/java/htsjdk/variant/bcf2/BCF2Decoder.java b/src/main/java/htsjdk/variant/bcf2/BCF2Decoder.java
similarity index 100%
rename from src/java/htsjdk/variant/bcf2/BCF2Decoder.java
rename to src/main/java/htsjdk/variant/bcf2/BCF2Decoder.java
diff --git a/src/java/htsjdk/variant/bcf2/BCF2GenotypeFieldDecoders.java b/src/main/java/htsjdk/variant/bcf2/BCF2GenotypeFieldDecoders.java
similarity index 100%
rename from src/java/htsjdk/variant/bcf2/BCF2GenotypeFieldDecoders.java
rename to src/main/java/htsjdk/variant/bcf2/BCF2GenotypeFieldDecoders.java
diff --git a/src/java/htsjdk/variant/bcf2/BCF2LazyGenotypesDecoder.java b/src/main/java/htsjdk/variant/bcf2/BCF2LazyGenotypesDecoder.java
similarity index 100%
rename from src/java/htsjdk/variant/bcf2/BCF2LazyGenotypesDecoder.java
rename to src/main/java/htsjdk/variant/bcf2/BCF2LazyGenotypesDecoder.java
diff --git a/src/java/htsjdk/variant/bcf2/BCF2Type.java b/src/main/java/htsjdk/variant/bcf2/BCF2Type.java
similarity index 100%
rename from src/java/htsjdk/variant/bcf2/BCF2Type.java
rename to src/main/java/htsjdk/variant/bcf2/BCF2Type.java
diff --git a/src/java/htsjdk/variant/bcf2/BCF2Utils.java b/src/main/java/htsjdk/variant/bcf2/BCF2Utils.java
similarity index 100%
rename from src/java/htsjdk/variant/bcf2/BCF2Utils.java
rename to src/main/java/htsjdk/variant/bcf2/BCF2Utils.java
diff --git a/src/java/htsjdk/variant/bcf2/BCFVersion.java b/src/main/java/htsjdk/variant/bcf2/BCFVersion.java
similarity index 100%
rename from src/java/htsjdk/variant/bcf2/BCFVersion.java
rename to src/main/java/htsjdk/variant/bcf2/BCFVersion.java
diff --git a/src/main/java/htsjdk/variant/example/PrintVariantsExample.java b/src/main/java/htsjdk/variant/example/PrintVariantsExample.java
new file mode 100755
index 0000000..4471cea
--- /dev/null
+++ b/src/main/java/htsjdk/variant/example/PrintVariantsExample.java
@@ -0,0 +1,103 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2016 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ */
+package htsjdk.variant.example;
+
+import htsjdk.samtools.Defaults;
+import htsjdk.samtools.util.Log;
+import htsjdk.samtools.util.ProgressLogger;
+import htsjdk.tribble.AbstractFeatureReader;
+import htsjdk.tribble.readers.LineIterator;
+import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.variantcontext.writer.Options;
+import htsjdk.variant.variantcontext.writer.VariantContextWriter;
+import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder;
+import htsjdk.variant.vcf.VCFCodec;
+import htsjdk.variant.vcf.VCFHeader;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.InetAddress;
+import java.util.Arrays;
+import java.util.stream.Collectors;
+
+/**
+ * This is an example program showing how to use Feature readers and (optionally) writers.
+ * It is also useful for measuring how long reading and writing take.
+ * An example invocation is:
+ * java -cp dist/htsjdk-2.1.1.jar htsjdk.variant.example.PrintVariantsExample in.vcf out.vcf
+ * <p>
+ * Arguments:
+ * - the first argument is the input file (VCF)
+ * - the second argument is optional and is the name of the output file (nothing gets written if this argument is missing)
+ */
+public final class PrintVariantsExample {
+    private PrintVariantsExample() {
+    }
+
+    private static final Log log = Log.getInstance(PrintVariantsExample.class);
+
+    public static void main(final String[] args) throws IOException {
+        if (args.length < 1) {
+            System.out.println("Usage: " + PrintVariantsExample.class.getCanonicalName() + " inFile [outFile]");
+            System.exit(1);
+        }
+        final File inputFile = new File(args[0]);
+        final File outputFile = args.length >= 2 ? new File(args[1]) : null;
+
+        final long start = System.currentTimeMillis();
+
+        log.info("Start with args:" + Arrays.toString(args));
+        printConfigurationInfo();
+
+        try(final VariantContextWriter writer = outputFile == null ? null : new VariantContextWriterBuilder().setOutputFile(outputFile).setOutputFileType(VariantContextWriterBuilder.OutputType.VCF).unsetOption(Options.INDEX_ON_THE_FLY).build();
+            final AbstractFeatureReader<VariantContext, LineIterator> reader = AbstractFeatureReader.getFeatureReader(inputFile.getAbsolutePath(), new VCFCodec(), false)){
+
+            log.info(reader.getClass().getSimpleName() + " hasIndex " + reader.hasIndex());
+            if (writer != null){
+                log.info(writer.getClass().getSimpleName());
+                writer.writeHeader((VCFHeader) reader.getHeader());
+            }
+
+            final ProgressLogger pl = new ProgressLogger(log, 1000000);
+            for (final VariantContext vc : reader.iterator()) {
+                if (writer != null){
+                    writer.add(vc);
+                }
+                pl.record(vc.getContig(), vc.getStart());
+            }
+        }
+
+        final long end = System.currentTimeMillis();
+        log.info(String.format("Done. Elapsed time %.3f seconds", (end - start) / 1000.0));
+    }
+
+    private static void printConfigurationInfo() throws IOException {
+        log.info("Executing as " +
+                System.getProperty("user.name") + '@' + InetAddress.getLocalHost().getHostName() +
+                " on " + System.getProperty("os.name") + ' ' + System.getProperty("os.version") +
+                ' ' + System.getProperty("os.arch") + "; " + System.getProperty("java.vm.name") +
+                ' ' + System.getProperty("java.runtime.version"));
+
+        log.info(Defaults.allDefaults().entrySet().stream().map(e -> e.getKey() + ':' + e.getValue()).collect(Collectors.<String>joining(" ")));
+    }
+}
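
The example program above copies every record it reads. As a hedged sketch (not part of this commit), the same reader can be combined with the variant-context filter classes relocated later in this patch to print only PASSing records; the class name and the use of args[0] as the input path are illustrative assumptions.

    import htsjdk.tribble.AbstractFeatureReader;
    import htsjdk.tribble.readers.LineIterator;
    import htsjdk.variant.variantcontext.VariantContext;
    import htsjdk.variant.variantcontext.filter.FilteringVariantContextIterator;
    import htsjdk.variant.variantcontext.filter.PassingVariantFilter;
    import htsjdk.variant.vcf.VCFCodec;

    public class PrintPassingVariantsSketch {
        public static void main(final String[] args) throws Exception {
            try (final AbstractFeatureReader<VariantContext, LineIterator> reader =
                         AbstractFeatureReader.getFeatureReader(args[0], new VCFCodec(), false)) {
                // Wrap the reader's iterator so only records passing all filters are returned.
                final FilteringVariantContextIterator passing =
                        new FilteringVariantContextIterator(reader.iterator(), new PassingVariantFilter());
                while (passing.hasNext()) {
                    final VariantContext vc = passing.next();
                    System.out.println(vc.getContig() + ":" + vc.getStart());
                }
            }
        }
    }
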
diff --git a/src/java/htsjdk/variant/utils/GeneralUtils.java b/src/main/java/htsjdk/variant/utils/GeneralUtils.java
similarity index 100%
rename from src/java/htsjdk/variant/utils/GeneralUtils.java
rename to src/main/java/htsjdk/variant/utils/GeneralUtils.java
diff --git a/src/java/htsjdk/variant/utils/SAMSequenceDictionaryExtractor.java b/src/main/java/htsjdk/variant/utils/SAMSequenceDictionaryExtractor.java
similarity index 100%
rename from src/java/htsjdk/variant/utils/SAMSequenceDictionaryExtractor.java
rename to src/main/java/htsjdk/variant/utils/SAMSequenceDictionaryExtractor.java
diff --git a/src/java/htsjdk/variant/variantcontext/Allele.java b/src/main/java/htsjdk/variant/variantcontext/Allele.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/Allele.java
rename to src/main/java/htsjdk/variant/variantcontext/Allele.java
diff --git a/src/java/htsjdk/variant/variantcontext/CommonInfo.java b/src/main/java/htsjdk/variant/variantcontext/CommonInfo.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/CommonInfo.java
rename to src/main/java/htsjdk/variant/variantcontext/CommonInfo.java
diff --git a/src/java/htsjdk/variant/variantcontext/FastGenotype.java b/src/main/java/htsjdk/variant/variantcontext/FastGenotype.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/FastGenotype.java
rename to src/main/java/htsjdk/variant/variantcontext/FastGenotype.java
diff --git a/src/java/htsjdk/variant/variantcontext/Genotype.java b/src/main/java/htsjdk/variant/variantcontext/Genotype.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/Genotype.java
rename to src/main/java/htsjdk/variant/variantcontext/Genotype.java
diff --git a/src/java/htsjdk/variant/variantcontext/GenotypeBuilder.java b/src/main/java/htsjdk/variant/variantcontext/GenotypeBuilder.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/GenotypeBuilder.java
rename to src/main/java/htsjdk/variant/variantcontext/GenotypeBuilder.java
diff --git a/src/java/htsjdk/variant/variantcontext/GenotypeJEXLContext.java b/src/main/java/htsjdk/variant/variantcontext/GenotypeJEXLContext.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/GenotypeJEXLContext.java
rename to src/main/java/htsjdk/variant/variantcontext/GenotypeJEXLContext.java
diff --git a/src/java/htsjdk/variant/variantcontext/GenotypeLikelihoods.java b/src/main/java/htsjdk/variant/variantcontext/GenotypeLikelihoods.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/GenotypeLikelihoods.java
rename to src/main/java/htsjdk/variant/variantcontext/GenotypeLikelihoods.java
diff --git a/src/java/htsjdk/variant/variantcontext/GenotypeType.java b/src/main/java/htsjdk/variant/variantcontext/GenotypeType.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/GenotypeType.java
rename to src/main/java/htsjdk/variant/variantcontext/GenotypeType.java
diff --git a/src/java/htsjdk/variant/variantcontext/GenotypesContext.java b/src/main/java/htsjdk/variant/variantcontext/GenotypesContext.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/GenotypesContext.java
rename to src/main/java/htsjdk/variant/variantcontext/GenotypesContext.java
diff --git a/src/java/htsjdk/variant/variantcontext/JEXLMap.java b/src/main/java/htsjdk/variant/variantcontext/JEXLMap.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/JEXLMap.java
rename to src/main/java/htsjdk/variant/variantcontext/JEXLMap.java
diff --git a/src/java/htsjdk/variant/variantcontext/LazyGenotypesContext.java b/src/main/java/htsjdk/variant/variantcontext/LazyGenotypesContext.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/LazyGenotypesContext.java
rename to src/main/java/htsjdk/variant/variantcontext/LazyGenotypesContext.java
diff --git a/src/java/htsjdk/variant/variantcontext/VariantContext.java b/src/main/java/htsjdk/variant/variantcontext/VariantContext.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/VariantContext.java
rename to src/main/java/htsjdk/variant/variantcontext/VariantContext.java
diff --git a/src/main/java/htsjdk/variant/variantcontext/VariantContextBuilder.java b/src/main/java/htsjdk/variant/variantcontext/VariantContextBuilder.java
new file mode 100644
index 0000000..33844db
--- /dev/null
+++ b/src/main/java/htsjdk/variant/variantcontext/VariantContextBuilder.java
@@ -0,0 +1,498 @@
+/*
+* Copyright (c) 2012 The Broad Institute
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package htsjdk.variant.variantcontext;
+
+import htsjdk.variant.vcf.VCFConstants;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * <p>Builder class for <code>VariantContext</code>.</p>
+ *
+ * <p>Some basic assumptions here:</p>
+ * <ol>
+ * <li> data isn't protectively copied.  If you provide an attribute map to
+ * the builder, and modify it later, the builder will see this and so will any
+ * resulting variant contexts.  It's best not to modify collections provided
+ * to a builder.</li>
+ *
+ * <li> the system uses the standard builder model, allowing the simple construction idiom:
+ *<blockquote>
+ *   <code>builder.source("a").genotypes(gc).id("x").make()</code> => <code>VariantContext</code>
+ *</blockquote></li>
+ *<li>The best way to copy a VariantContext is:
+ *<blockquote>
+ *   <code>new VariantContextBuilder(vc).make()</code> => a copy of VC
+ *</blockquote></li>
+ * <li> validation of arguments is done during the final <code>make()</code> call, so a
+ * <code>VariantContextBuilder</code> can exist in an inconsistent state as long as those issues
+ * are resolved before the call to <code>make()</code> is issued.
+ *</ol>
+ * @author depristo
+ */
+public class VariantContextBuilder {
+    // required fields
+    private boolean fullyDecoded = false;
+    private String source = null;
+    private String contig = null;
+    private long start = -1;
+    private long stop = -1;
+    private Collection<Allele> alleles = null;
+
+    // optional -> these are set to the appropriate default value
+    private String ID = VCFConstants.EMPTY_ID_FIELD;
+    private GenotypesContext genotypes = GenotypesContext.NO_GENOTYPES;
+    private double log10PError = VariantContext.NO_LOG10_PERROR;
+    private Set<String> filters = null;
+    private Map<String, Object> attributes = null;
+    private boolean attributesCanBeModified = false;
+
+    /** enum of what must be validated */
+    final private EnumSet<VariantContext.Validation> toValidate = EnumSet.noneOf(VariantContext.Validation.class);
+
+    /**
+     * Create an empty VariantContextBuilder where all values adopt their default values.  Note that
+     * source, chr, start, stop, and alleles must eventually be filled in, or the resulting VariantContext
+     * will throw an error.
+     */
+    public VariantContextBuilder() {}
+
+    /**
+     * Create a VariantContextBuilder where all values adopt their default values, but the bare minimum
+     * of information (source, contig, start, stop, and alleles) has been provided to start.
+     */
+    public VariantContextBuilder(final String source, final String contig, final long start, final long stop, final Collection<Allele> alleles) {
+        this.source = source;
+        this.contig = contig;
+        this.start = start;
+        this.stop = stop;
+        this.alleles = alleles;
+        this.attributes = Collections.emptyMap(); // immutable
+        toValidate.add(VariantContext.Validation.ALLELES);
+    }
+
+    /**
+     * Returns a new builder based on parent -- the new VC will have all fields initialized
+     * to their corresponding values in parent.  This is the best way to create a derived VariantContext
+     *
+     * @param parent  Cannot be null
+     */
+    public VariantContextBuilder(final VariantContext parent) {
+        if ( parent == null ) throw new IllegalArgumentException("BUG: VariantContextBuilder parent argument cannot be null in VariantContextBuilder");
+        this.alleles = parent.getAlleles();
+        this.attributes = parent.getAttributes();
+        this.attributesCanBeModified = false;
+        this.contig = parent.getContig();
+        this.filters = parent.getFiltersMaybeNull();
+        this.genotypes = parent.getGenotypes();
+        this.ID = parent.getID();
+        this.log10PError = parent.getLog10PError();
+        this.source = parent.getSource();
+        this.start = parent.getStart();
+        this.stop = parent.getEnd();
+        this.fullyDecoded = parent.isFullyDecoded();
+    }
+
+    public VariantContextBuilder(final VariantContextBuilder parent) {
+        if ( parent == null ) throw new IllegalArgumentException("BUG: VariantContext parent argument cannot be null in VariantContextBuilder");
+        this.alleles = parent.alleles;
+        this.attributesCanBeModified = false;
+        this.contig = parent.contig;
+        this.genotypes = parent.genotypes;
+        this.ID = parent.ID;
+        this.log10PError = parent.log10PError;
+        this.source = parent.source;
+        this.start = parent.start;
+        this.stop = parent.stop;
+        this.fullyDecoded = parent.fullyDecoded;
+
+        this.attributes(parent.attributes);
+        this.filters(parent.filters);
+    }
+
+    public VariantContextBuilder copy() {
+        return new VariantContextBuilder(this);
+    }
+
+    /**
+     * Tells this builder to use this collection of alleles for the resulting VariantContext
+     *
+     * @param alleles
+     * @return this builder
+     */
+    public VariantContextBuilder alleles(final Collection<Allele> alleles) {
+        this.alleles = alleles;
+        toValidate.add(VariantContext.Validation.ALLELES);
+        return this;
+    }
+
+    public VariantContextBuilder alleles(final List<String> alleleStrings) {
+        final List<Allele> alleles = new ArrayList<Allele>(alleleStrings.size());
+
+        for ( int i = 0; i < alleleStrings.size(); i++ ) {
+            alleles.add(Allele.create(alleleStrings.get(i), i == 0));
+        }
+
+        return alleles(alleles);
+    }
+
+    public VariantContextBuilder alleles(final String ... alleleStrings) {
+        return alleles(Arrays.asList(alleleStrings));
+    }
+
+    public List<Allele> getAlleles() {
+        return new ArrayList<Allele>(alleles);
+    }
+
+    /**
+     * Tells this builder to use this map of attributes for the resulting <code>VariantContext</code>. The
+     * contents of the Map are copied to a new Map to ensure that modifications to the provided Map post-invocation
+     * don't affect the VariantContext and also to ensure additional attributes can be added in case the provided
+     * map doesn't support changes (e.g. UnmodifiableMap).
+     *
+     * Attributes can be <code>null</code> -> meaning there are no attributes.  After
+     * calling this routine the builder assumes it can modify the attributes
+     * object here, if subsequent calls are made to set attribute values
+     *
+     * Value for each attribute must be of a type that implements {@link Serializable} or else
+     * serialization will fail.
+     *
+     * @param attributes a Map of attributes to replace any existing attributes with
+     */
+    public VariantContextBuilder attributes(final Map<String, ?> attributes) {
+        this.attributes = new HashMap<>();
+        if (attributes != null) this.attributes.putAll(attributes);
+        this.attributesCanBeModified = true;
+        return this;
+    }
+
+    /**
+     * Puts the key -> value mapping into this builder's attributes
+     *
+     * @param key key for the attribute
+     * @param value value for the attribute (must be of a type that implements {@link Serializable} or else serialization will fail)
+     */
+    public VariantContextBuilder attribute(final String key, final Object value) {
+        makeAttributesModifiable();
+        attributes.put(key, value);
+        return this;
+    }
+
+    /**
+     * Removes key if present in the attributes
+     *
+     * @param key  key to remove
+     * @return
+     */
+    public VariantContextBuilder rmAttribute(final String key) {
+        makeAttributesModifiable();
+        attributes.remove(key);
+        return this;
+    }
+
+    /**
+     * Removes list of keys if present in the attributes
+     *
+     * @param keys  list of keys to remove
+     * @return
+     */
+    public VariantContextBuilder rmAttributes(final List<String> keys) {
+        makeAttributesModifiable();
+        for ( final String key : keys )
+            attributes.remove(key);
+        return this;
+    }
+
+    /**
+     * Makes the attributes field modifiable.  In many cases attributes is just a pointer to an immutable
+     * collection, so methods that want to add / remove records require the attributes to be copied to a
+     * modifiable map first.
+     */
+    private void makeAttributesModifiable() {
+        if ( ! attributesCanBeModified ) {
+            this.attributesCanBeModified = true;
+            if (attributes == null) {
+            	this.attributes = new HashMap<String, Object>();
+            } else {
+            	this.attributes = new HashMap<String, Object>(attributes);
+            }
+        }
+    }
+
+    /**
+     * This builder's filters are set to this value
+     *
+     * filters can be <code>null</code> -> meaning there are no filters
+     * @param filters
+     */
+    public VariantContextBuilder filters(final Set<String> filters) {
+        this.filters = filters;
+        return this;
+    }
+
+    /**
+     * {@link #filters}
+     *
+     * @param filters
+     * @return
+     */
+    public VariantContextBuilder filters(final String ... filters) {
+        filters(new LinkedHashSet<String>(Arrays.asList(filters)));
+        return this;
+    }
+
+    public VariantContextBuilder filter(final String filter) {
+        if ( this.filters == null ) this.filters = new LinkedHashSet<String>(1);
+        this.filters.add(filter);
+        return this;
+    }
+
+    /**
+     * Tells this builder that the resulting VariantContext should have PASS filters
+     *
+     * @return
+     */
+    public VariantContextBuilder passFilters() {
+        return filters(VariantContext.PASSES_FILTERS);
+    }
+
+    /**
+     * Tells this builder that the resulting VariantContext should be unfiltered
+     *
+     * @return
+     */
+    public VariantContextBuilder unfiltered() {
+        this.filters = null;
+        return this;
+    }
+
+    /**
+     * Tells this builder that the resulting <code>VariantContext</code> should use this <code>GenotypesContext</code>.
+     *
+     * Note that genotypes can be <code>null</code> -> meaning there are no genotypes
+     *
+     * @param genotypes
+     */
+    public VariantContextBuilder genotypes(final GenotypesContext genotypes) {
+        this.genotypes = genotypes;
+        if ( genotypes != null )
+            toValidate.add(VariantContext.Validation.GENOTYPES);
+        return this;
+    }
+
+    public VariantContextBuilder genotypesNoValidation(final GenotypesContext genotypes) {
+        this.genotypes = genotypes;
+        return this;
+    }
+
+    /**
+     * Tells this builder that the resulting <code>VariantContext</code> should use a <code>GenotypesContext</code> containing these genotypes
+     *
+     * Note that genotypes can be <code>null</code>, meaning there are no genotypes
+     *
+     * @param genotypes
+     */
+    public VariantContextBuilder genotypes(final Collection<Genotype> genotypes) {
+        return genotypes(GenotypesContext.copy(genotypes));
+    }
+
+    /**
+     * Tells this builder that the resulting <code>VariantContext</code> should use a <code>GenotypesContext</code> containing these genotypes
+     * @param genotypes
+     */
+    public VariantContextBuilder genotypes(final Genotype ... genotypes) {
+        return genotypes(GenotypesContext.copy(Arrays.asList(genotypes)));
+    }
+
+    /**
+     * Tells this builder that the resulting VariantContext should not contain any genotypes
+     */
+    public VariantContextBuilder noGenotypes() {
+        this.genotypes = null;
+        return this;
+    }
+
+    /**
+     * Tells us that the resulting VariantContext should have ID
+     * @param ID
+     * @return
+     */
+    public VariantContextBuilder id(final String ID) {
+        this.ID = ID;
+        return this;
+    }
+
+    /**
+     * Tells us that the resulting VariantContext should not have an ID
+     * @return
+     */
+    public VariantContextBuilder noID() {
+        return id(VCFConstants.EMPTY_ID_FIELD);
+    }
+
+    /**
+     * Tells us that the resulting VariantContext should have log10PError
+     * @param log10PError
+     * @return
+     */
+    public VariantContextBuilder log10PError(final double log10PError) {
+        this.log10PError = log10PError;
+        return this;
+    }
+
+    /**
+     * Tells us that the resulting VariantContext should have source field set to source
+     * @param source
+     * @return
+     */
+    public VariantContextBuilder source(final String source) {
+        this.source = source;
+        return this;
+    }
+
+    /**
+     * Tells us that the resulting VariantContext should have the specified location
+     * @param contig
+     * @param start
+     * @param stop
+     * @return
+     */
+    public VariantContextBuilder loc(final String contig, final long start, final long stop) {
+        this.contig = contig;
+        this.start = start;
+        this.stop = stop;
+        toValidate.add(VariantContext.Validation.ALLELES);
+        return this;
+    }
+
+    /**
+     * Tells us that the resulting VariantContext should be on the specified contig
+     * @param contig
+     * @return
+     */
+    public VariantContextBuilder chr(final String contig) {
+        this.contig = contig;
+        return this;
+    }
+
+    /**
+     * Tells us that the resulting VariantContext should have the specified start position
+     * @param start
+     * @return
+     */
+    public VariantContextBuilder start(final long start) {
+        this.start = start;
+        toValidate.add(VariantContext.Validation.ALLELES);
+        return this;
+    }
+
+    /**
+     * Tells us that the resulting VariantContext should have the specified stop position
+     * @param stop
+     * @return
+     */
+    public VariantContextBuilder stop(final long stop) {
+        this.stop = stop;
+        return this;
+    }
+
+    /**
+     * @see #computeEndFromAlleles(java.util.List, int, int) with endForSymbolicAlleles == -1
+     */
+    public VariantContextBuilder computeEndFromAlleles(final List<Allele> alleles, final int start) {
+        return computeEndFromAlleles(alleles, start, -1);
+    }
+
+    /**
+     * Compute the end position for this VariantContext from the alleles themselves
+     *
+     * Assigns the computed stop position to this builder.
+     *
+     * @param alleles the list of alleles to consider.  The reference allele must be the first one
+     * @param start the known start position of this event
+     * @param endForSymbolicAlleles the end position to use if any of the alleles is symbolic.  Can be -1
+     *                              if no symbolic allele is expected, but an error will be thrown if one is found
+     * @return this builder
+     */
+    public VariantContextBuilder computeEndFromAlleles(final List<Allele> alleles, final int start, final int endForSymbolicAlleles) {
+        stop(VariantContextUtils.computeEndFromAlleles(alleles, start, endForSymbolicAlleles));
+        return this;
+    }
+
+    /**
+     * @return true if this builder contains fully decoded data
+     *
+     * See VariantContext for more information
+     */
+    public boolean isFullyDecoded() {
+        return fullyDecoded;
+    }
+
+    /**
+     * Sets this builder's fully decoded state to true.
+     *
+     * A fully decoded builder indicates that all fields are represented by their
+     * proper java objects (e.g., Integer(10) not "10").
+     *
+     * See VariantContext for more information
+     *
+     * @param isFullyDecoded
+     */
+    public VariantContextBuilder fullyDecoded(boolean isFullyDecoded) {
+        this.fullyDecoded = isFullyDecoded;
+        return this;
+    }
+
+    /**
+     * Takes all of the builder data provided up to this point, and instantiates
+     * a freshly allocated VariantContext with all of the builder data.  This
+     * VariantContext is validated as appropriate; if validation fails, an
+     * exception is thrown, otherwise the new VariantContext is returned.
+     *
+     * Note that this function can be called multiple times to create multiple
+     * VariantContexts from the same builder.
+     */
+    public VariantContext make() {
+        return make(false);
+    }
+
+    public VariantContext make(final boolean leaveModifyableAsIs) {
+        if(!leaveModifyableAsIs) attributesCanBeModified = false;
+
+        return new VariantContext(source, ID, contig, start, stop, alleles,
+                genotypes, log10PError, filters, attributes,
+                fullyDecoded, toValidate);
+    }
+}
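
To make the builder contract described above concrete, here is a minimal usage sketch, not part of this commit; the contig name, position, ID, and AC value are made-up illustrative data.

    import htsjdk.variant.variantcontext.Allele;
    import htsjdk.variant.variantcontext.VariantContext;
    import htsjdk.variant.variantcontext.VariantContextBuilder;

    import java.util.Arrays;
    import java.util.List;

    public class VariantContextBuilderSketch {
        public static void main(final String[] args) {
            // A bi-allelic A->G SNP: start == stop because the reference allele is one base long.
            final List<Allele> alleles = Arrays.asList(
                    Allele.create("A", true),    // reference allele (must come first)
                    Allele.create("G", false));  // alternate allele

            final VariantContext vc = new VariantContextBuilder("sketch", "chr1", 100, 100, alleles)
                    .id("rs0000001")             // made-up ID
                    .log10PError(30 / -10.0)     // QUAL 30 expressed as a log10 error probability
                    .passFilters()
                    .attribute("AC", 1)
                    .make();                     // validation happens here

            System.out.println(vc);
        }
    }
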
diff --git a/src/java/htsjdk/variant/variantcontext/VariantContextComparator.java b/src/main/java/htsjdk/variant/variantcontext/VariantContextComparator.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/VariantContextComparator.java
rename to src/main/java/htsjdk/variant/variantcontext/VariantContextComparator.java
diff --git a/src/java/htsjdk/variant/variantcontext/VariantContextUtils.java b/src/main/java/htsjdk/variant/variantcontext/VariantContextUtils.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/VariantContextUtils.java
rename to src/main/java/htsjdk/variant/variantcontext/VariantContextUtils.java
diff --git a/src/java/htsjdk/variant/variantcontext/VariantJEXLContext.java b/src/main/java/htsjdk/variant/variantcontext/VariantJEXLContext.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/VariantJEXLContext.java
rename to src/main/java/htsjdk/variant/variantcontext/VariantJEXLContext.java
diff --git a/src/java/htsjdk/variant/variantcontext/filter/CompoundFilter.java b/src/main/java/htsjdk/variant/variantcontext/filter/CompoundFilter.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/filter/CompoundFilter.java
rename to src/main/java/htsjdk/variant/variantcontext/filter/CompoundFilter.java
diff --git a/src/java/htsjdk/variant/variantcontext/filter/FilteringIterator.java b/src/main/java/htsjdk/variant/variantcontext/filter/FilteringIterator.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/filter/FilteringIterator.java
rename to src/main/java/htsjdk/variant/variantcontext/filter/FilteringIterator.java
diff --git a/src/java/htsjdk/variant/variantcontext/filter/FilteringVariantContextIterator.java b/src/main/java/htsjdk/variant/variantcontext/filter/FilteringVariantContextIterator.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/filter/FilteringVariantContextIterator.java
rename to src/main/java/htsjdk/variant/variantcontext/filter/FilteringVariantContextIterator.java
diff --git a/src/java/htsjdk/variant/variantcontext/filter/GenotypeQualityFilter.java b/src/main/java/htsjdk/variant/variantcontext/filter/GenotypeQualityFilter.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/filter/GenotypeQualityFilter.java
rename to src/main/java/htsjdk/variant/variantcontext/filter/GenotypeQualityFilter.java
diff --git a/src/java/htsjdk/variant/variantcontext/filter/HeterozygosityFilter.java b/src/main/java/htsjdk/variant/variantcontext/filter/HeterozygosityFilter.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/filter/HeterozygosityFilter.java
rename to src/main/java/htsjdk/variant/variantcontext/filter/HeterozygosityFilter.java
diff --git a/src/java/htsjdk/variant/variantcontext/filter/JavascriptVariantFilter.java b/src/main/java/htsjdk/variant/variantcontext/filter/JavascriptVariantFilter.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/filter/JavascriptVariantFilter.java
rename to src/main/java/htsjdk/variant/variantcontext/filter/JavascriptVariantFilter.java
diff --git a/src/java/htsjdk/variant/variantcontext/filter/PassingVariantFilter.java b/src/main/java/htsjdk/variant/variantcontext/filter/PassingVariantFilter.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/filter/PassingVariantFilter.java
rename to src/main/java/htsjdk/variant/variantcontext/filter/PassingVariantFilter.java
diff --git a/src/java/htsjdk/variant/variantcontext/filter/SnpFilter.java b/src/main/java/htsjdk/variant/variantcontext/filter/SnpFilter.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/filter/SnpFilter.java
rename to src/main/java/htsjdk/variant/variantcontext/filter/SnpFilter.java
diff --git a/src/java/htsjdk/variant/variantcontext/filter/VariantContextFilter.java b/src/main/java/htsjdk/variant/variantcontext/filter/VariantContextFilter.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/filter/VariantContextFilter.java
rename to src/main/java/htsjdk/variant/variantcontext/filter/VariantContextFilter.java
diff --git a/src/java/htsjdk/variant/variantcontext/writer/AsyncVariantContextWriter.java b/src/main/java/htsjdk/variant/variantcontext/writer/AsyncVariantContextWriter.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/writer/AsyncVariantContextWriter.java
rename to src/main/java/htsjdk/variant/variantcontext/writer/AsyncVariantContextWriter.java
diff --git a/src/java/htsjdk/variant/variantcontext/writer/BCF2Encoder.java b/src/main/java/htsjdk/variant/variantcontext/writer/BCF2Encoder.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/writer/BCF2Encoder.java
rename to src/main/java/htsjdk/variant/variantcontext/writer/BCF2Encoder.java
diff --git a/src/java/htsjdk/variant/variantcontext/writer/BCF2FieldEncoder.java b/src/main/java/htsjdk/variant/variantcontext/writer/BCF2FieldEncoder.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/writer/BCF2FieldEncoder.java
rename to src/main/java/htsjdk/variant/variantcontext/writer/BCF2FieldEncoder.java
diff --git a/src/java/htsjdk/variant/variantcontext/writer/BCF2FieldWriter.java b/src/main/java/htsjdk/variant/variantcontext/writer/BCF2FieldWriter.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/writer/BCF2FieldWriter.java
rename to src/main/java/htsjdk/variant/variantcontext/writer/BCF2FieldWriter.java
diff --git a/src/java/htsjdk/variant/variantcontext/writer/BCF2FieldWriterManager.java b/src/main/java/htsjdk/variant/variantcontext/writer/BCF2FieldWriterManager.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/writer/BCF2FieldWriterManager.java
rename to src/main/java/htsjdk/variant/variantcontext/writer/BCF2FieldWriterManager.java
diff --git a/src/java/htsjdk/variant/variantcontext/writer/BCF2Writer.java b/src/main/java/htsjdk/variant/variantcontext/writer/BCF2Writer.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/writer/BCF2Writer.java
rename to src/main/java/htsjdk/variant/variantcontext/writer/BCF2Writer.java
diff --git a/src/java/htsjdk/variant/variantcontext/writer/IndexingVariantContextWriter.java b/src/main/java/htsjdk/variant/variantcontext/writer/IndexingVariantContextWriter.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/writer/IndexingVariantContextWriter.java
rename to src/main/java/htsjdk/variant/variantcontext/writer/IndexingVariantContextWriter.java
diff --git a/src/java/htsjdk/variant/variantcontext/writer/IntGenotypeFieldAccessors.java b/src/main/java/htsjdk/variant/variantcontext/writer/IntGenotypeFieldAccessors.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/writer/IntGenotypeFieldAccessors.java
rename to src/main/java/htsjdk/variant/variantcontext/writer/IntGenotypeFieldAccessors.java
diff --git a/src/java/htsjdk/variant/variantcontext/writer/Options.java b/src/main/java/htsjdk/variant/variantcontext/writer/Options.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/writer/Options.java
rename to src/main/java/htsjdk/variant/variantcontext/writer/Options.java
diff --git a/src/java/htsjdk/variant/variantcontext/writer/SortingVariantContextWriter.java b/src/main/java/htsjdk/variant/variantcontext/writer/SortingVariantContextWriter.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/writer/SortingVariantContextWriter.java
rename to src/main/java/htsjdk/variant/variantcontext/writer/SortingVariantContextWriter.java
diff --git a/src/java/htsjdk/variant/variantcontext/writer/SortingVariantContextWriterBase.java b/src/main/java/htsjdk/variant/variantcontext/writer/SortingVariantContextWriterBase.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/writer/SortingVariantContextWriterBase.java
rename to src/main/java/htsjdk/variant/variantcontext/writer/SortingVariantContextWriterBase.java
diff --git a/src/java/htsjdk/variant/variantcontext/writer/VCFWriter.java b/src/main/java/htsjdk/variant/variantcontext/writer/VCFWriter.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/writer/VCFWriter.java
rename to src/main/java/htsjdk/variant/variantcontext/writer/VCFWriter.java
diff --git a/src/java/htsjdk/variant/variantcontext/writer/VariantContextWriter.java b/src/main/java/htsjdk/variant/variantcontext/writer/VariantContextWriter.java
similarity index 100%
rename from src/java/htsjdk/variant/variantcontext/writer/VariantContextWriter.java
rename to src/main/java/htsjdk/variant/variantcontext/writer/VariantContextWriter.java
diff --git a/src/main/java/htsjdk/variant/variantcontext/writer/VariantContextWriterBuilder.java b/src/main/java/htsjdk/variant/variantcontext/writer/VariantContextWriterBuilder.java
new file mode 100644
index 0000000..56c8b8b
--- /dev/null
+++ b/src/main/java/htsjdk/variant/variantcontext/writer/VariantContextWriterBuilder.java
@@ -0,0 +1,537 @@
+/*
+* Copyright (c) 2014 The Broad Institute
+*
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+*
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+*
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package htsjdk.variant.variantcontext.writer;
+
+import htsjdk.samtools.Defaults;
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.util.BlockCompressedOutputStream;
+import htsjdk.samtools.util.IOUtil;
+import htsjdk.samtools.util.Md5CalculatingOutputStream;
+import htsjdk.samtools.util.RuntimeIOException;
+import htsjdk.tribble.AbstractFeatureReader;
+import htsjdk.tribble.index.IndexCreator;
+import htsjdk.tribble.index.tabix.TabixFormat;
+import htsjdk.tribble.index.tabix.TabixIndexCreator;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.OutputStream;
+import java.util.EnumSet;
+
+/*
+ * Created with IntelliJ IDEA.
+ * User: thibault
+ * Date: 3/7/14
+ * Time: 2:07 PM
+ */
+/**
+ * @author thibault
+ * 
+ * <p>
+ * Provides methods for creating <code>VariantContextWriter</code>s using the Builder pattern.
+ * Replaces <code>VariantContextWriterFactory</code>.
+ * </p>
+ * <p>
+ * The caller must choose an output file or an output stream for the <code>VariantContextWriter</code> to write to.
+ * When a file is chosen, the output stream is created implicitly based on Defaults and options passed to the builder.
+ * When a stream is chosen, it is passed unchanged to the <code>VariantContextWriter</code>.
+ * </p>
+ * <p>
+ * Example: Create a series of files with buffering and indexing on the fly.
+ * Determine the appropriate file type based on filename.
+ * </p>
+
+   <pre>
+   VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
+       .setReferenceDictionary(refDict)
+       .setOption(Options.INDEX_ON_THE_FLY)
+       .setBuffer(8192);
+ 
+   VariantContextWriter sample1_writer = builder
+       .setOutputFile("sample1.vcf")
+       .build();
+   VariantContextWriter sample2_writer = builder
+       .setOutputFile("sample2.bcf")
+       .build();
+   VariantContextWriter sample3_writer = builder
+       .setOutputFile("sample3.vcf.bgzf")
+       .build();
+   </pre>
+   
+   <p>
+ * Example: Turn off buffering and explicitly set the file type
+ * </p>
+ * 
+ * <pre>
+   VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
+       .setReferenceDictionary(refDict)
+       .setOption(Options.INDEX_ON_THE_FLY)
+       .unsetBuffering();
+ 
+   VariantContextWriter sample1_writer = builder
+       .setOutputFile("sample1.custom_extension")
+       .setOutputFileType(OutputType.VCF)
+       .build();
+   VariantContextWriter sample2_writer = builder
+       .setOutputFile("sample2.custom_extension")
+       .setOutputFileType(OutputType.BLOCK_COMPRESSED_VCF)
+       .build();
+   </pre>
+ */
+public class VariantContextWriterBuilder {
+    public static final EnumSet<Options> DEFAULT_OPTIONS = EnumSet.of(Options.INDEX_ON_THE_FLY);
+    public static final EnumSet<Options> NO_OPTIONS = EnumSet.noneOf(Options.class);
+
+    public enum OutputType {
+        UNSPECIFIED,
+        VCF,
+        BCF,
+        BLOCK_COMPRESSED_VCF,
+        VCF_STREAM,
+        BCF_STREAM
+    }
+
+    public static final EnumSet<OutputType> FILE_TYPES = EnumSet.of(OutputType.VCF, OutputType.BCF, OutputType.BLOCK_COMPRESSED_VCF);
+    public static final EnumSet<OutputType> STREAM_TYPES = EnumSet.of(OutputType.VCF_STREAM, OutputType.BCF_STREAM);
+
+    private SAMSequenceDictionary refDict = null;
+    private OutputType outType = OutputType.UNSPECIFIED;
+    private File outFile = null;
+    private OutputStream outStream = null;
+    private IndexCreator idxCreator = null;
+    private int bufferSize = Defaults.BUFFER_SIZE;
+    private boolean createMD5 = Defaults.CREATE_MD5;
+    protected EnumSet<Options> options = DEFAULT_OPTIONS.clone();
+
+    /**
+     * Default constructor.  Adds <code>USE_ASYNC_IO</code> to the Options if it is present in Defaults.
+     */
+    public VariantContextWriterBuilder() {
+        if (Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE) {
+            options.add(Options.USE_ASYNC_IO);
+        }
+    }
+
+    /**
+     * Set the reference dictionary to be used by <code>VariantContextWriter</code>s created by this builder.
+     *
+     * @param refDict the reference dictionary
+     * @return this <code>VariantContextWriterBuilder</code>
+     */
+    public VariantContextWriterBuilder setReferenceDictionary(final SAMSequenceDictionary refDict) {
+        this.refDict = refDict;
+        return this;
+    }
+
+    /**
+     * Set the output file for the next <code>VariantContextWriter</code> created by this builder.
+     * Determines file type implicitly from the filename.
+     *
+     * @param outFile the file the <code>VariantContextWriter</code> will write to
+     * @return this <code>VariantContextWriterBuilder</code>
+     */
+    public VariantContextWriterBuilder setOutputFile(final File outFile) {
+        this.outFile = outFile;
+        this.outStream = null;
+        this.outType = determineOutputTypeFromFile(outFile);
+        return this;
+    }
+
+    /**
+     * Set the output file for the next <code>VariantContextWriter</code> created by this builder.
+     * Determines file type implicitly from the filename.
+     *
+     * @param outFile the file the <code>VariantContextWriter</code> will write to
+     * @return this <code>VariantContextWriterBuilder</code>
+     */
+    public VariantContextWriterBuilder setOutputFile(final String outFile) {
+        return setOutputFile(new File(outFile));
+    }
+
+    /**
+     * Set the output file type for the next <code>VariantContextWriter</code> created by this builder.
+     *
+     * @param outType the type of file the <code>VariantContextWriter</code> will write to
+     * @return this <code>VariantContextWriterBuilder</code>
+     */
+    public VariantContextWriterBuilder setOutputFileType(final OutputType outType) {
+        if (!FILE_TYPES.contains(outType))
+            throw new IllegalArgumentException("Must choose a file type, not other output types.");
+
+        if (this.outFile == null || this.outStream != null)
+            throw new IllegalArgumentException("Cannot set a file type if the output is not to a file.");
+
+        this.outType = outType;
+        return this;
+    }
+
+    /**
+     * Set the output VCF stream for the next <code>VariantContextWriter</code> created by this builder.
+     * If buffered writing is desired, caller must provide some kind of buffered <code>OutputStream</code>.
+     *
+     * @param outStream the output stream to write to
+     * @return this <code>VariantContextWriterBuilder</code>
+     */
+    public VariantContextWriterBuilder setOutputVCFStream(final OutputStream outStream) {
+        this.outStream = outStream;
+        this.outFile = null;
+        this.outType = OutputType.VCF_STREAM;
+        return this;
+    }
+
+    /**
+     * Set the output BCF stream for the next <code>VariantContextWriter</code> created by this builder.
+     * If buffered writing is desired, caller must provide some kind of buffered <code>OutputStream</code>.
+     *
+     * @param outStream the output stream to write to
+     * @return this <code>VariantContextWriterBuilder</code>
+     */
+    public VariantContextWriterBuilder setOutputBCFStream(final OutputStream outStream) {
+        this.outStream = outStream;
+        this.outFile = null;
+        this.outType = OutputType.BCF_STREAM;
+        return this;
+    }
+
+    /**
+     * Set the output stream (VCF, by default) for the next <code>VariantContextWriter</code> created by this builder.
+     * If buffered writing is desired, caller must provide some kind of buffered <code>OutputStream</code>.
+     *
+     * @param outStream the output stream to write to
+     * @return this VariantContextWriterBuilder
+     */
+    public VariantContextWriterBuilder setOutputStream(final OutputStream outStream) {
+        return setOutputVCFStream(outStream);
+    }
+
+    /**
+     * Set an IndexCreator for the next <code>VariantContextWriter</code> created by this builder.
+     *
+     * @param idxCreator the <code>IndexCreator</code> to use
+     * @return this <code>VariantContextWriterBuilder</code>
+     */
+    public VariantContextWriterBuilder setIndexCreator(final IndexCreator idxCreator) {
+        this.idxCreator = idxCreator;
+        return this;
+    }
+
+    /**
+     * Do not pass an <code>IndexCreator</code> to the next <code>VariantContextWriter</code> created by this builder.
+     *
+     * @return this <code>VariantContextWriterBuilder</code>
+     */
+    public VariantContextWriterBuilder clearIndexCreator() {
+        this.idxCreator = null;
+        return this;
+    }
+
+    /**
+     * Set a buffer size for the file output stream passed to the next <code>VariantContextWriter</code> created by this builder.
+     * Set to 0 for no buffering.
+     * Does not affect OutputStreams passed directly to <code>VariantContextWriterBuilder</code>.
+     *
+     * @param bufferSize the buffer size to use
+     * @return this <code>VariantContextWriterBuilder</code>
+     */
+    public VariantContextWriterBuilder setBuffer(final int bufferSize) {
+        this.bufferSize = bufferSize;
+        return this;
+    }
+
+    /**
+     * Do not use buffering in the next <code>VariantContextWriter</code> created by this builder.
+     * Does not affect <code>OutputStream</code>s passed directly to <code>VariantContextWriterBuilder</code>.
+     *
+     * @return this <code>VariantContextWriterBuilder</code>
+     */
+    public VariantContextWriterBuilder unsetBuffering() {
+        this.bufferSize = 0;
+        return this;
+    }
+
+    /**
+     * Choose whether to also create an MD5 digest file for the next <code>VariantContextWriter</code> created by this builder.
+     *
+     * @param createMD5 boolean, <code>true</code> to create an MD5 digest
+     * @return this <code>VariantContextWriterBuilder</code>
+     */
+    public VariantContextWriterBuilder setCreateMD5(final boolean createMD5) {
+        this.createMD5 = createMD5;
+        return this;
+    }
+
+    /**
+     * Create an MD5 digest file for the next <code>VariantContextWriter</code> created by this builder.
+     *
+     * @return this <code>VariantContextWriterBuilder</code>
+     */
+    public VariantContextWriterBuilder setCreateMD5() {
+        return setCreateMD5(true);
+    }
+
+    /**
+     * Don't create an MD5 digest file for the next <code>VariantContextWriter</code> created by this builder.
+     *
+     * @return this <code>VariantContextWriterBuilder</code>
+     */
+    public VariantContextWriterBuilder unsetCreateMD5() {
+        return setCreateMD5(false);
+    }
+
+    /**
+     * Replace the set of <code>Options</code> for the <code>VariantContextWriterBuilder</code> with a new set.
+     *
+     * @param options the complete set of options to use
+     * @return this <code>VariantContextWriterBuilder</code>
+     */
+    public VariantContextWriterBuilder setOptions(final EnumSet<Options> options) {
+        this.options = options;
+        return this;
+    }
+
+    /**
+     * Add one option to the set of <code>Options</code> for the <code>VariantContextWriterBuilder</code>, if it's not already present.
+     *
+     * @param option the option to set
+     * @return this <code>VariantContextWriterBuilder</code>
+     */
+    public VariantContextWriterBuilder setOption(final Options option) {
+        this.options.add(option);
+        return this;
+    }
+
+    /**
+     * Remove one option from the set of <code>Options</code> for the <code>VariantContextWriterBuilder</code>, if it's present.
+     *
+     * @param option the option to unset
+     * @return this <code>VariantContextWriterBuilder</code>
+     */
+    public VariantContextWriterBuilder unsetOption(final Options option) {
+        this.options.remove(option);
+        return this;
+    }
+
+    /**
+     * Set or unset the given option, depending on the boolean provided.
+     * @param option the option to modify
+     * @param setIt true to set the option, false to unset it.
+     * @return this <code>VariantContextWriterBuilder</code>
+     */
+    public VariantContextWriterBuilder modifyOption(final Options option, final boolean setIt) {
+        return (setIt) ? this.setOption(option) : this.unsetOption(option);
+    }
+
+    /**
+     * Add one option to the set of default <code>Options</code> that will be used as the initial set of options
+     * for all VariantContextWriterBuilders created after this call.
+     *
+     * @param option the option to set
+     */
+    public static void setDefaultOption(final Options option) {
+        VariantContextWriterBuilder.DEFAULT_OPTIONS.add(option);
+    }
+
+    /**
+     * Remove an option from the set of default <code>Options</code> that will be used as the initial set of options
+     * for all VariantContextWriterBuilders created after this call.
+     *
+     * @param option the option to unset
+     */
+    public static void unsetDefaultOption(final Options option) {
+        VariantContextWriterBuilder.DEFAULT_OPTIONS.remove(option);
+    }
+
+    /**
+     * Remove all options from the set of <code>Options</code> for the <code>VariantContextWriterBuilder</code>.
+     *
+     * @return this VariantContextWriterBuilder
+     */
+    public VariantContextWriterBuilder clearOptions() {
+        this.options = NO_OPTIONS.clone();
+        return this;
+    }
+
+    /**
+     * Used for testing; tests if the option is set
+     * @param option the option to test
+     * @return true if the option is set, false otherwise.
+     */
+    boolean isOptionSet(final Options option) {
+        return this.options.contains(option);
+    }
+
+    /**
+     * Validate and build the <code>VariantContextWriter</code>.
+     *
+     * @return the <code>VariantContextWriter</code> as specified by previous method calls
+     * @throws RuntimeIOException if the writer is configured to write to a file, and the corresponding path does not exist.
+     * @throws IllegalArgumentException if no output file or stream is specified.
+     * @throws IllegalArgumentException if <code>Options.INDEX_ON_THE_FLY</code> is specified and no reference dictionary is provided.
+     * @throws IllegalArgumentException if <code>Options.INDEX_ON_THE_FLY</code> is specified and a stream output is specified.
+     */
+    public VariantContextWriter build() {
+        VariantContextWriter writer = null;
+
+        // don't allow FORCE_BCF to modify the outType state
+        OutputType typeToBuild = this.outType;
+
+        if (this.options.contains(Options.FORCE_BCF)) {
+            if (FILE_TYPES.contains(this.outType))
+                typeToBuild = OutputType.BCF;
+            else if (STREAM_TYPES.contains(this.outType))
+                typeToBuild = OutputType.BCF_STREAM;
+        }
+
+        OutputStream outStreamFromFile = this.outStream;
+        if (FILE_TYPES.contains(this.outType)) {
+            try {
+                outStreamFromFile = IOUtil.maybeBufferOutputStream(new FileOutputStream(outFile), bufferSize);
+            } catch (final FileNotFoundException e) {
+                throw new RuntimeIOException("File not found: " + outFile, e);
+            }
+
+            if (createMD5)
+                outStreamFromFile = new Md5CalculatingOutputStream(outStreamFromFile, new File(outFile.getAbsolutePath() + ".md5"));
+        }
+
+        switch (typeToBuild) {
+            case UNSPECIFIED:
+                throw new IllegalArgumentException("Must specify file or stream output type.");
+            case VCF:
+                if ((refDict == null) && (options.contains(Options.INDEX_ON_THE_FLY)))
+                    throw new IllegalArgumentException("A reference dictionary is required for creating Tribble indices on the fly");
+
+                writer = createVCFWriter(outFile, outStreamFromFile);
+                break;
+            case BLOCK_COMPRESSED_VCF:
+                if (refDict == null)
+                    idxCreator = new TabixIndexCreator(TabixFormat.VCF);
+                else
+                    idxCreator = new TabixIndexCreator(refDict, TabixFormat.VCF);
+
+                writer = createVCFWriter(outFile, new BlockCompressedOutputStream(outStreamFromFile, outFile));
+                break;
+            case BCF:
+                if ((refDict == null) && (options.contains(Options.INDEX_ON_THE_FLY)))
+                    throw new IllegalArgumentException("A reference dictionary is required for creating Tribble indices on the fly");
+
+                writer = createBCFWriter(outFile, outStreamFromFile);
+                break;
+            case VCF_STREAM:
+                if (options.contains(Options.INDEX_ON_THE_FLY))
+                    throw new IllegalArgumentException("VCF index creation not supported for stream output.");
+
+                writer = createVCFWriter(null, outStream);
+                break;
+            case BCF_STREAM:
+                if (options.contains(Options.INDEX_ON_THE_FLY))
+                    throw new IllegalArgumentException("BCF index creation not supported for stream output.");
+
+                writer = createBCFWriter(null, outStream);
+                break;
+        }
+
+        if (this.options.contains(Options.USE_ASYNC_IO))
+            writer = new AsyncVariantContextWriter(writer, AsyncVariantContextWriter.DEFAULT_QUEUE_SIZE);
+
+        return writer;
+     }
+
+    /**
+     * Attempts to determine the type of file/data to write based on the File path being
+     * written to. It first tries the logical filename; if that fails, it resolves any symlinks
+     * and tries again.  If that also fails and the output file exists but is neither a regular
+     * file nor a directory, VCF_STREAM is returned; otherwise UNSPECIFIED is returned.
+     */
+    protected static OutputType determineOutputTypeFromFile(final File f) {
+        if (isBCF(f)) {
+            return OutputType.BCF;
+        } else if (isCompressedVCF(f)) {
+            return OutputType.BLOCK_COMPRESSED_VCF;
+        } else if (isVCF(f)) {
+            return OutputType.VCF;
+        }
+        else {
+            // See if we have a special file (device, named pipe, etc.)
+            final File canonical = new File(IOUtil.getFullCanonicalPath(f));
+            if (!canonical.equals(f)) {
+                return determineOutputTypeFromFile(canonical);
+            }
+            else if (f.exists() && !f.isFile() && !f.isDirectory()) {
+                return OutputType.VCF_STREAM;
+            } else {
+                return OutputType.UNSPECIFIED;
+            }
+        }
+    }
+
+    private static boolean isVCF(final File outFile) {
+        return outFile != null && outFile.getName().endsWith(".vcf");
+    }
+
+    private static boolean isBCF(final File outFile) {
+        return outFile != null && outFile.getName().endsWith(".bcf");
+    }
+
+    private static boolean isCompressedVCF(final File outFile) {
+        if (outFile == null)
+            return false;
+
+        return AbstractFeatureReader.hasBlockCompressedExtension(outFile);
+    }
+
+    private VariantContextWriter createVCFWriter(final File writerFile, final OutputStream writerStream) {
+        if (idxCreator == null) {
+            return new VCFWriter(writerFile, writerStream, refDict,
+                    options.contains(Options.INDEX_ON_THE_FLY),
+                    options.contains(Options.DO_NOT_WRITE_GENOTYPES),
+                    options.contains(Options.ALLOW_MISSING_FIELDS_IN_HEADER),
+                    options.contains(Options.WRITE_FULL_FORMAT_FIELD));
+        }
+        else {
+            return new VCFWriter(writerFile, writerStream, refDict, idxCreator,
+                    options.contains(Options.INDEX_ON_THE_FLY),
+                    options.contains(Options.DO_NOT_WRITE_GENOTYPES),
+                    options.contains(Options.ALLOW_MISSING_FIELDS_IN_HEADER),
+                    options.contains(Options.WRITE_FULL_FORMAT_FIELD));
+        }
+    }
+
+    private VariantContextWriter createBCFWriter(final File writerFile, final OutputStream writerStream) {
+        if (idxCreator == null) {
+            return new BCF2Writer(writerFile, writerStream, refDict,
+                    options.contains(Options.INDEX_ON_THE_FLY),
+                    options.contains(Options.DO_NOT_WRITE_GENOTYPES));
+        }
+        else {
+            return new BCF2Writer(writerFile, writerStream, refDict, idxCreator,
+                    options.contains(Options.INDEX_ON_THE_FLY),
+                    options.contains(Options.DO_NOT_WRITE_GENOTYPES));
+        }
+    }
+}
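
A minimal usage sketch for the builder above, assuming vcfHeader, variants and refDict already name an existing VCFHeader, an Iterable of VariantContext records and the matching SAMSequenceDictionary; with INDEX_ON_THE_FLY set, build() infers plain VCF output from the ".vcf" extension and writes a Tribble index alongside the file:

    final VariantContextWriter writer = new VariantContextWriterBuilder()
            .setReferenceDictionary(refDict)      // required because INDEX_ON_THE_FLY is set
            .setOption(Options.INDEX_ON_THE_FLY)
            .setOutputFile("calls.vcf")           // output type inferred from the extension
            .build();
    writer.writeHeader(vcfHeader);
    for (final VariantContext vc : variants) {
        writer.add(vc);                           // records must arrive in reference order for indexing
    }
    writer.close();
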
diff --git a/src/main/java/htsjdk/variant/variantcontext/writer/VariantContextWriterFactory.java b/src/main/java/htsjdk/variant/variantcontext/writer/VariantContextWriterFactory.java
new file mode 100644
index 0000000..e1e0026
--- /dev/null
+++ b/src/main/java/htsjdk/variant/variantcontext/writer/VariantContextWriterFactory.java
@@ -0,0 +1,282 @@
+/*
+* Copyright (c) 2012 The Broad Institute
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package htsjdk.variant.variantcontext.writer;
+
+import htsjdk.samtools.Defaults;
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.util.BlockCompressedOutputStream;
+import htsjdk.samtools.util.IOUtil;
+import htsjdk.samtools.util.RuntimeIOException;
+import htsjdk.tribble.AbstractFeatureReader;
+import htsjdk.tribble.index.IndexCreator;
+import htsjdk.tribble.index.tabix.TabixFormat;
+import htsjdk.tribble.index.tabix.TabixIndexCreator;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.OutputStream;
+import java.util.EnumSet;
+
+/**
+ * Factory methods to create VariantContext writers
+ *
+ * @author depristo
+ * @since 5/12
+ *
+ * @deprecated Replaced by {@link VariantContextWriterBuilder}
+ */
+@Deprecated
+public class VariantContextWriterFactory {
+
+    public static final EnumSet<Options> DEFAULT_OPTIONS = EnumSet.of(Options.INDEX_ON_THE_FLY);
+    public static final EnumSet<Options> NO_OPTIONS = EnumSet.noneOf(Options.class);
+
+    static {
+        if (Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE) {
+            DEFAULT_OPTIONS.add(Options.USE_ASYNC_IO);
+        }
+    }
+
+    private VariantContextWriterFactory() {}
+
+    public static VariantContextWriter create(final File location, final SAMSequenceDictionary refDict) {
+        return create(location, openOutputStream(location), refDict, DEFAULT_OPTIONS);
+    }
+
+    public static VariantContextWriter create(final File location, final SAMSequenceDictionary refDict, final EnumSet<Options> options) {
+        return create(location, openOutputStream(location), refDict, options);
+    }
+
+    /**
+     * @param output If buffered writing is desired, caller must provide some kind of buffered OutputStream.
+     */
+    public static VariantContextWriter create(final File location,
+                                              final OutputStream output,
+                                              final SAMSequenceDictionary refDict) {
+        return create(location, output, refDict, DEFAULT_OPTIONS);
+    }
+
+    /**
+     * @param output If buffered writing is desired, caller must provide some kind of buffered OutputStream.
+     */
+    public static VariantContextWriter create(final OutputStream output,
+                                              final SAMSequenceDictionary refDict,
+                                              final EnumSet<Options> options) {
+        return create(null, output, refDict, options);
+    }
+
+    /**
+     * @param location Note that this parameter is used to produce intelligent log messages and to name the index,
+     *                 but does not control where the file is written
+     * @param output This is where the BCF is actually written. If buffered writing is desired, caller must provide
+     *               some kind of buffered OutputStream.
+     */
+    public static VariantContextWriter createBcf2(final File location,
+                                                  final OutputStream output,
+                                                  final SAMSequenceDictionary refDict,
+                                                  final EnumSet<Options> options) {
+        return maybeWrapWithAsyncWriter(new BCF2Writer(location, output, refDict,
+                options.contains(Options.INDEX_ON_THE_FLY),
+                options.contains(Options.DO_NOT_WRITE_GENOTYPES)), options);
+    }
+
+    /**
+     * @param location Note that this parameter is used to produce intelligent log messages and to name the index,
+     *                 but does not control where the file is written
+     * @param output This is where the BCF is actually written.  If buffered writing is desired, caller must provide
+     *               some kind of buffered OutputStream.
+     */
+    public static VariantContextWriter createBcf2(final File location,
+                                                  final OutputStream output,
+                                                  final SAMSequenceDictionary refDict,
+                                                  final IndexCreator indexCreator,
+                                                  final EnumSet<Options> options) {
+        return maybeWrapWithAsyncWriter(new BCF2Writer(location, output, refDict, indexCreator,
+                options.contains(Options.INDEX_ON_THE_FLY),
+                options.contains(Options.DO_NOT_WRITE_GENOTYPES)), options);
+    }
+
+    /**
+     * @param location Note that this parameter is used to produce intelligent log messages and to name the index,
+     *                 but does not control where the file is written
+     * @param output This is where the VCF is actually written. If buffered writing is desired, caller must provide
+     *               some kind of buffered OutputStream.
+     */
+    public static VariantContextWriter createVcf(final File location,
+                                                 final OutputStream output,
+                                                 final SAMSequenceDictionary refDict,
+                                                 final EnumSet<Options> options) {
+        return maybeWrapWithAsyncWriter(new VCFWriter(location, output, refDict,
+                options.contains(Options.INDEX_ON_THE_FLY),
+                options.contains(Options.DO_NOT_WRITE_GENOTYPES),
+                options.contains(Options.ALLOW_MISSING_FIELDS_IN_HEADER),
+                options.contains(Options.WRITE_FULL_FORMAT_FIELD)), options);
+    }
+
+    /**
+     * @param location Note that this parameter is used to produce intelligent log messages and to name the index,
+     *                 but does not control where the file is written
+     * @param output This is where the VCF is actually written.  If buffered writing is desired, caller must provide
+     *               some kind of buffered OutputStream.
+     */
+    public static VariantContextWriter createVcf(final File location,
+                                                 final OutputStream output,
+                                                 final SAMSequenceDictionary refDict,
+                                                 final IndexCreator indexCreator,
+                                                 final EnumSet<Options> options) {
+        return maybeWrapWithAsyncWriter(new VCFWriter(location, output, refDict, indexCreator,
+                options.contains(Options.INDEX_ON_THE_FLY),
+                options.contains(Options.DO_NOT_WRITE_GENOTYPES),
+                options.contains(Options.ALLOW_MISSING_FIELDS_IN_HEADER),
+                options.contains(Options.WRITE_FULL_FORMAT_FIELD)), options);
+    }
+
+    /**
+     * @param location Note that this parameter is used to produce intelligent log messages,
+     *                 but does not control where the file is written
+     * @param output This is where the VCF is actually written.  If buffered writing is desired, caller must provide
+     *               some kind of buffered OutputStream.
+     */
+    public static VariantContextWriter createBlockCompressedVcf(final File location,
+                                                                final OutputStream output,
+                                                                final SAMSequenceDictionary refDict,
+                                                                final EnumSet<Options> options) {
+        final TabixIndexCreator indexCreator;
+        if (options.contains(Options.INDEX_ON_THE_FLY)) {
+            indexCreator = new TabixIndexCreator(refDict, TabixFormat.VCF);
+        } else {
+            indexCreator = null;
+        }
+        return maybeWrapWithAsyncWriter(new VCFWriter(location, BlockCompressedOutputStream.maybeBgzfWrapOutputStream(location, output),
+                refDict, indexCreator,
+                options.contains(Options.INDEX_ON_THE_FLY),
+                options.contains(Options.DO_NOT_WRITE_GENOTYPES),
+                options.contains(Options.ALLOW_MISSING_FIELDS_IN_HEADER),
+                options.contains(Options.WRITE_FULL_FORMAT_FIELD)), options);
+    }
+
+    /**
+     * @param location Note that this parameter is used to produce intelligent log messages,
+     *                 but does not control where the file is written
+     * @param output This is where the VCF is actually written. If buffered writing is desired, caller must provide
+     *               some kind of buffered OutputStream.
+     */
+    public static VariantContextWriter createBlockCompressedVcf(final File location,
+                                                                final OutputStream output,
+                                                                final SAMSequenceDictionary refDict,
+                                                                final IndexCreator indexCreator,
+                                                                final EnumSet<Options> options) {
+        return maybeWrapWithAsyncWriter(new VCFWriter(location, BlockCompressedOutputStream.maybeBgzfWrapOutputStream(location, output),
+                refDict, indexCreator,
+                options.contains(Options.INDEX_ON_THE_FLY),
+                options.contains(Options.DO_NOT_WRITE_GENOTYPES),
+                options.contains(Options.ALLOW_MISSING_FIELDS_IN_HEADER),
+                options.contains(Options.WRITE_FULL_FORMAT_FIELD)), options);
+    }
+
+    public static VariantContextWriter create(final File location,
+        final OutputStream output,
+        final SAMSequenceDictionary refDict,
+        final EnumSet<Options> options) {
+
+        if (isBCFOutput(location, options)) {
+            return createBcf2(location, output, refDict, options);
+        } else if (isCompressedVcf(location)) {
+            return createBlockCompressedVcf(location, output, refDict, options);
+        } else {
+            return createVcf(location, output, refDict, options);
+        }
+    }
+
+    /**
+     * @param output If buffered writing is desired, caller must provide some kind of buffered OutputStream.
+     */
+    public static VariantContextWriter create(final File location,
+                                              final OutputStream output,
+                                              final SAMSequenceDictionary refDict,
+                                              final IndexCreator indexCreator,
+                                              final EnumSet<Options> options) {
+
+        if (isBCFOutput(location, options)) {
+            return createBcf2(location, output, refDict, indexCreator, options);
+        } else if (isCompressedVcf(location)) {
+            return createBlockCompressedVcf(location, output, refDict, indexCreator, options);
+        } else {
+            return createVcf(location, output, refDict, indexCreator, options);
+        }
+    }
+
+    private static VariantContextWriter maybeWrapWithAsyncWriter(final VariantContextWriter writer,
+                                                                 final EnumSet<Options> options) {
+        if (options.contains(Options.USE_ASYNC_IO)) {
+            return new AsyncVariantContextWriter(writer, AsyncVariantContextWriter.DEFAULT_QUEUE_SIZE);
+        }
+        else return writer;
+    }
+
+    /**
+     * Should we output a BCF file based solely on the name of the file at location?
+     *
+     * @param location the candidate output file; may be null
+     * @return true if the file name indicates BCF output
+     */
+    public static boolean isBCFOutput(final File location) {
+        return isBCFOutput(location, EnumSet.noneOf(Options.class));
+    }
+
+    public static boolean isBCFOutput(final File location, final EnumSet<Options> options) {
+        return options.contains(Options.FORCE_BCF) || (location != null && location.getName().contains(".bcf"));
+    }
+
+    public static boolean isCompressedVcf(final File location) {
+        if (location == null)
+            return false;
+
+        return AbstractFeatureReader.hasBlockCompressedExtension(location);
+    }
+
+    public static VariantContextWriter sortOnTheFly(final VariantContextWriter innerWriter, final int maxCachingStartDistance) {
+        return sortOnTheFly(innerWriter, maxCachingStartDistance, false);
+    }
+
+    public static VariantContextWriter sortOnTheFly(final VariantContextWriter innerWriter, final int maxCachingStartDistance, final boolean takeOwnershipOfInner) {
+        return new SortingVariantContextWriter(innerWriter, maxCachingStartDistance, takeOwnershipOfInner);
+    }
+
+    /**
+     * Returns an output stream writing to location, or throws an exception if this fails.
+     * @param location the file to open for writing
+     * @return a possibly buffered output stream writing to location
+     */
+    protected static OutputStream openOutputStream(final File location) {
+        try {
+            return IOUtil.maybeBufferOutputStream(new FileOutputStream(location));
+        } catch (final FileNotFoundException e) {
+            throw new RuntimeIOException(location + ": Unable to create VCF writer", e);
+        }
+    }
+}
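
The factory above is deprecated in favour of VariantContextWriterBuilder; as a rough sketch of the equivalence, with location and refDict standing in for an existing output File and its SAMSequenceDictionary:

    // Deprecated factory call:
    final VariantContextWriter oldWriter =
            VariantContextWriterFactory.create(location, refDict, VariantContextWriterFactory.DEFAULT_OPTIONS);

    // Builder equivalent; the builder already starts from DEFAULT_OPTIONS (INDEX_ON_THE_FLY):
    final VariantContextWriter newWriter = new VariantContextWriterBuilder()
            .setReferenceDictionary(refDict)
            .setOutputFile(location)
            .build();
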
diff --git a/src/java/htsjdk/variant/vcf/AbstractVCFCodec.java b/src/main/java/htsjdk/variant/vcf/AbstractVCFCodec.java
similarity index 100%
rename from src/java/htsjdk/variant/vcf/AbstractVCFCodec.java
rename to src/main/java/htsjdk/variant/vcf/AbstractVCFCodec.java
diff --git a/src/java/htsjdk/variant/vcf/VCF3Codec.java b/src/main/java/htsjdk/variant/vcf/VCF3Codec.java
similarity index 100%
rename from src/java/htsjdk/variant/vcf/VCF3Codec.java
rename to src/main/java/htsjdk/variant/vcf/VCF3Codec.java
diff --git a/src/java/htsjdk/variant/vcf/VCFCodec.java b/src/main/java/htsjdk/variant/vcf/VCFCodec.java
similarity index 100%
rename from src/java/htsjdk/variant/vcf/VCFCodec.java
rename to src/main/java/htsjdk/variant/vcf/VCFCodec.java
diff --git a/src/java/htsjdk/variant/vcf/VCFCompoundHeaderLine.java b/src/main/java/htsjdk/variant/vcf/VCFCompoundHeaderLine.java
similarity index 100%
rename from src/java/htsjdk/variant/vcf/VCFCompoundHeaderLine.java
rename to src/main/java/htsjdk/variant/vcf/VCFCompoundHeaderLine.java
diff --git a/src/java/htsjdk/variant/vcf/VCFConstants.java b/src/main/java/htsjdk/variant/vcf/VCFConstants.java
similarity index 100%
rename from src/java/htsjdk/variant/vcf/VCFConstants.java
rename to src/main/java/htsjdk/variant/vcf/VCFConstants.java
diff --git a/src/java/htsjdk/variant/vcf/VCFContigHeaderLine.java b/src/main/java/htsjdk/variant/vcf/VCFContigHeaderLine.java
similarity index 100%
rename from src/java/htsjdk/variant/vcf/VCFContigHeaderLine.java
rename to src/main/java/htsjdk/variant/vcf/VCFContigHeaderLine.java
diff --git a/src/java/htsjdk/variant/vcf/VCFEncoder.java b/src/main/java/htsjdk/variant/vcf/VCFEncoder.java
similarity index 100%
rename from src/java/htsjdk/variant/vcf/VCFEncoder.java
rename to src/main/java/htsjdk/variant/vcf/VCFEncoder.java
diff --git a/src/java/htsjdk/variant/vcf/VCFFileReader.java b/src/main/java/htsjdk/variant/vcf/VCFFileReader.java
similarity index 100%
rename from src/java/htsjdk/variant/vcf/VCFFileReader.java
rename to src/main/java/htsjdk/variant/vcf/VCFFileReader.java
diff --git a/src/java/htsjdk/variant/vcf/VCFFilterHeaderLine.java b/src/main/java/htsjdk/variant/vcf/VCFFilterHeaderLine.java
similarity index 100%
rename from src/java/htsjdk/variant/vcf/VCFFilterHeaderLine.java
rename to src/main/java/htsjdk/variant/vcf/VCFFilterHeaderLine.java
diff --git a/src/java/htsjdk/variant/vcf/VCFFormatHeaderLine.java b/src/main/java/htsjdk/variant/vcf/VCFFormatHeaderLine.java
similarity index 100%
rename from src/java/htsjdk/variant/vcf/VCFFormatHeaderLine.java
rename to src/main/java/htsjdk/variant/vcf/VCFFormatHeaderLine.java
diff --git a/src/java/htsjdk/variant/vcf/VCFHeader.java b/src/main/java/htsjdk/variant/vcf/VCFHeader.java
similarity index 100%
rename from src/java/htsjdk/variant/vcf/VCFHeader.java
rename to src/main/java/htsjdk/variant/vcf/VCFHeader.java
diff --git a/src/java/htsjdk/variant/vcf/VCFHeaderLine.java b/src/main/java/htsjdk/variant/vcf/VCFHeaderLine.java
similarity index 100%
rename from src/java/htsjdk/variant/vcf/VCFHeaderLine.java
rename to src/main/java/htsjdk/variant/vcf/VCFHeaderLine.java
diff --git a/src/java/htsjdk/variant/vcf/VCFHeaderLineCount.java b/src/main/java/htsjdk/variant/vcf/VCFHeaderLineCount.java
similarity index 100%
rename from src/java/htsjdk/variant/vcf/VCFHeaderLineCount.java
rename to src/main/java/htsjdk/variant/vcf/VCFHeaderLineCount.java
diff --git a/src/main/java/htsjdk/variant/vcf/VCFHeaderLineTranslator.java b/src/main/java/htsjdk/variant/vcf/VCFHeaderLineTranslator.java
new file mode 100644
index 0000000..071d815
--- /dev/null
+++ b/src/main/java/htsjdk/variant/vcf/VCFHeaderLineTranslator.java
@@ -0,0 +1,183 @@
+/*
+* Copyright (c) 2012 The Broad Institute
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package htsjdk.variant.vcf;
+
+import htsjdk.tribble.TribbleException;
+
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * A class for translating between VCF header versions.
+ */
+public class VCFHeaderLineTranslator {
+    private static Map<VCFHeaderVersion,VCFLineParser> mapping;
+
+    static {
+        mapping = new HashMap<VCFHeaderVersion,VCFLineParser>();
+        mapping.put(VCFHeaderVersion.VCF4_0,new VCF4Parser());
+        mapping.put(VCFHeaderVersion.VCF4_1,new VCF4Parser());
+        mapping.put(VCFHeaderVersion.VCF4_2,new VCF4Parser());
+        mapping.put(VCFHeaderVersion.VCF3_3,new VCF3Parser());
+        mapping.put(VCFHeaderVersion.VCF3_2,new VCF3Parser());
+    }
+
+    public static Map<String,String> parseLine(VCFHeaderVersion version, String valueLine, List<String> expectedTagOrder) {
+        return mapping.get(version).parseLine(valueLine,expectedTagOrder);
+    }
+}
+
+
+interface VCFLineParser {
+    public Map<String,String> parseLine(String valueLine, List<String> expectedTagOrder);
+}
+
+
+/**
+ * a class that handles the to and from disk for VCF 4 lines
+ */
+class VCF4Parser implements VCFLineParser {
+    /**
+     * parse a VCF4 line
+     * @param valueLine the line
+     * @return a mapping of the tags parsed out
+     */
+    public Map<String, String> parseLine(String valueLine, List<String> expectedTagOrder) {
+        // our return map
+        Map<String, String> ret = new LinkedHashMap<String, String>();
+
+        // a builder to store up characters as we go
+        StringBuilder builder = new StringBuilder();
+
+        // store the key when we're parsing out the values
+        String key = "";
+
+        // where are we in the stream of characters?
+        int index = 0;
+
+        // are we inside a quotation? we don't special case ',' then
+        boolean inQuote = false;
+
+        // if we are in a quote and we see a backslash followed by quote, treat it as an escaped quote
+        boolean escape = false;
+
+        // a little switch machine to parse out the tags. Regex ended up being really complicated and ugly [yes, but this machine is getting ugly now... MAD]
+        for (char c: valueLine.toCharArray()) {
+            if ( c == '\"') {
+                if (escape) {
+                    builder.append(c);
+                    escape = false;
+                } else {
+                    inQuote = !inQuote;
+                }
+            } else if ( inQuote ) {
+                if (escape) {
+                    // in VCF 4.2 spec the only valid characters to escape are double quote and backslash; otherwise copy the backslash through
+                    if (c == '\\') {
+                        builder.append(c);
+                    } else {
+                        builder.append('\\');
+                        builder.append(c);
+                    }
+                    escape = false;
+                } else if (c != '\\') {
+                    builder.append(c);
+                } else {
+                    escape = true;
+                }
+            } else {
+                escape = false;
+                switch (c) {
+                    case ('<') : if (index == 0) break; // if we see a open bracket at the beginning, ignore it
+                    case ('>') : if (index == valueLine.length()-1) ret.put(key,builder.toString().trim()); break; // if we see a close bracket, and we're at the end, add an entry to our list
+                    case ('=') : key = builder.toString().trim(); builder = new StringBuilder(); break; // at an equals, copy the key and reset the builder
+                    case (',') : ret.put(key,builder.toString().trim()); builder = new StringBuilder(); break; // drop the current key value to the return map
+                    default: builder.append(c); // otherwise simply append to the current string
+                }
+            }
+            
+            index++;
+        }
+
+        if (inQuote) {
+            throw new TribbleException.InvalidHeader("Unclosed quote in header line value " + valueLine);
+        }
+
+        // validate the tags against the expected list
+        index = 0;
+        if ( expectedTagOrder != null ) {
+            if ( ret.size() > expectedTagOrder.size() )
+                throw new TribbleException.InvalidHeader("unexpected tag count " + ret.size() + " in line " + valueLine);
+            for ( String str : ret.keySet() ) {
+                if ( !expectedTagOrder.get(index).equals(str) )
+                    throw new TribbleException.InvalidHeader("Unexpected tag " + str + " in line " + valueLine);
+                index++;
+            }
+        }
+        return ret;
+    }
+}
+
+class VCF3Parser implements VCFLineParser {
+
+    public Map<String, String> parseLine(String valueLine, List<String> expectedTagOrder) {
+        // our return map
+        Map<String, String> ret = new LinkedHashMap<String, String>();
+
+        // a builder to store up characters as we go
+        StringBuilder builder = new StringBuilder();
+
+        // where are we in the stream of characters?
+        int index = 0;
+        // where in the expected tag order are we?
+        int tagIndex = 0;
+
+        // are we inside a quotation? we don't special case ',' then
+        boolean inQuote = false;
+
+        // a little switch machine to parse out the tags. Regex ended up being really complicated and ugly
+        for (char c: valueLine.toCharArray()) {
+            switch (c) {
+                case ('\"') : inQuote = !inQuote; break; // a quote means we ignore ',' in our strings, keep track of it
+                case (',') : if (!inQuote) { ret.put(expectedTagOrder.get(tagIndex++),builder.toString()); builder = new StringBuilder(); break; } // drop the current key value to the return map
+                default: builder.append(c); // otherwise simply append to the current string
+            }
+            index++;
+        }
+        ret.put(expectedTagOrder.get(tagIndex++),builder.toString());
+        
+        // validate the tags against the expected list
+        index = 0;
+        if (tagIndex != expectedTagOrder.size()) throw new IllegalArgumentException("Unexpected tag count " + tagIndex + ", we expected " + expectedTagOrder.size());
+        for (String str : ret.keySet()){
+            if (!expectedTagOrder.get(index).equals(str)) throw new IllegalArgumentException("Unexpected tag " + str + " in string " + valueLine);
+            index++;
+        }
+        return ret;
+    }
+}
\ No newline at end of file
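
An illustrative sketch of how the translator above parses the value portion of a VCF 4.2 header line; passing null for the expected tag order skips tag-order validation:

    final String value = "<ID=DP,Number=1,Type=Integer,Description=\"Total Depth\">";
    final Map<String, String> fields =
            VCFHeaderLineTranslator.parseLine(VCFHeaderVersion.VCF4_2, value, null);
    // fields: ID=DP, Number=1, Type=Integer, Description=Total Depth
    // (the surrounding quotes are stripped from the quoted Description value)
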
diff --git a/src/java/htsjdk/variant/vcf/VCFHeaderLineType.java b/src/main/java/htsjdk/variant/vcf/VCFHeaderLineType.java
similarity index 100%
rename from src/java/htsjdk/variant/vcf/VCFHeaderLineType.java
rename to src/main/java/htsjdk/variant/vcf/VCFHeaderLineType.java
diff --git a/src/java/htsjdk/variant/vcf/VCFHeaderVersion.java b/src/main/java/htsjdk/variant/vcf/VCFHeaderVersion.java
similarity index 100%
rename from src/java/htsjdk/variant/vcf/VCFHeaderVersion.java
rename to src/main/java/htsjdk/variant/vcf/VCFHeaderVersion.java
diff --git a/src/java/htsjdk/variant/vcf/VCFIDHeaderLine.java b/src/main/java/htsjdk/variant/vcf/VCFIDHeaderLine.java
similarity index 100%
rename from src/java/htsjdk/variant/vcf/VCFIDHeaderLine.java
rename to src/main/java/htsjdk/variant/vcf/VCFIDHeaderLine.java
diff --git a/src/java/htsjdk/variant/vcf/VCFInfoHeaderLine.java b/src/main/java/htsjdk/variant/vcf/VCFInfoHeaderLine.java
similarity index 100%
rename from src/java/htsjdk/variant/vcf/VCFInfoHeaderLine.java
rename to src/main/java/htsjdk/variant/vcf/VCFInfoHeaderLine.java
diff --git a/src/java/htsjdk/variant/vcf/VCFRecordCodec.java b/src/main/java/htsjdk/variant/vcf/VCFRecordCodec.java
similarity index 100%
rename from src/java/htsjdk/variant/vcf/VCFRecordCodec.java
rename to src/main/java/htsjdk/variant/vcf/VCFRecordCodec.java
diff --git a/src/java/htsjdk/variant/vcf/VCFSimpleHeaderLine.java b/src/main/java/htsjdk/variant/vcf/VCFSimpleHeaderLine.java
similarity index 100%
rename from src/java/htsjdk/variant/vcf/VCFSimpleHeaderLine.java
rename to src/main/java/htsjdk/variant/vcf/VCFSimpleHeaderLine.java
diff --git a/src/java/htsjdk/variant/vcf/VCFStandardHeaderLines.java b/src/main/java/htsjdk/variant/vcf/VCFStandardHeaderLines.java
similarity index 100%
rename from src/java/htsjdk/variant/vcf/VCFStandardHeaderLines.java
rename to src/main/java/htsjdk/variant/vcf/VCFStandardHeaderLines.java
diff --git a/src/java/htsjdk/variant/vcf/VCFUtils.java b/src/main/java/htsjdk/variant/vcf/VCFUtils.java
similarity index 100%
rename from src/java/htsjdk/variant/vcf/VCFUtils.java
rename to src/main/java/htsjdk/variant/vcf/VCFUtils.java
diff --git a/src/scripts/build_intel_deflater.sh b/src/scripts/build_intel_deflater.sh
deleted file mode 100644
index f139cf0..0000000
--- a/src/scripts/build_intel_deflater.sh
+++ /dev/null
@@ -1,78 +0,0 @@
-#! /bin/bash
-#
-# The MIT License
-#
-# Copyright (c) 2013 The Broad Institute
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in
-# all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-# THE SOFTWARE.
-#
-
-# Build libIntelDeflater.so, the JNI library that wraps Intel IPP compression library and igzip.
-# Note that this is not built as part of standard release process.  Rather, it is built manually and then
-# copied to htsjdk/lib/jni.
-
-# Assumes OpenJDK exists at $OPENJDK.  I used openjdk-7-fcs-src-b147-27_jun_2011.zip
-# Assumes that Picard-public java sources have been compiled
-# Assumes IPP8_CODE_SAMPLES_DIR points to Intel IPP sample code built with -fPIC
-# Assumes IPP8_INSTALL_DIR points to composer_xe_2013_sp1 installation
-# Assumes IGZIP_LIB points to the directory containing libigzip0c.a
-source ${IPP8_INSTALL_DIR}/bin/ippvars.sh intel64
-
-set -e
-
-if [ "$OPENJDK" = "" ]
-then echo "ERROR: OPENJDK environment variable not defined." >&2
-     exit 1
-fi
-
-if [ "$IPP8_CODE_SAMPLES_DIR" = "" ]
-then echo "ERROR: IPP8_CODE_SAMPLES_DIR environment variable not defined." >&2
-     exit 1
-fi
-
-if [ "$IPP8_INSTALL_DIR" = "" ]
-then echo "ERROR: IPP8_INSTALL_DIR environment variable not defined." >&2
-     exit 1
-fi
-if [ "$IGZIP_LIB" = "" ]
-then echo "ERROR: IGZIP_LIB environment variable not defined." >&2
-     exit 1
-fi
-
-rootdir=$(dirname $(dirname $(dirname $0)))
-
-
-builddir=$rootdir/lib_build
-rm -rf $builddir
-mkdir -p $builddir
-
-echo $rootdir
-# Create JNI C header file
-javah -jni -classpath $rootdir/classes -d $builddir htsjdk.samtools.util.zip.IntelDeflater
-
-# Compile source and create library.
-gcc -I$builddir -I$rootdir/src/c/inteldeflater/ -I$JAVA_HOME/include/ -I$JAVA_HOME/include/linux/ -I$OPENJDK/jdk/src/share/native/common/ \
--I$OPENJDK/jdk/src/solaris/native/common/ -c -O3 -fPIC $rootdir/src/c/inteldeflater/IntelDeflater.c
-gcc  -z noexecstack -shared -o $builddir/libIntelDeflater.so IntelDeflater.o  -L${IPP8_CODE_SAMPLES_DIR}/__cmake/data-compression.intel64.make.static.release/__lib/release \
--lzlib  -lstdc++ -Wl,-Bstatic  -L$IGZIP_LIB -ligzip0c -lbfp754  -ldecimal  -liomp5  -liompstubs5  -lipgo  -lippac  -lippcc  -lippch  -lippcv  \
--lippdc  -lippdi  -lippgen  -lippi  -lippj  -lippm  -lippr  -lippsc  -lippvc  -lippvm  -lirng  -lmatmul  -lpdbx  \
--lpdbxinst  -lsvml  -lipps  -limf  -lirc  -lirc_s  -lippcore -Wl,-Bdynamic
-
-
-
diff --git a/src/test/java/htsjdk/cram/io/ExternalCompressionTest.java b/src/test/java/htsjdk/cram/io/ExternalCompressionTest.java
new file mode 100644
index 0000000..09f6e49
--- /dev/null
+++ b/src/test/java/htsjdk/cram/io/ExternalCompressionTest.java
@@ -0,0 +1,30 @@
+package htsjdk.samtools.cram.io;
+
+import org.apache.commons.compress.utils.IOUtils;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.nio.file.Files;
+
+public class ExternalCompressionTest {
+    public static final File BZIP2_FILE = new File("src/test/resources/htsjdk/samtools/cram/io/bzip2-test.bz2");
+    public static final byte [] TEST_BYTES = "This is a simple string to test BZip2".getBytes();
+
+    @Test
+    public void testBZip2Decompression() throws IOException {
+        final byte [] input = Files.readAllBytes(BZIP2_FILE.toPath());
+        final byte [] output = ExternalCompression.unbzip2(input);
+        Assert.assertEquals(output, "BZip2 worked".getBytes());
+    }
+
+    @Test
+    public void testBZip2Roundtrip() throws IOException {
+        final byte [] compressed = ExternalCompression.bzip2(TEST_BYTES);
+        final byte [] restored = ExternalCompression.unbzip2(compressed);
+        Assert.assertEquals(TEST_BYTES, restored);
+    }
+}
diff --git a/src/tests/java/htsjdk/samtools/AbstractBAMFileIndexTest.java b/src/test/java/htsjdk/samtools/AbstractBAMFileIndexTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/AbstractBAMFileIndexTest.java
rename to src/test/java/htsjdk/samtools/AbstractBAMFileIndexTest.java
diff --git a/src/test/java/htsjdk/samtools/BAMCigarOverflowTest.java b/src/test/java/htsjdk/samtools/BAMCigarOverflowTest.java
new file mode 100644
index 0000000..dd630f9
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/BAMCigarOverflowTest.java
@@ -0,0 +1,29 @@
+package htsjdk.samtools;
+
+import htsjdk.samtools.util.CloserUtil;
+import org.testng.annotations.Test;
+import static org.testng.Assert.assertEquals;
+
+import java.io.File;
+
+/**
+ * Test the fix of a bug reported by s-andrews in which the use of an arithmetic rather than a logical right shift in BinaryCigarCodec.binaryCigarToCigarElement()
+ * causes an overflow in the CIGAR when reading a BAM file for a read that spans a very large intron.
+ */
+public class BAMCigarOverflowTest {
+    private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools");
+
+    @Test
+    public void testCigarOverflow() throws Exception {
+        final SamReader reader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.LENIENT).open(new File(TEST_DATA_DIR, "BAMCigarOverflowTest/CigarOverflowTest.bam"));
+
+        //Load the single read from the BAM file.
+        final SAMRecord testBAMRecord = reader.iterator().next();
+        CloserUtil.close(reader);
+
+        //The BAM file that exposed the bug triggered a SAM validation error because the bin field of the BAM record did not equal the computed value. Here we test for this error.
+        //Cast to int to avoid an ambiguity in the assertEquals() call between assertEquals(int,int) and assertEquals(Object,Object).
+        assertEquals(testBAMRecord.computeIndexingBin(), (int) testBAMRecord.getIndexingBin());
+    }
+
+}
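The class comment above hinges on the difference between Java's arithmetic (>>) and logical (>>>) right shifts. A tiny standalone illustration with a hypothetical packed CIGAR element (operator length in the upper 28 bits, operator code in the low 4 bits):

    public class ShiftSketch {
        public static void main(final String[] args) {
            final int packed = 0x80000003;      // hypothetical element with the sign bit set
            System.out.println(packed >> 4);    // arithmetic shift sign-extends: -134217728 (bogus negative length)
            System.out.println(packed >>> 4);   // logical shift zero-fills:       134217728 (the intended unsigned length)
        }
    }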
diff --git a/src/test/java/htsjdk/samtools/BAMFileIndexTest.java b/src/test/java/htsjdk/samtools/BAMFileIndexTest.java
new file mode 100755
index 0000000..170bc47
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/BAMFileIndexTest.java
@@ -0,0 +1,527 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2010 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools;
+
+import htsjdk.samtools.util.CloseableIterator;
+import htsjdk.samtools.util.CloserUtil;
+import htsjdk.samtools.util.StopWatch;
+import htsjdk.samtools.util.StringUtil;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Random;
+import java.util.Set;
+
+import static org.testng.Assert.*;
+
+/**
+ * Test BAM file indexing.
+ */
+public class BAMFileIndexTest {
+    private final File BAM_FILE = new File("src/test/resources/htsjdk/samtools/BAMFileIndexTest/index_test.bam");
+    private final boolean mVerbose = false;
+
+    @Test
+    public void testGetSearchBins()
+            throws Exception {
+        final DiskBasedBAMFileIndex bfi = new DiskBasedBAMFileIndex(new File(BAM_FILE.getPath() + ".bai"),
+                null);    // todo can null be replaced with a Sequence dictionary for the BAM_FILE?
+        final long[] bins = bfi.getSpanOverlapping(1, 0, 0).toCoordinateArray();
+        /***
+         if (bins == null) {
+         System.out.println("Search bins: " + bins);
+         return;
+         }
+         System.out.println("Search bins:");
+         for (int i = 0; i < bins.length; i++) {
+         System.out.println(" " + Long.toHexString(bins[i]));
+         }
+         ***/
+        assertNotNull(bins);
+        assertEquals(bins.length, 2);
+    }
+
+    @Test
+    public void testSpecificQueries()
+            throws Exception {
+        assertEquals(runQueryTest(BAM_FILE, "chrM", 10400, 10600, true), 1);
+        assertEquals(runQueryTest(BAM_FILE, "chrM", 10400, 10600, false), 2);
+    }
+
+    @Test(groups = {"slow"})
+    public void testRandomQueries()
+            throws Exception {
+        runRandomTest(BAM_FILE, 1000, new Random());
+    }
+
+    @Test
+    public void testWholeChromosomes() {
+        checkChromosome("chrM", 23);
+        checkChromosome("chr1", 885);
+        checkChromosome("chr2", 837);
+        /***
+         checkChromosome("chr3", 683);
+         checkChromosome("chr4", 633);
+         checkChromosome("chr5", 611);
+         checkChromosome("chr6", 585);
+         checkChromosome("chr7", 521);
+         checkChromosome("chr8", 507);
+         checkChromosome("chr9", 388);
+         checkChromosome("chr10", 477);
+         checkChromosome("chr11", 467);
+         checkChromosome("chr12", 459);
+         checkChromosome("chr13", 327);
+         checkChromosome("chr14", 310);
+         checkChromosome("chr15", 280);
+         checkChromosome("chr16", 278);
+         checkChromosome("chr17", 269);
+         checkChromosome("chr18", 265);
+         checkChromosome("chr19", 178);
+         checkChromosome("chr20", 228);
+         checkChromosome("chr21", 123);
+         checkChromosome("chr22", 121);
+         checkChromosome("chrX", 237);
+         checkChromosome("chrY", 29);
+         ***/
+    }
+
+    @Test
+    public void testQueryUnmapped() {
+        final StopWatch linearScan = new StopWatch();
+        final StopWatch queryUnmapped = new StopWatch();
+        int unmappedCountFromLinearScan = 0;
+        final File bamFile = BAM_FILE;
+        final SamReader reader = SamReaderFactory.makeDefault().open(bamFile);
+        linearScan.start();
+        CloseableIterator<SAMRecord> it = reader.iterator();
+        int mappedCount = 0;
+        while (it.hasNext()) {
+            final SAMRecord rec = it.next();
+            if (rec.getReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX) {
+                unmappedCountFromLinearScan = 1;
+                break;
+            }
+            ++mappedCount;
+        }
+        linearScan.stop();
+        System.out.println("Found start of unmapped reads.  Num mapped reads: " + mappedCount);
+        System.out.println("Time so far: " + linearScan.getElapsedTimeSecs());
+        linearScan.start();
+
+        while (it.hasNext()) {
+            final SAMRecord rec = it.next();
+            Assert.assertEquals(rec.getReferenceIndex().intValue(), SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
+            ++unmappedCountFromLinearScan;
+        }
+        it.close();
+        linearScan.stop();
+        queryUnmapped.start();
+        it = reader.queryUnmapped();
+        int unmappedCountFromQueryUnmapped = 0;
+        while (it.hasNext()) {
+            final SAMRecord rec = it.next();
+            Assert.assertEquals(rec.getReferenceIndex().intValue(), SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
+            ++unmappedCountFromQueryUnmapped;
+        }
+        it.close();
+        queryUnmapped.stop();
+        System.out.println("Linear scan total time: " + linearScan.getElapsedTimeSecs());
+        System.out.println("queryUnmapped time: " + queryUnmapped.getElapsedTimeSecs());
+        System.out.println("Number of unmapped reads: " + unmappedCountFromQueryUnmapped);
+        Assert.assertEquals(unmappedCountFromQueryUnmapped, unmappedCountFromLinearScan);
+        CloserUtil.close(reader);
+    }
+
+    @Test
+    public void testQueryAlignmentStart() {
+        final SamReader reader = SamReaderFactory.makeDefault().open(BAM_FILE);
+        CloseableIterator<SAMRecord> it = reader.queryAlignmentStart("chr1", 202160268);
+        Assert.assertEquals(countElements(it), 2);
+        it.close();
+        it = reader.queryAlignmentStart("chr1", 201595153);
+        Assert.assertEquals(countElements(it), 1);
+        it.close();
+        // There are records that overlap this position, but none that start here
+        it = reader.queryAlignmentStart("chrM", 10400);
+        Assert.assertEquals(countElements(it), 0);
+        it.close();
+        // One past the last chr1 record
+        it = reader.queryAlignmentStart("chr1", 246817509);
+        Assert.assertEquals(countElements(it), 0);
+        it.close();
+        CloserUtil.close(reader);
+    }
+
+    @Test
+    public void testQueryMate() {
+        final SamReader reader = SamReaderFactory.makeDefault().open(BAM_FILE);
+
+        // Both ends mapped
+        SAMRecord rec = getSingleRecordStartingAt(reader, "chrM", 1687);
+        SAMRecord mate = reader.queryMate(rec);
+        assertMate(rec, mate);
+        SAMRecord originalRec = reader.queryMate(mate);
+        Assert.assertEquals(originalRec, rec);
+
+        // One end mapped
+        rec = getSingleRecordStartingAt(reader, "chr11", 48720338);
+        mate = reader.queryMate(rec);
+        assertMate(rec, mate);
+        originalRec = reader.queryMate(mate);
+        Assert.assertEquals(originalRec, rec);
+
+        // Both ends unmapped
+        final CloseableIterator<SAMRecord> it = reader.queryUnmapped();
+        rec = null;
+        while (it.hasNext()) {
+            final SAMRecord next = it.next();
+            if (next.getReadName().equals("2615")) {
+                rec = next;
+                break;
+            }
+        }
+        it.close();
+        Assert.assertNotNull(rec);
+        mate = reader.queryMate(rec);
+        assertMate(rec, mate);
+        originalRec = reader.queryMate(mate);
+        Assert.assertEquals(originalRec, rec);
+        CloserUtil.close(reader);
+    }
+
+    private void assertMate(final SAMRecord rec, final SAMRecord mate) {
+        Assert.assertNotNull(mate);
+        Assert.assertEquals(mate.getReadName(), rec.getReadName());
+        Assert.assertEquals(mate.getReferenceIndex(), rec.getMateReferenceIndex());
+        if (SAMUtils.getMateCigarString(rec) != null) {
+            Assert.assertEquals(mate.getCigarString(), SAMUtils.getMateCigarString(rec));
+        }
+        Assert.assertEquals(mate.getAlignmentStart(), rec.getMateAlignmentStart());
+        Assert.assertFalse(mate.getFirstOfPairFlag() == rec.getFirstOfPairFlag());
+    }
+
+    /**
+     * Compare the results of a multi-interval query versus the union of the results from each interval done
+     * separately.
+     */
+    @Test(dataProvider = "testMultiIntervalQueryDataProvider")
+    public void testMultiIntervalQuery(final boolean contained) {
+        final List<String> referenceNames = getReferenceNames(BAM_FILE);
+
+        final QueryInterval[] intervals = generateRandomIntervals(referenceNames.size(), 1000, new Random());
+        final Set<SAMRecord> multiIntervalRecords = new HashSet<SAMRecord>();
+        final Set<SAMRecord> singleIntervalRecords = new HashSet<SAMRecord>();
+        final SamReader reader = SamReaderFactory.makeDefault().open(BAM_FILE);
+        for (final QueryInterval interval : intervals) {
+            consumeAll(singleIntervalRecords, reader.query(referenceNames.get(interval.referenceIndex), interval.start, interval.end, contained));
+        }
+
+        final QueryInterval[] optimizedIntervals = QueryInterval.optimizeIntervals(intervals);
+        consumeAll(multiIntervalRecords, reader.query(optimizedIntervals, contained));
+        final Iterator<SAMRecord> singleIntervalRecordIterator = singleIntervalRecords.iterator();
+        boolean failed = false;
+        while (singleIntervalRecordIterator.hasNext()) {
+            final SAMRecord record = singleIntervalRecordIterator.next();
+            if (!multiIntervalRecords.remove(record)) {
+                System.out.println("SingleIntervalQuery found " + record + " but MultiIntervalQuery did not");
+                failed = true;
+            }
+        }
+        for (final SAMRecord record : multiIntervalRecords) {
+            System.out.println("MultiIntervalQuery found " + record + " but SingleIntervalQuery did not");
+            failed = true;
+        }
+        Assert.assertFalse(failed);
+        CloserUtil.close(reader);
+    }
+
+    @DataProvider(name = "testMultiIntervalQueryDataProvider")
+    private Object[][] testMultiIntervalQueryDataProvider() {
+        return new Object[][]{{true}, {false}};
+    }
+
+    @Test
+    public void testUnmappedMateWithCoordinate() throws Exception {
+        // TODO: Use SAMRecordSetBuilder when it is able to create a pair with one end unmapped
+        final String samText = "@HD\tVN:1.0\tSO:coordinate\n" +
+                "@SQ\tSN:chr1\tLN:101\n" +
+                "@SQ\tSN:chr2\tLN:101\n" +
+                "@SQ\tSN:chr3\tLN:101\n" +
+                "@SQ\tSN:chr4\tLN:101\n" +
+                "@SQ\tSN:chr5\tLN:101\n" +
+                "@SQ\tSN:chr6\tLN:101\n" +
+                "@SQ\tSN:chr7\tLN:404\n" +
+                "@SQ\tSN:chr8\tLN:202\n" +
+                "@RG\tID:0\tSM:Hi,Mom!\n" +
+                "@PG\tID:1\tPN:Hey!\tVN:2.0\n" +
+                "one_end_mapped\t73\tchr7\t100\t255\t101M\t*\t0\t0\tCAACAGAAGCNGGNATCTGTGTTTGTGTTTCGGATTTCCTGCTGAANNGNTTNTCGNNTCNNNNNNNNATCCCGATTTCNTTCCGCAGCTNACCTCCCAAN\t)'.*.+2,))&&'&*/)-&*-)&.-)&)&),/-&&..)./.,.).*&&,&.&&-)&&&0*&&&&&&&&/32/,01460&&/6/*0*/2/283//36868/&\tRG:Z:0\n" +
+                "one_end_mapped\t133\tchr7\t100\t0\t*\t=\t100\t0\tNCGCGGCATCNCGATTTCTTTCCGCAGCTAACCTCCCGACAGATCGGCAGCGCGTCGTGTAGGTTATTATGGTACATCTTGTCGTGCGGCNAGAGCATACA\t&/15445666651/566666553+2/14/&/555512+3/)-'/-&-'*+))*''13+3)'//++''/'))/3+&*5++)&'2+&+/*&-&&*)&-./1'1\tRG:Z:0\n";
+        final ByteArrayInputStream bis = new ByteArrayInputStream(StringUtil.stringToBytes(samText));
+        final File bamFile = File.createTempFile("BAMFileIndexTest.", BamFileIoUtils.BAM_FILE_EXTENSION);
+        bamFile.deleteOnExit();
+        final SamReader textReader = SamReaderFactory.makeDefault().open(SamInputResource.of(bis));
+        SAMFileWriterFactory samFileWriterFactory = new SAMFileWriterFactory();
+        samFileWriterFactory.setCreateIndex(true);
+        final SAMFileWriter writer = samFileWriterFactory.makeBAMWriter(textReader.getFileHeader(), true, bamFile);
+        for (final SAMRecord rec : textReader) {
+            writer.addAlignment(rec);
+        }
+        writer.close();
+        final SamReader bamReader = SamReaderFactory.makeDefault().open(bamFile);
+        SamFiles.findIndex(bamFile).deleteOnExit();
+        Assert.assertEquals(countElements(bamReader.queryContained("chr7", 100, 100)), 1);
+        Assert.assertEquals(countElements(bamReader.queryOverlapping("chr7", 100, 100)), 2);
+        bamReader.close();
+        textReader.close();
+    }
+
+    private <E> void consumeAll(final Collection<E> collection, final CloseableIterator<E> iterator) {
+        while (iterator.hasNext()) {
+            collection.add(iterator.next());
+        }
+        iterator.close();
+    }
+
+    private SAMRecord getSingleRecordStartingAt(final SamReader reader, final String sequence, final int alignmentStart) {
+        final CloseableIterator<SAMRecord> it = reader.queryAlignmentStart(sequence, alignmentStart);
+        Assert.assertTrue(it.hasNext());
+        final SAMRecord rec = it.next();
+        Assert.assertNotNull(rec);
+        Assert.assertFalse(it.hasNext());
+        it.close();
+        return rec;
+    }
+
+    private int countElements(final CloseableIterator<SAMRecord> it) {
+        int num;
+        for (num = 0; it.hasNext(); ++num, it.next()) {
+        }
+        it.close();
+        return num;
+    }
+
+    private void checkChromosome(final String name, final int expectedCount) {
+        int count = runQueryTest(BAM_FILE, name, 0, 0, true);
+        assertEquals(count, expectedCount);
+        count = runQueryTest(BAM_FILE, name, 0, 0, false);
+        assertEquals(count, expectedCount);
+    }
+
+    private void runRandomTest(final File bamFile, final int count, final Random generator) {
+        final List<String> referenceNames = getReferenceNames(bamFile);
+        final QueryInterval[] intervals = generateRandomIntervals(referenceNames.size(), count, generator);
+        for (final QueryInterval interval : intervals) {
+            final String refName = referenceNames.get(interval.referenceIndex);
+            final int startPos = interval.start;
+            final int endPos = interval.end;
+            System.out.println("Testing query " + refName + ":" + startPos + "-" + endPos + " ...");
+            try {
+                runQueryTest(bamFile, refName, startPos, endPos, true);
+                runQueryTest(bamFile, refName, startPos, endPos, false);
+            } catch (final Throwable exc) {
+                String message = "Query test failed: " + refName + ":" + startPos + "-" + endPos;
+                message += ": " + exc.getMessage();
+                throw new RuntimeException(message, exc);
+            }
+        }
+    }
+
+    private QueryInterval[] generateRandomIntervals(final int numReferences, final int count, final Random generator) {
+        final QueryInterval[] intervals = new QueryInterval[count];
+        final int maxCoordinate = 10000000;
+        for (int i = 0; i < count; i++) {
+            final int referenceIndex = generator.nextInt(numReferences);
+            final int coord1 = generator.nextInt(maxCoordinate + 1);
+            final int coord2 = generator.nextInt(maxCoordinate + 1);
+            final int startPos = Math.min(coord1, coord2);
+            final int endPos = Math.max(coord1, coord2);
+            intervals[i] = new QueryInterval(referenceIndex, startPos, endPos);
+        }
+
+        return intervals;
+    }
+
+    private List<String> getReferenceNames(final File bamFile) {
+        final SamReader reader = SamReaderFactory.makeDefault().open(bamFile);
+        final List<String> result = new ArrayList<String>();
+        final List<SAMSequenceRecord> seqRecords = reader.getFileHeader().getSequenceDictionary().getSequences();
+        for (final SAMSequenceRecord seqRecord : seqRecords) {
+            if (seqRecord.getSequenceName() != null) {
+                result.add(seqRecord.getSequenceName());
+            }
+        }
+        CloserUtil.close(reader);
+        return result;
+    }
+
+    private int runQueryTest(final File bamFile, final String sequence, final int startPos, final int endPos, final boolean contained) {
+        verbose("Testing query " + sequence + ":" + startPos + "-" + endPos + " ...");
+        final SamReader reader1 = SamReaderFactory.makeDefault().open(bamFile);
+        final SamReader reader2 = SamReaderFactory.makeDefault().open(bamFile);
+        final Iterator<SAMRecord> iter1 = reader1.query(sequence, startPos, endPos, contained);
+        final Iterator<SAMRecord> iter2 = reader2.iterator();
+        // Compare ordered iterators.
+        // Confirm that iter1 is a subset of iter2 that properly filters.
+        SAMRecord record1 = null;
+        SAMRecord record2 = null;
+        int count1 = 0;
+        int count2 = 0;
+        int beforeCount = 0;
+        int afterCount = 0;
+        while (true) {
+            if (record1 == null && iter1.hasNext()) {
+                record1 = iter1.next();
+                count1++;
+            }
+            if (record2 == null && iter2.hasNext()) {
+                record2 = iter2.next();
+                count2++;
+            }
+            // System.out.println("Iteration:");
+            // System.out.println(" Record1 = " + ((record1 == null) ? "null" : record1.format()));
+            // System.out.println(" Record2 = " + ((record2 == null) ? "null" : record2.format()));
+            if (record1 == null && record2 == null) {
+                break;
+            }
+            if (record1 == null) {
+                checkPassesFilter(false, record2, sequence, startPos, endPos, contained);
+                record2 = null;
+                afterCount++;
+                continue;
+            }
+            assertNotNull(record2);
+            final int ordering = compareCoordinates(record1, record2);
+            if (ordering > 0) {
+                checkPassesFilter(false, record2, sequence, startPos, endPos, contained);
+                record2 = null;
+                beforeCount++;
+                continue;
+            }
+            assertTrue(ordering == 0);
+            checkPassesFilter(true, record1, sequence, startPos, endPos, contained);
+            checkPassesFilter(true, record2, sequence, startPos, endPos, contained);
+            assertEquals(record1.getReadName(), record2.getReadName());
+            assertEquals(record1.getReadString(), record2.getReadString());
+            record1 = null;
+            record2 = null;
+        }
+        CloserUtil.close(reader1);
+        CloserUtil.close(reader2);
+        verbose("Checked " + count1 + " records against " + count2 + " records.");
+        verbose("Found " + (count2 - beforeCount - afterCount) + " records matching.");
+        verbose("Found " + beforeCount + " records before.");
+        verbose("Found " + afterCount + " records after.");
+        return count1;
+    }
+
+    private void checkPassesFilter(final boolean expected, final SAMRecord record, final String sequence, final int startPos, final int endPos, final boolean contained) {
+        final boolean passes = passesFilter(record, sequence, startPos, endPos, contained);
+        if (passes != expected) {
+            System.out.println("Error: Record erroneously " +
+                    (passes ? "passed" : "failed") +
+                    " filter.");
+            System.out.println(" Record: " + record.getSAMString());
+            System.out.println(" Filter: " + sequence + ":" +
+                    startPos + "-" + endPos +
+                    " (" + (contained ? "contained" : "overlapping") + ")");
+            assertEquals(passes, expected);
+        }
+    }
+
+    private boolean passesFilter(final SAMRecord record, final String sequence, final int startPos, final int endPos, final boolean contained) {
+        if (record == null) {
+            return false;
+        }
+        if (!safeEquals(record.getReferenceName(), sequence)) {
+            return false;
+        }
+        final int alignmentStart = record.getAlignmentStart();
+        int alignmentEnd = record.getAlignmentEnd();
+        if (alignmentStart <= 0) {
+            assertTrue(record.getReadUnmappedFlag());
+            return false;
+        }
+        if (alignmentEnd <= 0) {
+            // For indexing-only records, treat as single base alignment.
+            assertTrue(record.getReadUnmappedFlag());
+            alignmentEnd = alignmentStart;
+        }
+        if (contained) {
+            if (startPos != 0 && alignmentStart < startPos) {
+                return false;
+            }
+            if (endPos != 0 && alignmentEnd > endPos) {
+                return false;
+            }
+        } else {
+            if (startPos != 0 && alignmentEnd < startPos) {
+                return false;
+            }
+            if (endPos != 0 && alignmentStart > endPos) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    private int compareCoordinates(final SAMRecord record1, final SAMRecord record2) {
+        final int seqIndex1 = record1.getReferenceIndex();
+        final int seqIndex2 = record2.getReferenceIndex();
+        if (seqIndex1 == -1) {
+            return ((seqIndex2 == -1) ? 0 : -1);
+        } else if (seqIndex2 == -1) {
+            return 1;
+        }
+        int result = seqIndex1 - seqIndex2;
+        if (result != 0) {
+            return result;
+        }
+        result = record1.getAlignmentStart() - record2.getAlignmentStart();
+        return result;
+    }
+
+    private boolean safeEquals(final Object o1, final Object o2) {
+        if (o1 == o2) {
+            return true;
+        } else if (o1 == null || o2 == null) {
+            return false;
+        } else {
+            return o1.equals(o2);
+        }
+    }
+
+    private void verbose(final String text) {
+        if (mVerbose) {
+            System.out.println("# " + text);
+        }
+    }
+}
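The multi-interval test above depends on QueryInterval.optimizeIntervals() being applied before the interval array is handed to SamReader.query(). A short self-contained sketch of that pattern, with a hypothetical BAM path:

    import htsjdk.samtools.QueryInterval;
    import htsjdk.samtools.SAMRecordIterator;
    import htsjdk.samtools.SamReader;
    import htsjdk.samtools.SamReaderFactory;

    import java.io.File;
    import java.io.IOException;

    public class MultiIntervalQuerySketch {
        public static void main(final String[] args) throws IOException {
            try (SamReader reader = SamReaderFactory.makeDefault().open(new File("example.bam"))) {
                final QueryInterval[] raw = {
                        new QueryInterval(0, 100, 200),
                        new QueryInterval(0, 150, 300),   // overlaps the first interval
                        new QueryInterval(1, 1, 50)
                };
                // Multi-interval queries require a sorted, merged (optimized) interval array.
                final QueryInterval[] optimized = QueryInterval.optimizeIntervals(raw);
                try (SAMRecordIterator it = reader.query(optimized, false)) {   // false = overlapping, not contained
                    while (it.hasNext()) {
                        System.out.println(it.next().getReadName());
                    }
                }
            }
        }
    }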
diff --git a/src/tests/java/htsjdk/samtools/BAMFileWriterTest.java b/src/test/java/htsjdk/samtools/BAMFileWriterTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/BAMFileWriterTest.java
rename to src/test/java/htsjdk/samtools/BAMFileWriterTest.java
diff --git a/src/test/java/htsjdk/samtools/BAMIndexWriterTest.java b/src/test/java/htsjdk/samtools/BAMIndexWriterTest.java
new file mode 100644
index 0000000..09f9236
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/BAMIndexWriterTest.java
@@ -0,0 +1,227 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2010 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools;
+
+import htsjdk.samtools.util.CloserUtil;
+import htsjdk.samtools.util.IOUtil;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.IOException;
+
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertTrue;
+
+/**
+ * Test BAM file index creation
+ */
+public class BAMIndexWriterTest {
+    // Two input files for basic test
+    private final String BAM_FILE_LOCATION = "src/test/resources/htsjdk/samtools/BAMFileIndexTest/index_test.bam";
+    private final String BAI_FILE_LOCATION = "src/test/resources/htsjdk/samtools/BAMFileIndexTest/index_test.bam.bai";
+    private final File BAM_FILE = new File(BAM_FILE_LOCATION);
+    private final File BAI_FILE = new File(BAI_FILE_LOCATION);
+
+    private final boolean mVerbose = true;
+
+    @Test(enabled = true)
+    public void testWriteText() throws Exception {
+        // Compare the text form of the c-generated bai file and a java-generated one
+        final File cBaiTxtFile = File.createTempFile("cBai.", ".bai.txt");
+        BAMIndexer.createAndWriteIndex(BAI_FILE, cBaiTxtFile, true);
+        verbose("Wrote textual C BAM Index file " + cBaiTxtFile);
+
+        final File javaBaiFile = File.createTempFile("javaBai.", "java.bai");
+        final File javaBaiTxtFile = new File(javaBaiFile.getAbsolutePath() + ".txt");
+        final SamReader bam = SamReaderFactory.makeDefault().enable(SamReaderFactory.Option.INCLUDE_SOURCE_IN_RECORDS).open(BAM_FILE);
+        BAMIndexer.createIndex(bam, javaBaiFile);
+        verbose("Wrote binary Java BAM Index file " + javaBaiFile);
+
+        // now, turn the bai file into text
+        BAMIndexer.createAndWriteIndex(javaBaiFile, javaBaiTxtFile, true);
+        // and compare them
+        verbose("diff " + javaBaiTxtFile + " " + cBaiTxtFile);
+        IOUtil.assertFilesEqual(javaBaiTxtFile, cBaiTxtFile);
+        cBaiTxtFile.deleteOnExit();
+        javaBaiFile.deleteOnExit();
+        javaBaiTxtFile.deleteOnExit();
+        CloserUtil.close(bam);
+    }
+
+    @Test(enabled = true)
+    public void testWriteBinary() throws Exception {
+        // Compare java-generated bai file with c-generated and sorted bai file
+        final File javaBaiFile = File.createTempFile("javaBai.", ".bai");
+        final SamReader bam = SamReaderFactory.makeDefault().enable(SamReaderFactory.Option.INCLUDE_SOURCE_IN_RECORDS).open(BAM_FILE);
+        BAMIndexer.createIndex(bam, javaBaiFile);
+        verbose("Wrote binary java BAM Index file " + javaBaiFile);
+
+        final File cRegeneratedBaiFile = File.createTempFile("cBai.", ".bai");
+        BAMIndexer.createAndWriteIndex(BAI_FILE, cRegeneratedBaiFile, false);
+        verbose("Wrote sorted C binary BAM Index file " + cRegeneratedBaiFile);
+
+        // Binary compare of javaBaiFile and cRegeneratedBaiFile should be the same
+        verbose("diff " + javaBaiFile + " " + cRegeneratedBaiFile);
+        IOUtil.assertFilesEqual(javaBaiFile, cRegeneratedBaiFile);
+        javaBaiFile.deleteOnExit();
+        cRegeneratedBaiFile.deleteOnExit();
+        CloserUtil.close(bam);
+    }
+
+    @Test(enabled = false, dataProvider = "linearIndexTestData")
+    /** Test linear index at specific references and windows */
+    public void testLinearIndex(String testName, String filepath, int problemReference, int problemWindowStart, int problemWindowEnd, int expectedCount) {
+        final SamReader sfr = SamReaderFactory.makeDefault().open(new File(filepath));
+        for (int problemWindow = problemWindowStart; problemWindow <= problemWindowEnd; problemWindow++) {
+            int count = countAlignmentsInWindow(problemReference, problemWindow, sfr, expectedCount);
+            if (expectedCount != -1)
+                assertEquals(count, expectedCount);
+        }
+        CloserUtil.close(sfr);
+    }
+
+    @DataProvider(name = "linearIndexTestData")
+    public Object[][] getLinearIndexTestData() {
+        // Add data here for test cases, reference, and windows where linear index needs testing
+        return new Object[][]{
+                new Object[]{"index_test", BAM_FILE_LOCATION, 1, 29, 66, -1},  // 29-66
+                new Object[]{"index_test", BAM_FILE_LOCATION, 1, 68, 118, -1},  // 68-118
+
+        };
+    }
+
+    private int countAlignmentsInWindow(int reference, int window, SamReader reader, int expectedCount) {
+        final int SIXTEEN_K = 1 << 14;       // 1 << LinearIndex.BAM_LIDX_SHIFT
+        final int start = window * SIXTEEN_K;             // first position covered by this linear index window
+        final int stop = ((window + 1) * SIXTEEN_K) - 1;  // last position covered by this linear index window
+
+        final String chr = reader.getFileHeader().getSequence(reference).getSequenceName();
+
+        // get records for the entire linear index window
+        SAMRecordIterator iter = reader.queryOverlapping(chr, start, stop);
+        SAMRecord rec;
+        int count = 0;
+        while (iter.hasNext()) {
+            rec = iter.next();
+            count++;
+            if (expectedCount == -1)
+                System.err.println(rec.getReadName());
+        }
+        iter.close();
+        return count;
+    }
+
+
+    @Test(enabled = false, dataProvider = "indexComparisonData")
+    /** Test linear index at all references and windows, comparing with existing index */
+    public void compareLinearIndex(String testName, String bamFile, String bamIndexFile) throws IOException {
+        // compare index generated from bamFile with existing bamIndex file
+        // by testing all the references' windows and comparing the counts
+
+        // 1. generate bai file
+        // 2. count its references
+        // 3. count bamIndex references comparing counts
+
+        // 1. generate bai file
+        File bam = new File(bamFile);
+        assertTrue(bam.exists(), testName + " input bam file doesn't exist: " + bamFile);
+
+        File indexFile1 = createIndexFile(bam);
+        assertTrue(indexFile1.exists(), testName + " generated bam file's index doesn't exist: " + indexFile1);
+
+        // 2. count its references
+        File indexFile2 = new File(bamIndexFile);
+        assertTrue(indexFile2.exists(), testName + " input index file doesn't exist: " + indexFile2);
+
+        final CachingBAMFileIndex existingIndex1 = new CachingBAMFileIndex(indexFile1, null); // todo null sequence dictionary?
+        final CachingBAMFileIndex existingIndex2 = new CachingBAMFileIndex(indexFile2, null);
+        final int n_ref = existingIndex1.getNumberOfReferences();
+        assertEquals(n_ref, existingIndex2.getNumberOfReferences());
+
+        final SamReader reader1 = SamReaderFactory.makeDefault().disable(SamReaderFactory.Option.EAGERLY_DECODE).open(bam);
+
+        final SamReader reader2 = SamReaderFactory.makeDefault().disable(SamReaderFactory.Option.EAGERLY_DECODE).open(bam);
+
+        System.out.println("Comparing " + n_ref + " references in " + indexFile1 + " and " + indexFile2);
+
+        for (int i = 0; i < n_ref; i++) {
+            final BAMIndexContent content1 = existingIndex1.getQueryResults(i);
+            final BAMIndexContent content2 = existingIndex2.getQueryResults(i);
+            if (content1 == null) {
+                assertTrue(content2 == null, "No content for 1st bam index, but content for second at reference " + i);
+                continue;
+            }
+            int[] counts1 = new int[LinearIndex.MAX_LINEAR_INDEX_SIZE];
+            int[] counts2 = new int[LinearIndex.MAX_LINEAR_INDEX_SIZE];
+            LinearIndex li1 = content1.getLinearIndex();
+            LinearIndex li2 = content2.getLinearIndex();
+            // todo: note that li1 and li2 sizes may differ; implies 0's in the smaller index's windows
+            // 3. count bamIndex references comparing counts
+            int baiSize = Math.max(li1.size(), li2.size());
+            for (int win = 0; win < baiSize; win++) {
+                counts1[win] = countAlignmentsInWindow(i, win, reader1, 0);
+                counts2[win] = countAlignmentsInWindow(i, win, reader2, counts1[win]);
+                assertEquals(counts2[win], counts1[win], "Counts don't match for reference " + i +
+                        " window " + win);
+            }
+        }
+
+        indexFile1.deleteOnExit();
+
+    }
+
+    @DataProvider(name = "indexComparisonData")
+    public Object[][] getIndexComparisonData() {
+        // enter bam file and alternate index file to be tested against generated bam index
+        return new Object[][]{
+                new Object[]{"index_test", BAM_FILE_LOCATION, BAI_FILE_LOCATION},
+        };
+    }
+
+    @Test(expectedExceptions = SAMException.class)
+    public void testRequireCoordinateSortOrder() {
+        SAMFileHeader header = new SAMFileHeader();
+        header.setSortOrder(SAMFileHeader.SortOrder.queryname);
+
+        new BAMIndexer(new ByteArrayOutputStream(), header);
+    }
+
+    /** generates the index file using the latest java index generating code */
+    private File createIndexFile(File bamFile) throws IOException {
+        final File bamIndexFile = File.createTempFile("Bai.", ".bai");
+        final SamReader bam = SamReaderFactory.makeDefault().open(bamFile);
+        BAMIndexer.createIndex(bam, bamIndexFile);
+        verbose("Wrote BAM Index file " + bamIndexFile);
+        bam.close();
+        return bamIndexFile;
+    }
+
+    private void verbose(final String text) {
+        if (mVerbose) {
+            System.out.println("#BAMIndexWriterTest " + text);
+        }
+    }
+}
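The disabled linear-index tests above rely on the window arithmetic noted in countAlignmentsInWindow, where each linear index window spans 1 << 14 = 16,384 bases. A small sketch of that conversion, using a hypothetical window index:

    public class LinearWindowSketch {
        private static final int BAM_LIDX_SHIFT = 14;   // each linear index window covers 2^14 = 16384 bases

        public static void main(final String[] args) {
            final int window = 29;                                  // hypothetical window index from the data provider
            final int start = window << BAM_LIDX_SHIFT;             // first position covered by the window: 475136
            final int stop = ((window + 1) << BAM_LIDX_SHIFT) - 1;  // last position covered by the window:  491519
            System.out.println("window " + window + " covers " + start + "-" + stop);
            System.out.println("position 475136 maps back to window " + (475136 >> BAM_LIDX_SHIFT));
        }
    }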
diff --git a/src/test/java/htsjdk/samtools/BAMIteratorTest.java b/src/test/java/htsjdk/samtools/BAMIteratorTest.java
new file mode 100644
index 0000000..5fa9e7d
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/BAMIteratorTest.java
@@ -0,0 +1,71 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools;
+
+import htsjdk.samtools.util.CloseableIterator;
+import htsjdk.samtools.util.CloserUtil;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+
+/**
+ * @author alecw at broadinstitute.org
+ */
+public class BAMIteratorTest {
+    private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools");
+
+    @Test(dataProvider = "dataProvider")
+    public void testIterateEmptyBam(final String bam) throws Exception {
+        final SamReader reader = SamReaderFactory.makeDefault().open(new File(TEST_DATA_DIR, bam));
+        int numRecords = 0;
+        for (final SAMRecord rec : reader) {
+            ++numRecords;
+        }
+        Assert.assertEquals(numRecords, 0);
+        CloserUtil.close(reader);
+    }
+
+    @Test(dataProvider = "dataProvider")
+    public void testQueryUnmappedEmptyBam(final String bam) throws Exception {
+        final SamReader reader = SamReaderFactory.makeDefault().open(new File(TEST_DATA_DIR, bam));
+        final CloseableIterator<SAMRecord> it = reader.queryUnmapped();
+        int numRecords = 0;
+        while (it.hasNext()) {
+            it.next();
+            ++numRecords;
+        }
+        Assert.assertEquals(numRecords, 0);
+        CloserUtil.close(reader);
+    }
+
+    @DataProvider(name = "dataProvider")
+    public Object[][] bams() {
+        return new Object[][]{
+                {"empty.bam"},
+                {"empty_no_empty_gzip_block.bam"}
+        };
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/BAMQueryMultipleIntervalsIteratorFilterTest.java b/src/test/java/htsjdk/samtools/BAMQueryMultipleIntervalsIteratorFilterTest.java
new file mode 100644
index 0000000..7c0bb1f
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/BAMQueryMultipleIntervalsIteratorFilterTest.java
@@ -0,0 +1,146 @@
+package htsjdk.samtools;
+
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.Arrays;
+import java.util.Random;
+
+public class BAMQueryMultipleIntervalsIteratorFilterTest {
+
+    private final byte[] BASES = {'A', 'C', 'G', 'T'};
+    private final Random random = new Random();
+
+    @DataProvider(name="compareIntervalToRecord")
+    public Object[][] compareIntervalToRecord() {
+        return new Object[][] {
+                { new QueryInterval(0, 20, 20), 0, 10, 5, BAMIteratorFilter.IntervalComparison.AFTER },
+                { new QueryInterval(0, 20, 22), 0, 10, 5, BAMIteratorFilter.IntervalComparison.AFTER },
+                { new QueryInterval(1, 10, 22), 0, 10, 5, BAMIteratorFilter.IntervalComparison.AFTER },
+                { new QueryInterval(1, 0, 22), 0, 10, 5, BAMIteratorFilter.IntervalComparison.AFTER },
+                { new QueryInterval(1, -1, 22), 0, 10, 5, BAMIteratorFilter.IntervalComparison.AFTER },
+                { new QueryInterval(1, 1, 22), 0, 10, 5, BAMIteratorFilter.IntervalComparison.AFTER },
+
+                { new QueryInterval(0, 0, 4), 0, 10, 5, BAMIteratorFilter.IntervalComparison.BEFORE },
+                { new QueryInterval(0, 0, 5), 0, 10, 5, BAMIteratorFilter.IntervalComparison.BEFORE },
+                { new QueryInterval(0, 0, -1), 1, 10, 5, BAMIteratorFilter.IntervalComparison.BEFORE },
+                { new QueryInterval(0, 1, 0), 1, 10, 5, BAMIteratorFilter.IntervalComparison.BEFORE },
+                { new QueryInterval(0, 1, -1), 1, 10, 5, BAMIteratorFilter.IntervalComparison.BEFORE },
+                { new QueryInterval(0, 0, 0), 1, 10, 5, BAMIteratorFilter.IntervalComparison.BEFORE },
+                { new QueryInterval(0, 9, 9), 0, 10, 5, BAMIteratorFilter.IntervalComparison.BEFORE },
+                { new QueryInterval(0, 1, 4), 0, 10, 5, BAMIteratorFilter.IntervalComparison.BEFORE },
+                { new QueryInterval(0, 1, 4), 1, 10, 5, BAMIteratorFilter.IntervalComparison.BEFORE },
+
+                { new QueryInterval(0, 0, 0), 0, 10, 5, BAMIteratorFilter.IntervalComparison.CONTAINED },
+                { new QueryInterval(0, 1, -1), 0, 1, 100, BAMIteratorFilter.IntervalComparison.CONTAINED },
+                { new QueryInterval(0, 1, 0), 0, 1, 100, BAMIteratorFilter.IntervalComparison.CONTAINED },
+                { new QueryInterval(0, 1, 0), 0, 10, 5, BAMIteratorFilter.IntervalComparison.CONTAINED },
+                { new QueryInterval(0, 1, -1), 0, 10, 5, BAMIteratorFilter.IntervalComparison.CONTAINED },
+                { new QueryInterval(0, 10, 15), 0, 10, 5, BAMIteratorFilter.IntervalComparison.CONTAINED },
+                { new QueryInterval(0, 10, 0), 0, 10, 5, BAMIteratorFilter.IntervalComparison.CONTAINED },
+
+                { new QueryInterval(0, 10, 11), 0, 10, 5, BAMIteratorFilter.IntervalComparison.OVERLAPPING },
+                { new QueryInterval(0, 1, 10), 0, 9, 5, BAMIteratorFilter.IntervalComparison.OVERLAPPING },
+                { new QueryInterval(0, 0, 10), 0, 9, 5, BAMIteratorFilter.IntervalComparison.OVERLAPPING },
+                { new QueryInterval(0, 1, 10), 0, 9, 5, BAMIteratorFilter.IntervalComparison.OVERLAPPING },
+                { new QueryInterval(0, 1, 5), 0, 5, 10, BAMIteratorFilter.IntervalComparison.OVERLAPPING },
+        };
+    }
+
+    @Test(dataProvider = "compareIntervalToRecord")
+    public void testCompareIntervalToRecord(
+            final QueryInterval query,
+            final int refIndex,
+            final int start,
+            final int length,
+            final BAMIteratorFilter.IntervalComparison expectedState)
+    {
+        SAMRecord samRec = getSAMRecord(refIndex, start, length);
+        Assert.assertEquals(BAMQueryMultipleIntervalsIteratorFilter.compareIntervalToRecord(query, samRec), expectedState);
+    }
+
+    @DataProvider(name="compareToFilter")
+    public Object[][] compareToFilter() {
+        return new Object[][] {
+                { new QueryInterval[] { new QueryInterval(0, 10, 11), new QueryInterval(1, 1, 10) },
+                        0, 10, 5, true, BAMIteratorFilter.FilteringIteratorState.CONTINUE_ITERATION },
+                { new QueryInterval[] { new QueryInterval(0, 10, 11), new QueryInterval(1, 5, 10) },
+                        1, 1, 5, true, BAMIteratorFilter.FilteringIteratorState.CONTINUE_ITERATION },
+                { new QueryInterval[] { new QueryInterval(0, 20, 20), new QueryInterval(0, 10, 5) },
+                        0, 10, 5, true, BAMIteratorFilter.FilteringIteratorState.CONTINUE_ITERATION },
+                { new QueryInterval[] { new QueryInterval(0, 20, 22), new QueryInterval(0, 10, 5) },
+                        0, 10, 5, true, BAMIteratorFilter.FilteringIteratorState.CONTINUE_ITERATION },
+                { new QueryInterval[] { new QueryInterval(1, 10, 22), new QueryInterval(0, 10, 5) },
+                        0, 10, 5, true, BAMIteratorFilter.FilteringIteratorState.CONTINUE_ITERATION },
+                { new QueryInterval[] { new QueryInterval(1, 0, 22), new QueryInterval(0, 10, 5) },
+                        0, 10, 5, true, BAMIteratorFilter.FilteringIteratorState.CONTINUE_ITERATION },
+                { new QueryInterval[] { new QueryInterval(1, -1, 22), new QueryInterval(0, 10, 5) },
+                        0, 10, 5, true, BAMIteratorFilter.FilteringIteratorState.CONTINUE_ITERATION },
+                { new QueryInterval[] { new QueryInterval(0, 10, 5), new QueryInterval(1, 1, 22) },
+                        0, 10, 5, true, BAMIteratorFilter.FilteringIteratorState.CONTINUE_ITERATION },
+                { new QueryInterval[] { new QueryInterval(0, 1, -1), new QueryInterval(1, 5, 10) },
+                        1, 10, 5, true, BAMIteratorFilter.FilteringIteratorState.CONTINUE_ITERATION},
+
+                { new QueryInterval[] { new QueryInterval(0, 1, 4), new QueryInterval(0, 10, 5) },
+                        0, 10, 5, true, BAMIteratorFilter.FilteringIteratorState.STOP_ITERATION },
+                { new QueryInterval[] { new QueryInterval(0, 0, 5), new QueryInterval(0, 10, 5) },
+                        0, 10, 5, true, BAMIteratorFilter.FilteringIteratorState.STOP_ITERATION },
+                { new QueryInterval[] { new QueryInterval(0, 0, 5), new QueryInterval(0, 5, 5) },
+                        0, 10, 5, true, BAMIteratorFilter.FilteringIteratorState.STOP_ITERATION},
+                { new QueryInterval[] { new QueryInterval(0, 0, 5), new QueryInterval(1, 5, 5) },
+                        1, 10, 5, true, BAMIteratorFilter.FilteringIteratorState.STOP_ITERATION},
+
+                { new QueryInterval[] { new QueryInterval(1, 10, 5), new QueryInterval(1, 10, 10) },
+                        1, 10, 5, true, BAMIteratorFilter.FilteringIteratorState.CONTINUE_ITERATION},
+                { new QueryInterval[] { new QueryInterval(1, 10, 5), new QueryInterval(1, 10, 10) },
+                        1, 10, 5, false, BAMIteratorFilter.FilteringIteratorState.MATCHES_FILTER},
+
+                { new QueryInterval[] { new QueryInterval(0, 0, -1), new QueryInterval(1, 10, 5) },
+                        0, 10, 5, true, BAMIteratorFilter.FilteringIteratorState.MATCHES_FILTER },
+                { new QueryInterval[] { new QueryInterval(0, 1, -1), new QueryInterval(1, 5, -1) },
+                        1, 10, 5, true, BAMIteratorFilter.FilteringIteratorState.MATCHES_FILTER},
+        };
+    }
+
+    @Test(dataProvider = "compareToFilter")
+    public void testCompareToFilter(
+            final QueryInterval[] query,
+            final int refIndex,
+            final int start,
+            final int length,
+            final boolean contained,
+            final BAMIteratorFilter.FilteringIteratorState expectedState)
+    {
+        SAMRecord samRec = getSAMRecord(refIndex, start, length);
+        BAMQueryMultipleIntervalsIteratorFilter it = new BAMQueryMultipleIntervalsIteratorFilter(query, contained);
+        Assert.assertEquals(it.compareToFilter(samRec), expectedState);
+    }
+
+    /**
+     * Creates a SAMRecord on the given reference at the given alignment start, filling in random bases to the given length.
+     */
+    private SAMRecord getSAMRecord(final int refIndex, final int start, final int length) {
+        final byte[] bases = new byte[length];
+
+        SAMFileHeader samHeader = new SAMFileHeader();
+        samHeader.setSequenceDictionary(
+            new SAMSequenceDictionary(
+                Arrays.asList(
+                    new SAMSequenceRecord("chr1", 200),
+                    new SAMSequenceRecord("chr2", 200))
+            )
+        );
+        SAMRecord samRec = new SAMRecord(samHeader);
+        for (int i = 0; i < length; ++i) {
+            bases[i] = BASES[random.nextInt(BASES.length)];
+        }
+        samRec.setReadBases(bases);
+        samRec.setReferenceIndex(refIndex);
+        samRec.setAlignmentStart(start);
+        samRec.setCigarString(length + "M");
+
+        return samRec;
+    }
+
+}
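The data providers above enumerate the four IntervalComparison outcomes. As a rough guide to how those rows line up, a hedged sketch of the simple closed-interval case only; it deliberately ignores the open-ended convention (start or end <= 0 meaning unbounded) and the reference-index ordering that several rows also exercise, so it is not the filter's actual logic:

    // Hypothetical names; classifies a 1-based closed query interval against a read's alignment span.
    enum Comparison { BEFORE, AFTER, CONTAINED, OVERLAPPING }

    final class IntervalCompareSketch {
        static Comparison compare(final int queryStart, final int queryEnd,
                                  final int readStart, final int readEnd) {
            if (queryEnd < readStart) return Comparison.BEFORE;      // query lies entirely before the read
            if (queryStart > readEnd) return Comparison.AFTER;       // query lies entirely after the read
            if (queryStart <= readStart && queryEnd >= readEnd) {
                return Comparison.CONTAINED;                         // read is fully inside the query
            }
            return Comparison.OVERLAPPING;                           // partial overlap
        }

        public static void main(final String[] args) {
            // Mirrors the row { QueryInterval(0, 10, 15), start 10, length 5 } -> CONTAINED.
            System.out.println(compare(10, 15, 10, 14));
        }
    }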
diff --git a/src/test/java/htsjdk/samtools/BAMRemoteFileTest.java b/src/test/java/htsjdk/samtools/BAMRemoteFileTest.java
new file mode 100644
index 0000000..387de4e
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/BAMRemoteFileTest.java
@@ -0,0 +1,326 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools;
+
+import htsjdk.samtools.util.CloserUtil;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Random;
+
+import static org.testng.Assert.*;
+
+/**
+ * Test BAM file indexing.
+ */
+public class BAMRemoteFileTest {
+    private final File BAM_INDEX_FILE = new File("src/test/resources/htsjdk/samtools/BAMFileIndexTest/index_test.bam.bai");
+    private final File BAM_FILE = new File("src/test/resources/htsjdk/samtools/BAMFileIndexTest/index_test.bam");
+    private final String BAM_URL_STRING = "http://www.broadinstitute.org/~picard/testdata/index_test.bam";
+    private final URL bamURL;
+
+    private final boolean mVerbose = false;
+
+    public BAMRemoteFileTest() throws Exception {
+        bamURL = new URL(BAM_URL_STRING);
+    }
+
+
+    @Test
+    void testRemoteLocal()
+            throws Exception {
+        runLocalRemoteTest(bamURL, BAM_FILE, "chrM", 10400, 10600, false);
+    }
+
+    @Test
+    public void testSpecificQueries()
+            throws Exception {
+        assertEquals(runQueryTest(bamURL, "chrM", 10400, 10600, true), 1);
+        assertEquals(runQueryTest(bamURL, "chrM", 10400, 10600, false), 2);
+    }
+
+    @Test(enabled = true)
+    public void testRandomQueries()
+            throws Exception {
+        runRandomTest(bamURL, 20, new Random());
+    }
+
+    @Test
+    public void testWholeChromosomes() {
+        checkChromosome("chrM", 23);
+        checkChromosome("chr1", 885);
+        checkChromosome("chr2", 837);
+        /***
+         checkChromosome("chr3", 683);
+         checkChromosome("chr4", 633);
+         checkChromosome("chr5", 611);
+         checkChromosome("chr6", 585);
+         checkChromosome("chr7", 521);
+         checkChromosome("chr8", 507);
+         checkChromosome("chr9", 388);
+         checkChromosome("chr10", 477);
+         checkChromosome("chr11", 467);
+         checkChromosome("chr12", 459);
+         checkChromosome("chr13", 327);
+         checkChromosome("chr14", 310);
+         checkChromosome("chr15", 280);
+         checkChromosome("chr16", 278);
+         checkChromosome("chr17", 269);
+         checkChromosome("chr18", 265);
+         checkChromosome("chr19", 178);
+         checkChromosome("chr20", 228);
+         checkChromosome("chr21", 123);
+         checkChromosome("chr22", 121);
+         checkChromosome("chrX", 237);
+         checkChromosome("chrY", 29);
+         ***/
+    }
+
+
+    private void checkChromosome(final String name, final int expectedCount) {
+        int count = runQueryTest(bamURL, name, 0, 0, true);
+        assertEquals(count, expectedCount);
+        count = runQueryTest(bamURL, name, 0, 0, false);
+        assertEquals(count, expectedCount);
+    }
+
+    private void runRandomTest(final URL bamFile, final int count, final Random generator) throws IOException {
+        final int maxCoordinate = 10000000;
+        final List<String> referenceNames = getReferenceNames(bamFile);
+        for (int i = 0; i < count; i++) {
+            final String refName = referenceNames.get(generator.nextInt(referenceNames.size()));
+            final int coord1 = generator.nextInt(maxCoordinate + 1);
+            final int coord2 = generator.nextInt(maxCoordinate + 1);
+            final int startPos = Math.min(coord1, coord2);
+            final int endPos = Math.max(coord1, coord2);
+            System.out.println("Testing query " + refName + ":" + startPos + "-" + endPos + " ...");
+            try {
+                runQueryTest(bamFile, refName, startPos, endPos, true);
+                runQueryTest(bamFile, refName, startPos, endPos, false);
+            } catch (Throwable exc) {
+                String message = "Query test failed: " + refName + ":" + startPos + "-" + endPos;
+                message += ": " + exc.getMessage();
+                throw new RuntimeException(message, exc);
+            }
+        }
+    }
+
+    private List<String> getReferenceNames(final URL bamFile) throws IOException {
+
+
+        final SamReader reader = SamReaderFactory.makeDefault().open(SamInputResource.of(bamFile.openStream()));
+
+        final List<String> result = new ArrayList<String>();
+        final List<SAMSequenceRecord> seqRecords = reader.getFileHeader().getSequenceDictionary().getSequences();
+        for (final SAMSequenceRecord seqRecord : seqRecords) {
+            if (seqRecord.getSequenceName() != null) {
+                result.add(seqRecord.getSequenceName());
+            }
+        }
+        reader.close();
+        return result;
+    }
+
+    private void runLocalRemoteTest(final URL bamURL, final File bamFile, final String sequence, final int startPos, final int endPos, final boolean contained) {
+        verbose("Testing query " + sequence + ":" + startPos + "-" + endPos + " ...");
+        final SamReader reader1 = SamReaderFactory.makeDefault()
+                .disable(SamReaderFactory.Option.EAGERLY_DECODE)
+                .open(SamInputResource.of(bamFile).index(BAM_INDEX_FILE));
+        final SamReader reader2 = SamReaderFactory.makeDefault()
+                .disable(SamReaderFactory.Option.EAGERLY_DECODE)
+                .open(SamInputResource.of(bamURL).index(BAM_INDEX_FILE));
+        final Iterator<SAMRecord> iter1 = reader1.query(sequence, startPos, endPos, contained);
+        final Iterator<SAMRecord> iter2 = reader2.query(sequence, startPos, endPos, contained);
+
+        final List<SAMRecord> records1 = new ArrayList<SAMRecord>();
+        final List<SAMRecord> records2 = new ArrayList<SAMRecord>();
+
+        while (iter1.hasNext()) {
+            records1.add(iter1.next());
+        }
+        while (iter2.hasNext()) {
+            records2.add(iter2.next());
+        }
+
+        assertTrue(records1.size() > 0);
+        assertEquals(records1.size(), records2.size());
+        for (int i = 0; i < records1.size(); i++) {
+            //System.out.println(records1.get(i).format());
+            assertEquals(records1.get(i).getSAMString(), records2.get(i).getSAMString());
+        }
+
+
+    }
+
+    private int runQueryTest(final URL bamURL, final String sequence, final int startPos, final int endPos, final boolean contained) {
+        verbose("Testing query " + sequence + ":" + startPos + "-" + endPos + " ...");
+        final SamReader reader1 = SamReaderFactory.makeDefault()
+                .disable(SamReaderFactory.Option.EAGERLY_DECODE)
+                .open(SamInputResource.of(bamURL).index(BAM_INDEX_FILE));
+        final SamReader reader2 = SamReaderFactory.makeDefault()
+                .disable(SamReaderFactory.Option.EAGERLY_DECODE)
+                .open(SamInputResource.of(bamURL).index(BAM_INDEX_FILE));
+        final Iterator<SAMRecord> iter1 = reader1.query(sequence, startPos, endPos, contained);
+        final Iterator<SAMRecord> iter2 = reader2.iterator();
+        // Compare ordered iterators.
+        // Confirm that iter1 is a subset of iter2 that properly filters.
+        SAMRecord record1 = null;
+        SAMRecord record2 = null;
+        int count1 = 0;
+        int count2 = 0;
+        int beforeCount = 0;
+        int afterCount = 0;
+        while (true) {
+            if (record1 == null && iter1.hasNext()) {
+                record1 = iter1.next();
+                count1++;
+            }
+            if (record2 == null && iter2.hasNext()) {
+                record2 = iter2.next();
+                count2++;
+            }
+            // System.out.println("Iteration:");
+            // System.out.println(" Record1 = " + ((record1 == null) ? "null" : record1.format()));
+            // System.out.println(" Record2 = " + ((record2 == null) ? "null" : record2.format()));
+            if (record1 == null && record2 == null) {
+                break;
+            }
+            if (record1 == null) {
+                checkPassesFilter(false, record2, sequence, startPos, endPos, contained);
+                record2 = null;
+                afterCount++;
+                continue;
+            }
+            assertNotNull(record2);
+            final int ordering = compareCoordinates(record1, record2);
+            if (ordering > 0) {
+                checkPassesFilter(false, record2, sequence, startPos, endPos, contained);
+                record2 = null;
+                beforeCount++;
+                continue;
+            }
+            assertTrue(ordering == 0);
+            checkPassesFilter(true, record1, sequence, startPos, endPos, contained);
+            checkPassesFilter(true, record2, sequence, startPos, endPos, contained);
+            assertEquals(record1.getReadName(), record2.getReadName());
+            assertEquals(record1.getReadString(), record2.getReadString());
+            record1 = null;
+            record2 = null;
+        }
+        CloserUtil.close(reader1);
+        CloserUtil.close(reader2);
+        verbose("Checked " + count1 + " records against " + count2 + " records.");
+        verbose("Found " + (count2 - beforeCount - afterCount) + " records matching.");
+        verbose("Found " + beforeCount + " records before.");
+        verbose("Found " + afterCount + " records after.");
+        return count1;
+    }
+
+    private void checkPassesFilter(final boolean expected, final SAMRecord record, final String sequence, final int startPos, final int endPos, final boolean contained) {
+        final boolean passes = passesFilter(record, sequence, startPos, endPos, contained);
+        if (passes != expected) {
+            System.out.println("Error: Record erroneously " +
+                    (passes ? "passed" : "failed") +
+                    " filter.");
+            System.out.println(" Record: " + record.getSAMString());
+            System.out.println(" Filter: " + sequence + ":" +
+                    startPos + "-" + endPos +
+                    " (" + (contained ? "contained" : "overlapping") + ")");
+            assertEquals(passes, expected);
+        }
+    }
+
+    private boolean passesFilter(final SAMRecord record, final String sequence, final int startPos, final int endPos, final boolean contained) {
+        if (record == null) {
+            return false;
+        }
+        if (!safeEquals(record.getReferenceName(), sequence)) {
+            return false;
+        }
+        final int alignmentStart = record.getAlignmentStart();
+        int alignmentEnd = record.getAlignmentEnd();
+        if (alignmentStart <= 0) {
+            assertTrue(record.getReadUnmappedFlag());
+            return false;
+        }
+        if (alignmentEnd <= 0) {
+            // For indexing-only records, treat as single base alignment.
+            assertTrue(record.getReadUnmappedFlag());
+            alignmentEnd = alignmentStart;
+        }
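+        // 'contained' requires the whole alignment to lie within [startPos, endPos];
+        // otherwise any overlap with the interval is enough. A bound of 0 means unbounded.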
+        if (contained) {
+            if (startPos != 0 && alignmentStart < startPos) {
+                return false;
+            }
+            if (endPos != 0 && alignmentEnd > endPos) {
+                return false;
+            }
+        } else {
+            if (startPos != 0 && alignmentEnd < startPos) {
+                return false;
+            }
+            if (endPos != 0 && alignmentStart > endPos) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    private int compareCoordinates(final SAMRecord record1, final SAMRecord record2) {
+        final int seqIndex1 = record1.getReferenceIndex();
+        final int seqIndex2 = record2.getReferenceIndex();
+        if (seqIndex1 == -1) {
+            return ((seqIndex2 == -1) ? 0 : -1);
+        } else if (seqIndex2 == -1) {
+            return 1;
+        }
+        int result = seqIndex1 - seqIndex2;
+        if (result != 0) {
+            return result;
+        }
+        result = record1.getAlignmentStart() - record2.getAlignmentStart();
+        return result;
+    }
+
+    private boolean safeEquals(final Object o1, final Object o2) {
+        if (o1 == o2) {
+            return true;
+        } else if (o1 == null || o2 == null) {
+            return false;
+        } else {
+            return o1.equals(o2);
+        }
+    }
+
+    private void verbose(final String text) {
+        if (mVerbose) {
+            System.out.println("# " + text);
+        }
+    }
+}
diff --git a/src/tests/java/htsjdk/samtools/BinTest.java b/src/test/java/htsjdk/samtools/BinTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/BinTest.java
rename to src/test/java/htsjdk/samtools/BinTest.java
diff --git a/src/tests/java/htsjdk/samtools/CRAMBAIIndexerTest.java b/src/test/java/htsjdk/samtools/CRAMBAIIndexerTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/CRAMBAIIndexerTest.java
rename to src/test/java/htsjdk/samtools/CRAMBAIIndexerTest.java
diff --git a/src/test/java/htsjdk/samtools/CRAMCRAIIndexerTest.java b/src/test/java/htsjdk/samtools/CRAMCRAIIndexerTest.java
new file mode 100644
index 0000000..19284b2
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/CRAMCRAIIndexerTest.java
@@ -0,0 +1,178 @@
+package htsjdk.samtools;
+
+import htsjdk.samtools.cram.CRAIEntry;
+import htsjdk.samtools.cram.build.CramContainerIterator;
+import htsjdk.samtools.cram.ref.ReferenceSource;
+import htsjdk.samtools.reference.FakeReferenceSequenceFile;
+import htsjdk.samtools.seekablestream.ByteArraySeekableStream;
+import htsjdk.samtools.seekablestream.SeekableFileStream;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.*;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Companion to CRAMBAIIndexerTest, for testing CRAI indices created on CRAM
+ * streams.
+ */
+public class CRAMCRAIIndexerTest {
+
+    @Test
+    public void testCRAIIndexerFromContainer() throws IOException {
+        final File CRAMFile = new File("src/test/resources/htsjdk/samtools/cram/test2.cram");
+        final File refFile = new File("src/test/resources/htsjdk/samtools/cram/test2.fa");
+        ReferenceSource refSource = new ReferenceSource(refFile);
+        CRAMFileReader reader = new CRAMFileReader(
+                CRAMFile,
+                null,
+                refSource,
+                ValidationStringency.STRICT);
+        SAMFileHeader samHeader = reader.getFileHeader();
+        reader.close();
+
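+        // Index the containers directly from the raw CRAM stream; the CRAI entries are built
+        // from container/slice metadata, so individual records do not need to be decoded here.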
+        FileInputStream fis = new FileInputStream(CRAMFile);
+        CramContainerIterator cit = new CramContainerIterator(fis);
+        ByteArrayOutputStream bos = new ByteArrayOutputStream();
+
+        CRAMCRAIIndexer craiIndexer = new CRAMCRAIIndexer(bos, samHeader);
+        while (cit.hasNext()) {
+            craiIndexer.processContainer(cit.next());
+        }
+        craiIndexer.finish();
+        bos.close();
+
+        List<CRAIEntry> craiEntries = CRAMCRAIIndexer.readIndex(new ByteArrayInputStream(bos.toByteArray())).getCRAIEntries();
+        Assert.assertEquals(craiEntries.size(), 1);
+    }
+
+    @Test
+    public void testCRAIIndexerFromStream() throws IOException {
+        final File CRAMFile = new File("src/test/resources/htsjdk/samtools/cram/test2.cram");
+        final File refFile = new File("src/test/resources/htsjdk/samtools/cram/test2.fa");
+        ReferenceSource refSource = new ReferenceSource(refFile);
+
+        // get the header to use
+        CRAMFileReader reader = new CRAMFileReader(
+                CRAMFile,
+                null,
+                refSource,
+                ValidationStringency.STRICT);
+        SAMFileHeader samHeader = reader.getFileHeader();
+        reader.close();
+
+        ByteArrayOutputStream bos = new ByteArrayOutputStream();
+        CRAMCRAIIndexer craiIndexer = new CRAMCRAIIndexer(bos, samHeader);
+        craiIndexer.writeIndex(new SeekableFileStream(CRAMFile), bos);
+
+        List<CRAIEntry> craiEntries = CRAMCRAIIndexer.readIndex(new ByteArrayInputStream(bos.toByteArray())).getCRAIEntries();
+        Assert.assertEquals(craiEntries.size(), 1);
+    }
+
+    @Test
+    public void testMultiRefContainer() throws IOException, IllegalAccessException {
+        SAMFileHeader samFileHeader = new SAMFileHeader();
+        samFileHeader.setSortOrder(SAMFileHeader.SortOrder.coordinate);
+
+        samFileHeader.addSequence(new SAMSequenceRecord("1", 10));
+        samFileHeader.addSequence(new SAMSequenceRecord("2", 10));
+        samFileHeader.addSequence(new SAMSequenceRecord("3", 10));
+
+        ReferenceSource source = new ReferenceSource(new FakeReferenceSequenceFile(samFileHeader.getSequenceDictionary().getSequences()));
+
+        ByteArrayOutputStream cramBAOS = new ByteArrayOutputStream();
+        ByteArrayOutputStream indexBAOS = new ByteArrayOutputStream();
+
+        // force the containers to be small to ensure there are 2
+        int originalDefaultSize = CRAMContainerStreamWriter.DEFAULT_RECORDS_PER_SLICE;
+        CRAMContainerStreamWriter.DEFAULT_RECORDS_PER_SLICE = 3;
+
+        try {
+            CRAMContainerStreamWriter containerWriter = new CRAMContainerStreamWriter(cramBAOS, indexBAOS, source, samFileHeader, "test");
+            containerWriter.writeHeader(samFileHeader);
+
+            containerWriter.writeAlignment(createSAMRecord(samFileHeader, 0, 0, 1));
+            containerWriter.writeAlignment(createSAMRecord(samFileHeader, 1, 1, 2));
+            containerWriter.writeAlignment(createSAMRecord(samFileHeader, 2, 1, 3));
+
+            containerWriter.writeAlignment(createSAMRecord(samFileHeader, 3, 1, 3));
+            containerWriter.writeAlignment(createSAMRecord(samFileHeader, 4, 2, 3));
+            containerWriter.writeAlignment(createSAMRecord(samFileHeader, 5, 2, 4));
+
+            containerWriter.finish(true);
+        }
+        finally {
+            // failing to reset this can cause unrelated tests to fail if this test fails
+            CRAMContainerStreamWriter.DEFAULT_RECORDS_PER_SLICE = originalDefaultSize;
+        }
+
+        // These tests all fail due to https://github.com/samtools/htsjdk/issues/531
+        // (metadata is incorrect after .crai->.bai conversion)
+        //SeekableStream baiStream = CRAIIndex.openCraiFileAsBaiStream(
+        //        new ByteArrayInputStream(indexBAOS.toByteArray()), samFileHeader.getSequenceDictionary());
+        //BAMIndex index = new CachingBAMFileIndex(baiStream, samFileHeader.getSequenceDictionary());
+        //final BAMIndexMetaData metaData_0 = index.getMetaData(0);
+        //Assert.assertNotNull(metaData_0);
+        //Assert.assertEquals(metaData_0.getAlignedRecordCount(), 1);
+        //final BAMIndexMetaData metaData_1 = index.getMetaData(1);
+        //Assert.assertNotNull(metaData_1);
+        //Assert.assertEquals(metaData_1.getAlignedRecordCount(), 3);
+        //final BAMIndexMetaData metaData_2 = index.getMetaData(2);
+        //Assert.assertNotNull(metaData_2);
+        //Assert.assertEquals(metaData_2.getAlignedRecordCount(), 2);
+
+        // NOTE: this test uses the default index format created by CRAMContainerStreamWriter,
+        // which is currently .bai.
+        CRAMFileReader cramReader = new CRAMFileReader(
+                new ByteArraySeekableStream(cramBAOS.toByteArray()),
+                new ByteArraySeekableStream(indexBAOS.toByteArray()),
+                source,
+                ValidationStringency.DEFAULT_STRINGENCY
+        );
+        Assert.assertTrue(cramReader.hasIndex());
+
+        Iterator<SAMRecord> it = cramReader.query(new QueryInterval[]{new QueryInterval(0, 0, 5)}, false);
+        long count = getIteratorCount(it);
+        Assert.assertEquals(count, 1);
+
+        it = cramReader.query(new QueryInterval[]{new QueryInterval(1, 0, 5)}, false);
+        count = getIteratorCount(it);
+        Assert.assertEquals(count, 3);
+
+        it = cramReader.query(new QueryInterval[]{new QueryInterval(2, 0, 5)}, false);
+        count = getIteratorCount(it);
+        Assert.assertEquals(count, 2);
+    }
+
+    private static SAMRecord createSAMRecord(SAMFileHeader header, int recordIndex, int seqId, int start) {
+        byte[] bases = "AAAAA".getBytes();
+
+        final SAMRecord record = new SAMRecord(header);
+        record.setReferenceIndex(seqId);
+        record.setAlignmentStart(start);
+        record.setReadBases(bases);
+        record.setBaseQualities(bases);
+        record.setReadName(Integer.toString(recordIndex));
+
+        return record;
+    }
+
+    @Test(expectedExceptions = SAMException.class)
+    public void testRequireCoordinateSortOrder() {
+        SAMFileHeader header = new SAMFileHeader();
+        header.setSortOrder(SAMFileHeader.SortOrder.queryname);
+
+        new CRAMCRAIIndexer(new ByteArrayOutputStream(), header);
+    }
+
+    private long getIteratorCount(Iterator<SAMRecord> it) {
+        long count = 0;
+        while (it.hasNext()) {
+            count++;
+            it.next();
+        }
+        return count;
+    }
+
+}
\ No newline at end of file
diff --git a/src/test/java/htsjdk/samtools/CRAMComplianceTest.java b/src/test/java/htsjdk/samtools/CRAMComplianceTest.java
new file mode 100644
index 0000000..81cd2f9
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/CRAMComplianceTest.java
@@ -0,0 +1,150 @@
+package htsjdk.samtools;
+
+import htsjdk.samtools.cram.common.CramVersions;
+import htsjdk.samtools.cram.ref.ReferenceSource;
+import htsjdk.samtools.seekablestream.SeekableStream;
+import htsjdk.samtools.util.Log;
+import org.testng.Assert;
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Created by vadim on 28/04/2015.
+ */
+public class CRAMComplianceTest {
+
+    @DataProvider(name = "test1")
+    public Object[][] createData1() {
+        return new Object[][]{
+                {"auxf#values"},
+                {"c1#bounds"},
+                {"c1#clip"},
+                {"c1#noseq"},
+                {"c1#pad1"},
+                {"c1#pad2"},
+                {"c1#pad3"},
+                {"c1#unknown"},
+                {"ce#1"},
+                {"ce#2"},
+                {"ce#5b"},
+                {"ce#5"},
+                {"ce#large_seq"},
+                {"ce#supp"},
+                {"ce#tag_depadded"},
+                {"ce#tag_padded"},
+                {"ce#unmap1"},
+                {"ce#unmap2"},
+                {"ce#unmap"},
+                {"xx#blank"},
+                {"xx#large_aux2"},
+                {"xx#large_aux"},
+                {"xx#minimal"},
+                {"xx#pair"},
+                {"xx#rg"},
+                {"xx#triplet"},
+                {"xx#unsorted"},
+        };
+    }
+
+
+    @BeforeTest
+    public void beforeTest() {
+        Log.setGlobalLogLevel(Log.LogLevel.ERROR);
+    }
+
+    private static class TestCase {
+        File bamFile;
+        File refFile;
+        File cramFile_21;
+        File cramFile_30;
+
+        public TestCase(File root, String name) {
+            bamFile = new File(root, name + ".sam");
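+            // reference files are shared per test group, e.g. "ce#tag_padded" resolves to "ce.fa"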
+            refFile = new File(root, name.split("#")[0] + ".fa");
+            cramFile_21 = new File(root, name + ".2.1.cram");
+            cramFile_30 = new File(root, name + ".3.0.cram");
+        }
+    }
+
+    @Test(dataProvider = "test1")
+    public void test(String name) throws IOException {
+        TestCase t = new TestCase(new File("src/test/resources/htsjdk/samtools/cram/"), name);
+
+        ReferenceSource source = new ReferenceSource(t.refFile);
+        SamReader reader = SamReaderFactory.make().validationStringency(ValidationStringency.SILENT).open(t.bamFile);
+
+        final SAMRecordIterator samRecordIterator = reader.iterator();
+        List<SAMRecord> samRecords = new ArrayList<SAMRecord>();
+        while (samRecordIterator.hasNext())
+            samRecords.add(samRecordIterator.next());
+        SAMFileHeader samFileHeader = reader.getFileHeader();
+        reader.close();
+
+        ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        CRAMFileWriter cramFileWriter = new CRAMFileWriter(baos, source, samFileHeader, name);
+        for (SAMRecord samRecord : samRecords) {
+            cramFileWriter.addAlignment(samRecord);
+        }
+        cramFileWriter.close();
+
+
+        CRAMFileReader cramFileReader = new CRAMFileReader(new ByteArrayInputStream(baos.toByteArray()), (SeekableStream)null, source, ValidationStringency.SILENT);
+        SAMRecordIterator cramFileReaderIterator = cramFileReader.getIterator();
+        for (SAMRecord samRecord : samRecords) {
+            Assert.assertTrue(cramFileReaderIterator.hasNext());
+            SAMRecord restored = cramFileReaderIterator.next();
+            Assert.assertNotNull(restored);
+            assertSameRecords(CramVersions.DEFAULT_CRAM_VERSION.major, samRecord, restored);
+        }
+        Assert.assertFalse(cramFileReaderIterator.hasNext());
+
+        //v2.1 test
+        cramFileReader = new CRAMFileReader(new FileInputStream(t.cramFile_21), (SeekableStream)null, source, ValidationStringency.SILENT);
+        cramFileReaderIterator = cramFileReader.getIterator();
+        for (SAMRecord samRecord : samRecords) {
+            Assert.assertTrue(cramFileReaderIterator.hasNext());
+            SAMRecord restored = cramFileReaderIterator.next();
+            Assert.assertNotNull(restored);
+            assertSameRecords(CramVersions.CRAM_v2_1.major, samRecord, restored);
+        }
+        Assert.assertFalse(cramFileReaderIterator.hasNext());
+
+        //v3.0 test
+        cramFileReader = new CRAMFileReader(new FileInputStream(t.cramFile_30), (SeekableStream)null, source, ValidationStringency.SILENT);
+        cramFileReaderIterator = cramFileReader.getIterator();
+        for (SAMRecord samRecord : samRecords) {
+            Assert.assertTrue(cramFileReaderIterator.hasNext());
+            SAMRecord restored = cramFileReaderIterator.next();
+            Assert.assertNotNull(restored);
+            assertSameRecords(CramVersions.CRAM_v3.major, samRecord, restored);
+        }
+        Assert.assertFalse(cramFileReaderIterator.hasNext());
+    }
+
+    private void assertSameRecords(int majorVersion, SAMRecord record1, SAMRecord record2) {
+        Assert.assertEquals(record2.getFlags(), record1.getFlags());
+        Assert.assertEquals(record2.getReadName(), record1.getReadName());
+        Assert.assertEquals(record2.getReferenceName(), record1.getReferenceName());
+        Assert.assertEquals(record2.getAlignmentStart(), record1.getAlignmentStart());
+
+        /*
+         * Known issue: CRAM v2.1 does not handle reads with missing bases correctly, which
+         * causes '*' bases to appear when reading the CRAM back. Skip the base comparison
+         * assertion in that case.
+         */
+        if (record1.getReadBases() != SAMRecord.NULL_SEQUENCE || majorVersion >= CramVersions.CRAM_v3.major) {
+            Assert.assertEquals(record2.getReadBases(), record1.getReadBases());
+        }
+
+        Assert.assertEquals(record2.getBaseQualities(), record1.getBaseQualities());
+    }
+
+}
diff --git a/src/test/java/htsjdk/samtools/CRAMContainerStreamWriterTest.java b/src/test/java/htsjdk/samtools/CRAMContainerStreamWriterTest.java
new file mode 100644
index 0000000..b26f4b0
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/CRAMContainerStreamWriterTest.java
@@ -0,0 +1,184 @@
+package htsjdk.samtools;
+
+import htsjdk.samtools.cram.ref.ReferenceSource;
+import htsjdk.samtools.reference.InMemoryReferenceSequenceFile;
+import htsjdk.samtools.seekablestream.SeekableMemoryStream;
+import htsjdk.samtools.util.CloseableIterator;
+import htsjdk.samtools.util.Log;
+import htsjdk.samtools.util.Log.LogLevel;
+import htsjdk.samtools.util.RuntimeIOException;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.StringWriter;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+public class CRAMContainerStreamWriterTest {
+
+    @BeforeClass
+    public void initClass() {
+        Log.setGlobalLogLevel(LogLevel.ERROR);
+    }
+
+    private List<SAMRecord> createRecords(int count) {
+        final List<SAMRecord> list = new ArrayList<SAMRecord>(count);
+        final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
+        if (builder.getHeader().getReadGroups().isEmpty()) {
+            throw new IllegalStateException("Read group expected in the header");
+        }
+
+        int posInRef = 1;
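+        // Pairs alternate between reference index 0 and 1, and the start positions advance
+        // monotonically so the records can then be coordinate-sorted below.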
+        for (int i = 0; i < count / 2; i++) {
+            builder.addPair(Integer.toString(i), i % 2, posInRef += 1, posInRef += 3);
+        }
+        list.addAll(builder.getRecords());
+
+        Collections.sort(list, new SAMRecordCoordinateComparator());
+
+        return list;
+    }
+
+    private SAMFileHeader createSAMHeader(SAMFileHeader.SortOrder sortOrder) {
+        final SAMFileHeader header = new SAMFileHeader();
+        header.setSortOrder(sortOrder);
+        header.addSequence(new SAMSequenceRecord("chr1", 123));
+        header.addSequence(new SAMSequenceRecord("chr2", 123));
+        SAMReadGroupRecord readGroupRecord = new SAMReadGroupRecord("1");
+        header.addReadGroup(readGroupRecord);
+        return header;
+    }
+
+    private ReferenceSource createReferenceSource() {
+        final byte[] refBases = new byte[1024 * 1024];
+        Arrays.fill(refBases, (byte) 'A');
+        InMemoryReferenceSequenceFile rsf = new InMemoryReferenceSequenceFile();
+        rsf.add("chr1", refBases);
+        rsf.add("chr2", refBases);
+        return new ReferenceSource(rsf);
+    }
+
+    private void doTest(final List<SAMRecord> samRecords, final ByteArrayOutputStream outStream, final OutputStream indexStream) {
+        final SAMFileHeader header = createSAMHeader(SAMFileHeader.SortOrder.coordinate);
+        final ReferenceSource refSource = createReferenceSource();
+
+        final CRAMContainerStreamWriter containerStream = new CRAMContainerStreamWriter(outStream, indexStream, refSource, header, "test");
+        containerStream.writeHeader(header);
+
+        for (SAMRecord record : samRecords) {
+            containerStream.writeAlignment(record);
+        }
+        containerStream.finish(true); // finish and issue EOF
+
+        // read all the records back in
+        final CRAMFileReader cReader = new CRAMFileReader(null, new ByteArrayInputStream(outStream.toByteArray()), refSource);
+        final SAMRecordIterator iterator = cReader.getIterator();
+        int count = 0;
+        while (iterator.hasNext()) {
+            SAMRecord actualRecord = iterator.next();
+            count++;
+        }
+        Assert.assertEquals(count, samRecords.size());
+    }
+
+    @Test(description = "Test CRAMContainerStream no index")
+    public void testCRAMContainerStreamNoIndex() {
+        final List<SAMRecord> samRecords = createRecords(100);
+        final ByteArrayOutputStream outStream = new ByteArrayOutputStream();
+        doTest(samRecords, outStream, null);
+    }
+
+    @Test(description = "Test CRAMContainerStream aggregating multiple partitions")
+    public void testCRAMContainerAggregatePartitions() throws IOException {
+        final SAMFileHeader header = createSAMHeader(SAMFileHeader.SortOrder.coordinate);
+        final ReferenceSource refSource = createReferenceSource();
+
+        // create a bunch of records and write them out to separate streams in groups
+        final int nRecs = 100;
+        final int recsPerPartition = 20;
+        final int nPartitions = nRecs/recsPerPartition;
+
+        final List<SAMRecord> samRecords = createRecords(nRecs);
+        final ArrayList<ByteArrayOutputStream> byteStreamArray = new ArrayList<>(nPartitions);
+
+        for (int partition = 0, recNum = 0; partition < nPartitions; partition++) {
+            byteStreamArray.add(partition, new ByteArrayOutputStream());
+            final CRAMContainerStreamWriter containerStream =
+                    new CRAMContainerStreamWriter(byteStreamArray.get(partition), null, refSource, header, "test");
+
+            // don't write a header for the intermediate streams
+            for (int i = 0; i <  recsPerPartition; i++) {
+                containerStream.writeAlignment(samRecords.get(recNum++));
+            }
+            containerStream.finish(false); // finish but don't issue EOF container
+        }
+
+        // now create the final aggregate file by concatenating the individual streams, but this
+        // time with a CRAM and SAM header at the front and an EOF container at the end
+        final ByteArrayOutputStream aggregateStream = new ByteArrayOutputStream();
+        final CRAMContainerStreamWriter aggregateContainerStreamWriter = new CRAMContainerStreamWriter(aggregateStream, null, refSource, header, "test");
+        aggregateContainerStreamWriter.writeHeader(header); // write out one CRAM and SAM header
+        for (int j = 0; j < nPartitions; j++) {
+            byteStreamArray.get(j).writeTo(aggregateStream);
+        }
+        aggregateContainerStreamWriter.finish(true);// write out the EOF container
+
+        // now iterate through all the records in the aggregate file
+        final CRAMFileReader cReader = new CRAMFileReader(null, new ByteArrayInputStream(aggregateStream.toByteArray()), refSource);
+        final SAMRecordIterator iterator = cReader.getIterator();
+        int count = 0;
+        while (iterator.hasNext()) {
+            Assert.assertEquals(iterator.next().toString(), samRecords.get(count).toString());
+            count++;
+        }
+        Assert.assertEquals(count, nRecs);
+    }
+
+    @Test(description = "Test CRAMContainerStream with index")
+    public void testCRAMContainerStreamWithIndex() throws IOException {
+        final List<SAMRecord> samRecords = createRecords(100);
+        final ByteArrayOutputStream outStream = new ByteArrayOutputStream();
+        final ByteArrayOutputStream indexStream = new ByteArrayOutputStream();
+        doTest(samRecords, outStream, indexStream);
+        outStream.close();
+        indexStream.close();
+
+        // write the file out
+        final File cramTempFile = File.createTempFile("cramContainerStreamTest", ".cram");
+        cramTempFile.deleteOnExit();
+        final OutputStream cramFileStream = new FileOutputStream(cramTempFile);
+        cramFileStream.write(outStream.toByteArray());
+        cramFileStream.close();
+
+        // write the index out
+        final File indexTempFile = File.createTempFile("cramContainerStreamTest", ".bai");
+        indexTempFile.deleteOnExit();
+        OutputStream indexFileStream = new FileOutputStream(indexTempFile);
+        indexFileStream.write(indexStream.toByteArray());
+        indexFileStream.close();
+
+        final ReferenceSource refSource = createReferenceSource();
+        final CRAMFileReader reader = new CRAMFileReader(
+                cramTempFile,
+                indexTempFile,
+                refSource,
+                ValidationStringency.SILENT);
+        final CloseableIterator<SAMRecord> iterator = reader.query(new QueryInterval[]{new QueryInterval(1, 10, 10)}, false);
+        int count = 0;
+        while (iterator.hasNext()) {
+            SAMRecord actualRecord = iterator.next();
+            count++;
+        }
+        Assert.assertEquals(count, 2);
+    }
+
+}
diff --git a/src/test/java/htsjdk/samtools/CRAMEdgeCasesTest.java b/src/test/java/htsjdk/samtools/CRAMEdgeCasesTest.java
new file mode 100644
index 0000000..e77e0e8
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/CRAMEdgeCasesTest.java
@@ -0,0 +1,158 @@
+package htsjdk.samtools;
+
+import htsjdk.samtools.cram.CRAMException;
+import htsjdk.samtools.cram.ref.ReferenceSource;
+import htsjdk.samtools.reference.InMemoryReferenceSequenceFile;
+import htsjdk.samtools.seekablestream.SeekableStream;
+import htsjdk.samtools.util.Log;
+import org.testng.Assert;
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.Test;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Iterator;
+
+/**
+ * A collection of CRAM tests based on round-trip comparison of SAMRecords before and after CRAM compression.
+ */
+public class CRAMEdgeCasesTest {
+
+    @BeforeTest
+    public void beforeTest() {
+        Log.setGlobalLogLevel(Log.LogLevel.ERROR);
+    }
+
+    @Test
+    public void testUnsorted() throws IOException {
+        final SAMRecordSetBuilder builder = new SAMRecordSetBuilder(false, SAMFileHeader.SortOrder.unsorted);
+        builder.addFrag("1", 0, 2, false);
+        builder.addFrag("1", 0, 1, false);
+        final Collection<SAMRecord> records = builder.getRecords();
+
+        testRecords(records, records.iterator().next().getReadBases());
+    }
+
+    // unit test for CRAMException:
+    // testing for a contig found in the reads but not in the reference
+    @Test(expectedExceptions = CRAMException.class)
+    public void testContigNotFoundInRef() throws IOException {
+        final File CRAMFile = new File("src/test/resources/htsjdk/samtools/cram/CRAMException/testContigNotInRef.cram");
+        final File refFile = new File("src/test/resources/htsjdk/samtools/cram/CRAMException/testContigNotInRef.fa");
+        final ReferenceSource refSource = new ReferenceSource(refFile);
+        final CRAMIterator iterator = new CRAMIterator(new FileInputStream(CRAMFile), refSource, ValidationStringency.STRICT);
+        while (iterator.hasNext()) {
+            iterator.next();
+        }
+    }
+
+    @Test
+    public void testBizilionTags() throws IOException {
+        final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
+        builder.addFrag("1", 0, 1, false);
+        SAMRecord record = builder.getRecords().iterator().next();
+        for (int i = 0; i < 1000; i++) {
+            char b1 = (char) ('A' + i / 26);
+            char b2 = (char) ('A' + i % 26);
+            String tag = new String(new char[]{b1, b2});
+            if ("RG".equals(tag)) {
+                continue;
+            }
+            record.setAttribute(tag, i);
+        }
+
+        record.setAlignmentStart(1);
+        testSingleRecord(record, record.getReadBases());
+    }
+
+    @Test
+    public void testNullsAndBeyondRef() throws IOException {
+        testSingleRecord("A".getBytes(), "!".getBytes(), "A".getBytes());
+        testSingleRecord("A".getBytes(), SAMRecord.NULL_QUALS, "A".getBytes());
+        testSingleRecord(SAMRecord.NULL_SEQUENCE, SAMRecord.NULL_QUALS, "A".getBytes());
+        testSingleRecord("AAA".getBytes(), "!!!".getBytes(), "A".getBytes());
+    }
+
+    private void testRecords(Collection<SAMRecord> records, byte[] ref) throws IOException {
+        ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        InMemoryReferenceSequenceFile refFile = new InMemoryReferenceSequenceFile();
+        refFile.add("chr1", ref);
+        ReferenceSource source = new ReferenceSource(refFile);
+        final SAMFileHeader header = records.iterator().next().getHeader();
+        CRAMFileWriter cramFileWriter = new CRAMFileWriter(baos, source, header, "whatever");
+
+        Iterator<SAMRecord> it = records.iterator();
+        while (it.hasNext()) {
+            SAMRecord record = it.next();
+            cramFileWriter.addAlignment(record);
+        }
+        cramFileWriter.close();
+
+        CRAMFileReader cramFileReader = new CRAMFileReader(new ByteArrayInputStream(baos.toByteArray()), (SeekableStream) null, source, ValidationStringency.SILENT);
+        final SAMRecordIterator iterator = cramFileReader.getIterator();
+        Assert.assertTrue(iterator.hasNext());
+
+        it = records.iterator();
+        while (it.hasNext()) {
+            SAMRecord record = it.next();
+            SAMRecord s2 = iterator.next();
+            Assert.assertNotNull(s2);
+            Assert.assertEquals(record.getFlags(), s2.getFlags());
+            Assert.assertEquals(record.getReadName(), s2.getReadName());
+            Assert.assertEquals(record.getReferenceName(), s2.getReferenceName());
+            Assert.assertEquals(record.getAlignmentStart(), s2.getAlignmentStart());
+            Assert.assertEquals(record.getReadBases(), s2.getReadBases());
+            Assert.assertEquals(record.getBaseQualities(), s2.getBaseQualities());
+        }
+        Assert.assertFalse(iterator.hasNext());
+    }
+
+    private void testSingleRecord(SAMRecord record, byte[] ref) throws IOException {
+        ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        InMemoryReferenceSequenceFile refFile = new InMemoryReferenceSequenceFile();
+        refFile.add("chr1", ref);
+        ReferenceSource source = new ReferenceSource(refFile);
+        CRAMFileWriter cramFileWriter = new CRAMFileWriter(baos, source, record.getHeader(), "whatever");
+        cramFileWriter.addAlignment(record);
+        cramFileWriter.close();
+
+        CRAMFileReader cramFileReader = new CRAMFileReader(new ByteArrayInputStream(baos.toByteArray()), (SeekableStream) null, source, ValidationStringency.SILENT);
+        final SAMRecordIterator iterator = cramFileReader.getIterator();
+        Assert.assertTrue(iterator.hasNext());
+        SAMRecord s2 = iterator.next();
+        Assert.assertNotNull(s2);
+        Assert.assertFalse(iterator.hasNext());
+
+        Assert.assertEquals(record.getFlags(), s2.getFlags());
+        Assert.assertEquals(record.getReadName(), s2.getReadName());
+        Assert.assertEquals(record.getReferenceName(), s2.getReferenceName());
+        Assert.assertEquals(record.getAlignmentStart(), s2.getAlignmentStart());
+        Assert.assertEquals(record.getReadBases(), s2.getReadBases());
+        Assert.assertEquals(record.getBaseQualities(), s2.getBaseQualities());
+    }
+
+    private void testSingleRecord(byte[] bases, byte[] scores, byte[] ref) throws IOException {
+        SAMFileHeader header = new SAMFileHeader();
+        header.addReadGroup(new SAMReadGroupRecord("1"));
+        header.addSequence(new SAMSequenceRecord("chr1", ref.length));
+        SAMRecord s = new SAMRecord(header);
+        s.setReadBases(bases);
+        s.setBaseQualities(scores);
+        s.setFlags(0);
+        s.setAlignmentStart(1);
+        s.setReferenceName("chr1");
+        s.setReadName("1");
+        if (bases == SAMRecord.NULL_SEQUENCE) {
+            s.setCigarString("10M");
+        } else {
+            s.setCigarString(s.getReadLength() + "M");
+        }
+
+        testSingleRecord(s, ref);
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/CRAMFileBAIIndexTest.java b/src/test/java/htsjdk/samtools/CRAMFileBAIIndexTest.java
new file mode 100644
index 0000000..eba2b4c
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/CRAMFileBAIIndexTest.java
@@ -0,0 +1,301 @@
+package htsjdk.samtools;
+
+import htsjdk.samtools.cram.build.ContainerParser;
+import htsjdk.samtools.cram.build.CramContainerIterator;
+import htsjdk.samtools.cram.ref.ReferenceSource;
+import htsjdk.samtools.cram.structure.AlignmentSpan;
+import htsjdk.samtools.cram.structure.Container;
+import htsjdk.samtools.reference.FakeReferenceSequenceFile;
+import htsjdk.samtools.seekablestream.ByteArraySeekableStream;
+import htsjdk.samtools.seekablestream.SeekableFileStream;
+import htsjdk.samtools.util.CloseableIterator;
+import htsjdk.samtools.util.CoordMath;
+import htsjdk.samtools.util.IOUtil;
+import htsjdk.samtools.util.Log;
+import org.testng.Assert;
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.Test;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.Map;
+import java.util.TreeSet;
+
+/**
+ * A collection of tests for CRAM BAI index write/read that use the BAMFileIndexTest/index_test.bam file as the source of the test data.
+ * The test creates a BAI index of the CRAM file beforehand.
+ * The scan* tests check that, for every record in the BAM file, the query returns the same records from the CRAM file.
+ * Created by Vadim on 14/03/2015.
+ */
+public class CRAMFileBAIIndexTest {
+    private final File BAM_FILE = new File("src/test/resources/htsjdk/samtools/BAMFileIndexTest/index_test.bam");
+    private File cramFile;
+    private File indexFile;
+    private byte[] cramBytes;
+    private byte[] baiBytes;
+    private ReferenceSource source;
+    private int nofUnmappedReads = 279;
+    private int nofMappedReads = 9721;
+    private int nofReads = 10000;
+    private int nofReadsPerContainer = 1000;
+
+
+    // Mixes testing queryAlignmentStart with each CRAMFileReader constructor.
+    // Separate into individual tests.
+    @Test
+    public void testConstructors () throws IOException {
+        CRAMFileReader reader = new CRAMFileReader(cramFile, indexFile, source, ValidationStringency.SILENT);
+        CloseableIterator<SAMRecord> iterator = reader.queryAlignmentStart("chrM", 1500);
+        Assert.assertTrue(iterator.hasNext());
+        SAMRecord record = iterator.next();
+
+        Assert.assertEquals(record.getReferenceName(), "chrM");
+        Assert.assertTrue(record.getAlignmentStart() >= 1500);
+        reader.close();
+
+        reader = new CRAMFileReader(new SeekableFileStream(cramFile), indexFile, source, ValidationStringency.SILENT);
+        iterator = reader.queryAlignmentStart("chrM", 1500);
+        Assert.assertTrue(iterator.hasNext());
+        record = iterator.next();
+
+        Assert.assertEquals(record.getReferenceName(), "chrM");
+        Assert.assertTrue(record.getAlignmentStart() >= 1500);
+        reader.close();
+
+        reader = new CRAMFileReader(new SeekableFileStream(cramFile), new SeekableFileStream(indexFile), source, ValidationStringency.SILENT);
+        iterator = reader.queryAlignmentStart("chrM", 1500);
+        Assert.assertTrue(iterator.hasNext());
+        record = iterator.next();
+
+        Assert.assertEquals(record.getReferenceName(), "chrM");
+        Assert.assertTrue(record.getAlignmentStart() >= 1500);
+        reader.close();
+
+        reader = new CRAMFileReader(new SeekableFileStream(cramFile), (File)null, source, ValidationStringency.SILENT);
+        try {
+            reader.queryAlignmentStart("chrM", 1500);
+            Assert.fail("Expecting query to fail when there is no index");
+        } catch (SAMException e) {
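+            // expected: querying is not possible without an index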
+        }
+        reader.close();
+
+        reader = new CRAMFileReader(new SeekableFileStream(cramFile), (SeekableFileStream)null, source, ValidationStringency.SILENT);
+        try {
+            reader.queryAlignmentStart("chrM", 1500);
+            Assert.fail("Expecting query to fail when there is no index");
+        } catch (SAMException e) {
+        }
+        reader.close();
+    }
+
+    // this test is the same as the ones above in testConstructors
+    @Test
+    public void test_chrM_1500_location() throws IOException {
+        CRAMFileReader reader = new CRAMFileReader(cramFile, indexFile, source);
+        reader.setValidationStringency(ValidationStringency.SILENT);
+        CloseableIterator<SAMRecord> iterator = reader.queryAlignmentStart("chrM", 1500);
+        Assert.assertTrue(iterator.hasNext());
+        SAMRecord record = iterator.next();
+
+        Assert.assertEquals(record.getReferenceName(), "chrM");
+        Assert.assertTrue(record.getAlignmentStart() >= 1500);
+    }
+
+    @Test
+    public void scanMappedReads() throws IOException {
+        SamReader samReader = SamReaderFactory.makeDefault().open(BAM_FILE);
+        SAMRecordIterator samRecordIterator = samReader.iterator();
+        CRAMFileReader reader = new CRAMFileReader(new ByteArraySeekableStream(cramBytes), new ByteArraySeekableStream(baiBytes), source, ValidationStringency.SILENT);
+        reader.setValidationStringency(ValidationStringency.SILENT);
+
+        int counter = 0;
+        while (samRecordIterator.hasNext()) {
+            SAMRecord samRecord = samRecordIterator.next();
+            if (samRecord.getReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX) break;
+            // test only 1st and 2nd in every 100 to speed the test up:
+            if (counter++ %100 > 1) continue;
+            String s1 = samRecord.getSAMString();
+
+            CloseableIterator<SAMRecord> iterator = reader.queryAlignmentStart(samRecord.getReferenceName(), samRecord.getAlignmentStart());
+            Assert.assertTrue(iterator.hasNext(), counter + ": " + s1);
+            SAMRecord cramRecord = iterator.next();
+
+            String s2 = cramRecord.getSAMString();
+
+            Assert.assertEquals(samRecord.getReferenceName(), cramRecord.getReferenceName(), s1 + s2);
+            // default 'overlap' is true, so test records intersect the query:
+            Assert.assertTrue(CoordMath.overlaps(cramRecord.getAlignmentStart(), cramRecord.getAlignmentEnd(), samRecord.getAlignmentStart(), samRecord.getAlignmentEnd()), s1 + s2);
+        }
+        samRecordIterator.close();
+        reader.close();
+
+        Assert.assertEquals(counter, nofMappedReads);
+    }
+
+    @Test
+    public void testNoStringencyConstructor() throws IOException {
+        final File CRAMFile = new File("src/test/resources/htsjdk/samtools/cram/auxf#values.3.0.cram");
+        final File refFile = new File("src/test/resources/htsjdk/samtools/cram/auxf.fa");
+        ReferenceSource refSource = new ReferenceSource(refFile);
+
+        long start = 0;
+        long end = CRAMFile.length();
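+        // The boundaries mimic virtual file-pointer encoding: container byte offsets are
+        // packed into the upper bits (offset << 16), here spanning the whole file.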
+        long[] boundaries = new long[] {start << 16, (end - 1) << 16};
+        final CRAMIterator iterator = new CRAMIterator(new SeekableFileStream(CRAMFile), refSource, boundaries);
+        long count = 0;
+        while (iterator.hasNext()) {
+            count++;
+            iterator.next();
+        }
+        Assert.assertEquals(count, 2);
+    }
+
+    @Test
+    public void testIteratorFromFileSpan_WholeFile() throws IOException {
+        CRAMFileReader reader = new CRAMFileReader(new ByteArraySeekableStream(cramBytes), new ByteArraySeekableStream(baiBytes), source, ValidationStringency.SILENT);
+        reader.setValidationStringency(ValidationStringency.SILENT);
+
+        final SAMFileSpan allContainers = reader.getFilePointerSpanningReads();
+        final CloseableIterator<SAMRecord> iterator = reader.getIterator(allContainers);
+        Assert.assertTrue(iterator.hasNext());
+        int counter = 0;
+        while (iterator.hasNext()) {
+            iterator.next();
+            counter++;
+        }
+        Assert.assertEquals(counter, nofReads);
+    }
+
+    @Test
+    public void testIteratorFromFileSpan_SecondContainer() throws IOException, IllegalAccessException {
+        CramContainerIterator it = new CramContainerIterator(new ByteArrayInputStream(cramBytes));
+        it.hasNext();
+        it.next();
+        it.hasNext();
+        Container secondContainer = it.next();
+        Assert.assertNotNull(secondContainer);
+        final Map<Integer, AlignmentSpan> references = new ContainerParser(it.getCramHeader().getSamFileHeader()).getReferences(secondContainer, ValidationStringency.STRICT);
+        it.close();
+        int refId = new TreeSet<Integer>(references.keySet()).iterator().next();
+        final AlignmentSpan alignmentSpan = references.get(refId);
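+        // Use the second container's reference span to look up the matching file span in the
+        // BAI index, then iterate that span and confirm it yields overlapping records.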
+
+        CRAMFileReader reader = new CRAMFileReader(new ByteArraySeekableStream(cramBytes), new ByteArraySeekableStream(baiBytes), source, ValidationStringency.SILENT);
+        reader.setValidationStringency(ValidationStringency.SILENT);
+
+        final BAMIndex index = reader.getIndex();
+        final SAMFileSpan spanOfSecondContainer = index.getSpanOverlapping(refId, alignmentSpan.getStart(), alignmentSpan.getStart()+ alignmentSpan.getSpan());
+        Assert.assertNotNull(spanOfSecondContainer);
+        Assert.assertFalse(spanOfSecondContainer.isEmpty());
+        Assert.assertTrue(spanOfSecondContainer instanceof BAMFileSpan);
+
+        final CloseableIterator<SAMRecord> iterator = reader.getIterator(spanOfSecondContainer);
+        Assert.assertTrue(iterator.hasNext());
+        int counter = 0;
+        boolean matchFound = false;
+        while (iterator.hasNext()) {
+            final SAMRecord record = iterator.next();
+            if (record.getReferenceIndex().intValue() == refId) {
+                boolean overlaps = CoordMath.overlaps(record.getAlignmentStart(), record.getAlignmentEnd(), alignmentSpan.getStart(), alignmentSpan.getStart()+ alignmentSpan.getSpan());
+                if (overlaps) matchFound = true;
+            }
+            counter++;
+        }
+        Assert.assertTrue(matchFound);
+        Assert.assertTrue(counter <= CRAMContainerStreamWriter.DEFAULT_RECORDS_PER_SLICE);
+    }
+
+    @Test
+    public void testQueryInterval() throws IOException {
+        CRAMFileReader reader = new CRAMFileReader(new ByteArraySeekableStream(cramBytes), new ByteArraySeekableStream(baiBytes), source, ValidationStringency.SILENT);
+        QueryInterval[] query = new QueryInterval[]{new QueryInterval(0, 1519, 1520), new QueryInterval(1, 470535, 470536)};
+        final CloseableIterator<SAMRecord> iterator = reader.query(query, false);
+        Assert.assertTrue(iterator.hasNext());
+        SAMRecord r1 = iterator.next();
+        Assert.assertEquals(r1.getReadName(), "3968040");
+
+        Assert.assertTrue(iterator.hasNext());
+        SAMRecord r2 = iterator.next();
+        Assert.assertEquals(r2.getReadName(), "140419");
+
+        Assert.assertFalse(iterator.hasNext());
+        iterator.close();
+        reader.close();
+    }
+
+    @Test
+    public void scanAllUnmappedReads() throws IOException {
+        SamReader samReader = SamReaderFactory.makeDefault().open(BAM_FILE);
+        CRAMFileReader reader = new CRAMFileReader(new ByteArraySeekableStream(cramBytes), new ByteArraySeekableStream(baiBytes), source, ValidationStringency.SILENT);
+        reader.setValidationStringency(ValidationStringency.SILENT);
+        int counter = 0;
+
+        SAMRecordIterator unmappedSamIterator = samReader.queryUnmapped();
+        CloseableIterator<SAMRecord> unmappedCramIterator = reader.queryUnmapped();
+        while (unmappedSamIterator.hasNext()) {
+            Assert.assertTrue(unmappedCramIterator.hasNext());
+            SAMRecord r1 = unmappedSamIterator.next();
+            SAMRecord r2 = unmappedCramIterator.next();
+            Assert.assertEquals(r1.getReadName(), r2.getReadName());
+            Assert.assertEquals(r1.getBaseQualityString(), r2.getBaseQualityString());
+
+            counter++;
+        }
+        Assert.assertFalse(unmappedCramIterator.hasNext());
+        Assert.assertEquals(counter, nofUnmappedReads);
+
+        reader.close();
+    }
+
+    @BeforeTest
+    public void prepare() throws IOException {
+        Log.setGlobalLogLevel(Log.LogLevel.ERROR);
+        source = new ReferenceSource(new FakeReferenceSequenceFile(SamReaderFactory.makeDefault().getFileHeader(BAM_FILE).getSequenceDictionary().getSequences()));
+        cramBytes = cramFromBAM(BAM_FILE, source);
+        cramFile = File.createTempFile(BAM_FILE.getName(), ".cram");
+        cramFile.deleteOnExit();
+        indexFile = new File(cramFile.getAbsolutePath() + ".bai");
+        indexFile.deleteOnExit();
+        FileOutputStream fos = new FileOutputStream(cramFile);
+        fos.write(cramBytes);
+        fos.close();
+
+        CRAMBAIIndexer.createIndex(new SeekableFileStream(cramFile), indexFile, null, ValidationStringency.STRICT);
+        baiBytes = readFile(indexFile);
+    }
+
+    private static byte[] readFile(File file) throws FileNotFoundException {
+        FileInputStream fis = new FileInputStream(file);
+        ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        IOUtil.copyStream(fis, baos);
+        return baos.toByteArray();
+    }
+
+    private byte[] cramFromBAM(File bamFile, ReferenceSource source) throws IOException {
+        ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        final SamReader reader = SamReaderFactory.makeDefault().open(bamFile);
+        final SAMRecordIterator iterator = reader.iterator();
+        // to reduce granularity let's use this hacky approach:
+        int previousValue = CRAMContainerStreamWriter.DEFAULT_RECORDS_PER_SLICE;
+        CRAMContainerStreamWriter.DEFAULT_RECORDS_PER_SLICE = nofReadsPerContainer;
+        try {
+            CRAMFileWriter writer = new CRAMFileWriter(baos, source, reader.getFileHeader(), bamFile.getName());
+            while (iterator.hasNext()) {
+                SAMRecord record = iterator.next();
+                writer.addAlignment(record);
+            }
+            writer.close();
+        }
+        finally {
+            // failing to reset this can cause unrelated tests to fail if this test fails
+            CRAMContainerStreamWriter.DEFAULT_RECORDS_PER_SLICE = previousValue;
+        }
+        return baos.toByteArray();
+    }
+
+
+}
diff --git a/src/test/java/htsjdk/samtools/CRAMFileCRAIIndexTest.java b/src/test/java/htsjdk/samtools/CRAMFileCRAIIndexTest.java
new file mode 100644
index 0000000..9084a0f
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/CRAMFileCRAIIndexTest.java
@@ -0,0 +1,368 @@
+package htsjdk.samtools;
+
+import htsjdk.samtools.cram.build.ContainerParser;
+import htsjdk.samtools.cram.build.CramContainerIterator;
+import htsjdk.samtools.cram.ref.ReferenceSource;
+import htsjdk.samtools.cram.structure.AlignmentSpan;
+import htsjdk.samtools.cram.structure.Container;
+import htsjdk.samtools.reference.FakeReferenceSequenceFile;
+import htsjdk.samtools.seekablestream.ByteArraySeekableStream;
+import htsjdk.samtools.seekablestream.SeekableFileStream;
+import htsjdk.samtools.util.CloseableIterator;
+import htsjdk.samtools.util.CoordMath;
+import htsjdk.samtools.util.IOUtil;
+import htsjdk.samtools.util.Log;
+import org.testng.Assert;
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.Test;
+
+import java.io.*;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.TreeSet;
+
+/**
+ * Companion tests for the ones in CRAMFileBAIIndexTest, but run against a .bai
+ * that has been converted from a .crai.
+ *
+ * A collection of tests for CRAM CRAI index write/read that use the BAMFileIndexTest/index_test.bam
+ * file as the source of the test data. The scan* tests check that, for every record in the
+ * BAM file, the query returns the same records from the CRAM file.
+ */
+public class CRAMFileCRAIIndexTest {
+    private final File BAM_FILE = new File("src/test/resources/htsjdk/samtools/BAMFileIndexTest/index_test.bam");
+
+    private final int nofReads = 10000;
+    private final int nofReadsPerContainer = 1000;
+    private final int nofUnmappedReads = 279;
+    private final int nofMappedReads = 9721;
+
+    private File tmpCramFile;
+    private File tmpCraiFile;
+    private byte[] cramBytes;
+    private byte[] craiBytes;
+    private ReferenceSource source;
+
+    @Test
+    public void testFileFileConstructor () throws IOException {
+        CRAMFileReader reader = new CRAMFileReader(
+                tmpCramFile,
+                tmpCraiFile,
+                source,
+                ValidationStringency.STRICT);
+        CloseableIterator<SAMRecord> iterator = reader.queryAlignmentStart("chrM", 1500);
+
+        Assert.assertTrue(iterator.hasNext());
+        SAMRecord record = iterator.next();
+        Assert.assertEquals(record.getReferenceName(), "chrM");
+        Assert.assertTrue(record.getAlignmentStart() >= 1500);
+        reader.close();
+    }
+
+    @Test
+    public void testStreamFileConstructor () throws IOException {
+        CRAMFileReader reader = new CRAMFileReader(
+                new SeekableFileStream(tmpCramFile),
+                tmpCraiFile,
+                source,
+                ValidationStringency.STRICT);
+        CloseableIterator<SAMRecord> iterator = reader.queryAlignmentStart("chrM", 1500);
+        Assert.assertTrue(iterator.hasNext());
+        SAMRecord record = iterator.next();
+
+        Assert.assertEquals(record.getReferenceName(), "chrM");
+        Assert.assertTrue(record.getAlignmentStart() >= 1500);
+        reader.close();
+    }
+
+    @Test
+    public void testStreamStreamConstructor() throws IOException {
+        CRAMFileReader reader = new CRAMFileReader(
+                new SeekableFileStream(tmpCramFile),
+                new SeekableFileStream(tmpCraiFile),
+                source,
+                ValidationStringency.STRICT);
+        CloseableIterator<SAMRecord> iterator = reader.queryAlignmentStart("chrM", 1500);
+        Assert.assertTrue(iterator.hasNext());
+        SAMRecord record = iterator.next();
+
+        Assert.assertEquals(record.getReferenceName(), "chrM");
+        Assert.assertTrue(record.getAlignmentStart() >= 1500);
+        reader.close();
+    }
+
+    @Test(expectedExceptions = SAMException.class)
+    public void testFileFileConstructorNoIndex () throws IOException {
+        CRAMFileReader reader = new CRAMFileReader(
+                new SeekableFileStream(tmpCramFile),
+                (File) null,
+                source,
+                ValidationStringency.STRICT);
+        try {
+            reader.queryAlignmentStart("chrM", 1500);
+        }
+        finally {
+            reader.close();
+        }
+    }
+
+    @Test(expectedExceptions = SAMException.class)
+    public void testStreamStreamConstructorNoIndex () throws IOException {
+        CRAMFileReader reader = new CRAMFileReader(
+                new SeekableFileStream(tmpCramFile),
+                (SeekableFileStream) null,
+                source,
+                ValidationStringency.STRICT);
+        try {
+            reader.queryAlignmentStart("chrM", 1500);
+        }
+        finally {
+            reader.close();
+        }
+    }
+
+    @Test
+    public void testMappedReads() throws IOException {
+
+        try (SamReader samReader = SamReaderFactory.makeDefault().open(BAM_FILE);
+             SAMRecordIterator samRecordIterator = samReader.iterator())
+        {
+            Assert.assertEquals(samReader.getFileHeader().getSortOrder(), SAMFileHeader.SortOrder.coordinate);
+            CRAMFileReader cramReader = new CRAMFileReader(
+                    new ByteArraySeekableStream(cramBytes),
+                    new ByteArraySeekableStream(craiBytes),
+                    source,
+                    ValidationStringency.STRICT);
+
+            int counter = 0;
+            while (samRecordIterator.hasNext()) {
+                SAMRecord samRecord = samRecordIterator.next();
+                if (samRecord.getReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX) {
+                    break;
+                }
+                if (counter++ % 100 > 1) { // test only 1st and 2nd in every 100 to speed the test up:
+                    continue;
+                }
+                String sam1 = samRecord.getSAMString();
+
+                CloseableIterator<SAMRecord> iterator = cramReader.queryAlignmentStart(
+                        samRecord.getReferenceName(),
+                        samRecord.getAlignmentStart());
+
+                Assert.assertTrue(iterator.hasNext(), counter + ": " + sam1);
+                SAMRecord cramRecord = iterator.next();
+                String sam2 = cramRecord.getSAMString();
+                Assert.assertEquals(samRecord.getReferenceName(), cramRecord.getReferenceName(), sam1 + sam2);
+
+                // default 'overlap' is true, so test records intersect the query:
+                Assert.assertTrue(CoordMath.overlaps(
+                        cramRecord.getAlignmentStart(),
+                        cramRecord.getAlignmentEnd(),
+                        samRecord.getAlignmentStart(),
+                        samRecord.getAlignmentEnd()),
+                        sam1 + sam2);
+            }
+            Assert.assertEquals(counter, nofMappedReads);
+            cramReader.close();
+        }
+    }
+
+    @Test
+    public void testQueryUnmapped() throws IOException {
+        try (final SamReader samReader = SamReaderFactory.makeDefault().open(BAM_FILE);
+             final SAMRecordIterator unmappedSamIterator = samReader.queryUnmapped())
+        {
+            CRAMFileReader reader = new CRAMFileReader(
+                    new ByteArraySeekableStream(cramBytes),
+                    new ByteArraySeekableStream(craiBytes),
+                    source,
+                    ValidationStringency.STRICT);
+            int counter = 0;
+            CloseableIterator<SAMRecord> unmappedCramIterator = reader.queryUnmapped();
+
+            while (unmappedSamIterator.hasNext()) {
+                Assert.assertTrue(unmappedCramIterator.hasNext());
+                SAMRecord r1 = unmappedSamIterator.next();
+                SAMRecord r2 = unmappedCramIterator.next();
+                Assert.assertEquals(r1.getReadName(), r2.getReadName());
+                Assert.assertEquals(r1.getBaseQualityString(), r2.getBaseQualityString());
+                counter++;
+            }
+
+            Assert.assertFalse(unmappedCramIterator.hasNext());
+            Assert.assertEquals(counter, nofUnmappedReads);
+        }
+    }
+
+    @Test
+    public void testIteratorConstructor() throws IOException {
+        final File CRAMFile = new File("src/test/resources/htsjdk/samtools/cram/auxf#values.3.0.cram");
+        final File refFile = new File("src/test/resources/htsjdk/samtools/cram/auxf.fa");
+        ReferenceSource refSource = new ReferenceSource(refFile);
+
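+        // NB: the iterator boundaries are virtual-offset style coordinates (container byte offset
+        // shifted left by 16 bits), so this pair spans from the start of the file to its last byte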
+        long[] boundaries = new long[] {0, (CRAMFile.length() - 1) << 16};
+        final CRAMIterator iterator = new CRAMIterator(
+                new SeekableFileStream(CRAMFile),
+                refSource, boundaries,
+                ValidationStringency.STRICT);
+        long count = getIteratorCount(iterator);
+        Assert.assertEquals(count, 2);
+    }
+
+    @Test
+    public void testNoStringencyIteratorConstructor() throws IOException {
+        final File CRAMFile = new File("src/test/resources/htsjdk/samtools/cram/auxf#values.3.0.cram");
+        final File refFile = new File("src/test/resources/htsjdk/samtools/cram/auxf.fa");
+        ReferenceSource refSource = new ReferenceSource(refFile);
+
+        long[] boundaries = new long[] {0, (CRAMFile.length() - 1) << 16};
+        final CRAMIterator iterator = new CRAMIterator(new SeekableFileStream(CRAMFile), refSource, boundaries);
+
+        long count = getIteratorCount(iterator);
+        Assert.assertEquals(count, 2);
+    }
+
+    @Test
+    public void testIteratorWholeFileSpan() throws IOException {
+        CRAMFileReader reader = new CRAMFileReader(
+                new ByteArraySeekableStream(cramBytes),
+                new ByteArraySeekableStream(craiBytes),
+                source,
+                ValidationStringency.STRICT);
+
+        final SAMFileSpan allContainers = reader.getFilePointerSpanningReads();
+        final CloseableIterator<SAMRecord> iterator = reader.getIterator(allContainers);
+        Assert.assertTrue(iterator.hasNext());
+        long count = getIteratorCount(iterator);
+        Assert.assertEquals(count, nofReads);
+    }
+
+    @Test
+    public void testIteratorSecondContainerSpan() throws IOException, IllegalAccessException {
+        CramContainerIterator it = new CramContainerIterator(new ByteArrayInputStream(cramBytes));
+        it.hasNext();
+        it.next();
+        it.hasNext();
+        Container secondContainer = it.next();
+        Assert.assertNotNull(secondContainer);
+        final Map<Integer, AlignmentSpan> references =
+                new ContainerParser(it.getCramHeader().getSamFileHeader()).getReferences(secondContainer, ValidationStringency.STRICT);
+        it.close();
+
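+        // take the lowest reference id present in the second container; its alignment span is then
+        // used to ask the index for a file span that should point back at that same container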
+        int refId = new TreeSet<>(references.keySet()).iterator().next();
+        final AlignmentSpan alignmentSpan = references.get(refId);
+
+        CRAMFileReader reader = new CRAMFileReader(
+                new ByteArraySeekableStream(cramBytes),
+                new ByteArraySeekableStream(craiBytes),
+                source,
+                ValidationStringency.STRICT);
+
+        final BAMIndex index = reader.getIndex();
+        final SAMFileSpan spanOfSecondContainer = index.getSpanOverlapping(refId, alignmentSpan.getStart(), alignmentSpan.getStart()+ alignmentSpan.getSpan());
+        Assert.assertNotNull(spanOfSecondContainer);
+        Assert.assertFalse(spanOfSecondContainer.isEmpty());
+        Assert.assertTrue(spanOfSecondContainer instanceof BAMFileSpan);
+
+        final CloseableIterator<SAMRecord> iterator = reader.getIterator(spanOfSecondContainer);
+        Assert.assertTrue(iterator.hasNext());
+        int counter = 0;
+        boolean matchFound = false;
+        while (iterator.hasNext()) {
+            final SAMRecord record = iterator.next();
+            if (record.getReferenceIndex().intValue() == refId) {
+                boolean overlaps = CoordMath.overlaps(record.getAlignmentStart(), record.getAlignmentEnd(), alignmentSpan.getStart(), alignmentSpan.getStart()+ alignmentSpan.getSpan());
+                if (overlaps) matchFound = true;
+            }
+            counter++;
+        }
+        Assert.assertTrue(matchFound);
+        Assert.assertTrue(counter <= CRAMContainerStreamWriter.DEFAULT_RECORDS_PER_SLICE);
+    }
+
+    @Test
+    public void testQueryInterval() throws IOException {
+        CRAMFileReader reader = new CRAMFileReader(
+                new ByteArraySeekableStream(cramBytes),
+                new ByteArraySeekableStream(craiBytes),
+                source,
+                ValidationStringency.STRICT);
+        QueryInterval[] query = new QueryInterval[]{new QueryInterval(0, 1519, 1520), new QueryInterval(1, 470535, 470536)};
+        final CloseableIterator<SAMRecord> iterator = reader.query(query, false);
+        Assert.assertTrue(iterator.hasNext());
+        SAMRecord r1 = iterator.next();
+        Assert.assertEquals(r1.getReadName(), "3968040");
+
+        Assert.assertTrue(iterator.hasNext());
+        SAMRecord r2 = iterator.next();
+        Assert.assertEquals(r2.getReadName(), "140419");
+
+        Assert.assertFalse(iterator.hasNext());
+        iterator.close();
+        reader.close();
+    }
+
+    @BeforeTest
+    public void prepare() throws IOException {
+        Log.setGlobalLogLevel(Log.LogLevel.ERROR);
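+        // build a fake in-memory reference from the BAM's sequence dictionary so that no real
+        // reference fasta is needed (FakeReferenceSequenceFile presumably synthesizes bases on demand)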
+        source = new ReferenceSource(new FakeReferenceSequenceFile(
+                SamReaderFactory.makeDefault().getFileHeader(BAM_FILE).getSequenceDictionary().getSequences()));
+
+        tmpCramFile = File.createTempFile(BAM_FILE.getName(), ".cram");
+        tmpCramFile.deleteOnExit();
+        tmpCraiFile = new File(tmpCramFile.getAbsolutePath() + ".crai");
+        tmpCraiFile.deleteOnExit();
+        cramBytes = cramFromBAM(BAM_FILE, source);
+
+        try (FileOutputStream fos = new FileOutputStream(tmpCramFile)) {
+            fos.write(cramBytes);
+        }
+
+        // close the index stream before reading the index back so the .crai bytes are fully flushed
+        try (FileOutputStream fios = new FileOutputStream(tmpCraiFile)) {
+            CRAMCRAIIndexer.writeIndex(new SeekableFileStream(tmpCramFile), fios);
+        }
+        craiBytes = readFile(tmpCraiFile);
+    }
+
+    private static byte[] readFile(File file) throws IOException {
+        try (FileInputStream fis = new FileInputStream(file)) {
+            ByteArrayOutputStream baos = new ByteArrayOutputStream();
+            IOUtil.copyStream(fis, baos);
+            return baos.toByteArray();
+        }
+    }
+
+    private byte[] cramFromBAM(File bamFile, ReferenceSource source) throws IOException {
+
+        int previousValue = CRAMContainerStreamWriter.DEFAULT_RECORDS_PER_SLICE;
+        CRAMContainerStreamWriter.DEFAULT_RECORDS_PER_SLICE = nofReadsPerContainer;
+
+        try (final SamReader reader = SamReaderFactory.makeDefault().open(bamFile);
+             final SAMRecordIterator iterator = reader.iterator();
+             final ByteArrayOutputStream baos = new ByteArrayOutputStream())
+        {
+            CRAMFileWriter writer = new CRAMFileWriter(
+                    baos,
+                    source,
+                    reader.getFileHeader(),
+                    bamFile.getName());
+            while (iterator.hasNext()) {
+                SAMRecord record = iterator.next();
+                writer.addAlignment(record);
+            }
+            writer.close();
+            return baos.toByteArray();
+        }
+        finally {
+            // failing to reset this can cause unrelated tests to fail if this test fails
+            CRAMContainerStreamWriter.DEFAULT_RECORDS_PER_SLICE = previousValue;
+        }
+    }
+
+    private long getIteratorCount(Iterator<SAMRecord> it) {
+        long count = 0;
+        while (it.hasNext()) {
+            count++;
+            it.next();
+        }
+        return count;
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/CRAMFileReaderTest.java b/src/test/java/htsjdk/samtools/CRAMFileReaderTest.java
new file mode 100644
index 0000000..8c610bd
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/CRAMFileReaderTest.java
@@ -0,0 +1,163 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2015 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools;
+
+import htsjdk.samtools.cram.ref.ReferenceSource;
+import htsjdk.samtools.reference.InMemoryReferenceSequenceFile;
+import htsjdk.samtools.seekablestream.SeekableFileStream;
+import htsjdk.samtools.util.Log;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.ByteArrayInputStream;
+import java.io.InputStream;
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+
+/**
+ * Additional tests for CRAMFileReader are in CRAMFileIndexTest
+ */
+public class CRAMFileReaderTest {
+
+    private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools");
+
+    @BeforeClass
+    public void initClass() {
+        Log.setGlobalLogLevel(Log.LogLevel.ERROR);
+    }
+
+    private ReferenceSource createReferenceSource() {
+        byte[] refBases = new byte[10 * 10];
+        Arrays.fill(refBases, (byte) 'A');
+        InMemoryReferenceSequenceFile rsf = new InMemoryReferenceSequenceFile();
+        rsf.add("chr1", refBases);
+        return new ReferenceSource(rsf);
+    }
+
+    // constructor 1: CRAMFileReader(final File cramFile, final InputStream inputStream)
+
+    @Test(description = "Test CRAMReader 1 reference required", expectedExceptions = IllegalStateException.class)
+    public void testCRAMReader1_ReferenceRequired() {
+        File file = new File(TEST_DATA_DIR, "cram_with_crai_index.cram");
+        InputStream bis = null;
+        // assumes that reference_fasta property is not set and the download service is not enabled
+        new CRAMFileReader(file, bis);
+    }
+
+    // constructor 2: CRAMFileReader(final File cramFile, final InputStream inputStream, final ReferenceSource referenceSource)
+
+    @Test(description = "Test CRAMReader 2 reference required", expectedExceptions = IllegalArgumentException.class)
+    public void testCRAMReader2_ReferenceRequired() {
+        File file = new File(TEST_DATA_DIR, "cram_with_crai_index.cram");
+        InputStream bis = null;
+        new CRAMFileReader(file, bis, null);
+    }
+
+    @Test(description = "Test CRAMReader 2 input required", expectedExceptions = IllegalArgumentException.class)
+    public void testCRAMReader2_InputRequired() {
+        File file = null;
+        InputStream bis = null;
+        new CRAMFileReader(file, bis, createReferenceSource());
+    }
+
+    // constructor 3: CRAMFileReader(final File cramFile, final File indexFile, final ReferenceSource referenceSource)
+
+    @Test(description = "Test CRAMReader 3 reference required", expectedExceptions = IllegalArgumentException.class)
+    public void testCRAMReader3_ReferenceRequired() {
+        File inputFile = new File(TEST_DATA_DIR, "cram_with_crai_index.cram");
+        File indexFile = null;
+        ReferenceSource refSource = null;
+        new CRAMFileReader(inputFile, indexFile, refSource);
+    }
+
+    @Test(description = "Test CRAMReader 3 input required", expectedExceptions = IllegalArgumentException.class)
+    public void testCRAMReader3_InputRequired() {
+        File inputFile = null;
+        File indexFile = null;
+        ReferenceSource refSource = null;
+        new CRAMFileReader(inputFile, indexFile, refSource);
+    }
+
+    // constructor 4: CRAMFileReader(final File cramFile, final ReferenceSource referenceSource)
+
+    @Test(description = "Test CRAMReader 4 reference required", expectedExceptions = IllegalArgumentException.class)
+    public void testCRAMReader4_ReferenceRequired() {
+        File inputFile = new File(TEST_DATA_DIR, "cram_with_crai_index.cram");
+        ReferenceSource refSource = null;
+        new CRAMFileReader(inputFile, refSource);
+    }
+
+    @Test(description = "Test CRAMReader 4 input required", expectedExceptions = IllegalArgumentException.class)
+    public void testCRAMReader4_InputRequired() {
+        File inputFile = null;
+        new CRAMFileReader(inputFile, createReferenceSource());
+    }
+
+    // constructor 5: CRAMFileReader(final InputStream inputStream, final SeekableStream indexInputStream,
+    //          final ReferenceSource referenceSource, final ValidationStringency validationStringency)
+    @Test(description = "Test CRAMReader 5 reference required", expectedExceptions = IllegalArgumentException.class)
+    public void testCRAMReader5_ReferenceRequired() throws IOException {
+        InputStream bis = new ByteArrayInputStream(new byte[0]);
+        SeekableFileStream sfs = null;
+        ReferenceSource refSource = null;
+        new CRAMFileReader(bis, sfs, refSource, ValidationStringency.STRICT);
+    }
+
+    @Test(description = "Test CRAMReader 5 input required", expectedExceptions = IllegalArgumentException.class)
+    public void testCRAMReader5_InputRequired() throws IOException {
+        InputStream bis = null;
+        SeekableFileStream sfs = null;
+        new CRAMFileReader(bis, sfs, createReferenceSource(), ValidationStringency.STRICT);
+    }
+
+    // constructor 6: CRAMFileReader(final InputStream stream, final File indexFile, final ReferenceSource referenceSource,
+    //                final ValidationStringency validationStringency)
+    @Test(description = "Test CRAMReader 6 reference required", expectedExceptions = IllegalArgumentException.class)
+    public void testCRAMReader6_ReferenceRequired() throws IOException {
+        InputStream bis = new ByteArrayInputStream(new byte[0]);
+        File file = null;
+        ReferenceSource refSource = null;
+        new CRAMFileReader(bis, file, refSource, ValidationStringency.STRICT);
+    }
+
+    @Test(description = "Test CRAMReader 6 input required", expectedExceptions = IllegalArgumentException.class)
+    public void testCRAMReader6_InputRequired() throws IOException {
+        InputStream bis = null;
+        File file = null;
+        ReferenceSource refSource = null;
+        new CRAMFileReader(bis, file, createReferenceSource(), ValidationStringency.STRICT);
+    }
+
+    // constructor 7: CRAMFileReader(final File cramFile, final File indexFile, final ReferenceSource referenceSource,
+    //                final ValidationStringency validationStringency)
+    @Test(description = "Test CRAMReader 7 reference required", expectedExceptions = IllegalArgumentException.class)
+    public void testCRAMReader7_ReferenceRequired() throws IOException {
+        InputStream bis = new ByteArrayInputStream(new byte[0]);
+        File file = new File(TEST_DATA_DIR, "cram_with_crai_index.cram");
+        ReferenceSource refSource = null;
+        new CRAMFileReader(file, file, refSource, ValidationStringency.STRICT);
+    }
+
+}
diff --git a/src/test/java/htsjdk/samtools/CRAMFileWriterTest.java b/src/test/java/htsjdk/samtools/CRAMFileWriterTest.java
new file mode 100644
index 0000000..c495528
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/CRAMFileWriterTest.java
@@ -0,0 +1,282 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2010 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools;
+
+import htsjdk.samtools.cram.ref.ReferenceSource;
+import htsjdk.samtools.reference.InMemoryReferenceSequenceFile;
+import htsjdk.samtools.util.Log;
+import htsjdk.samtools.util.Log.LogLevel;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+public class CRAMFileWriterTest {
+
+    @BeforeClass
+    public void initClass() {
+        Log.setGlobalLogLevel(LogLevel.ERROR);
+    }
+
+    @Test(description = "Test for lossy CRAM compression invariants.")
+    public void lossyCramInvariantsTest() {
+        doTest(createRecords(1000));
+    }
+
+    @Test(description = "Tests writing records with null SAMFileHeaders")
+    public void writeRecordsWithNullHeader() throws Exception {
+
+        final List<SAMRecord> samRecs = createRecords(50);
+        for (SAMRecord rec : samRecs) {
+            rec.setHeader(null);
+        }
+        doTest(samRecs);
+    }
+
+    @Test(description = "Tests an unmapped record with sequence and quality fields")
+    public void unmappedWithSequenceAndQualityField() throws Exception {
+        unmappedSequenceAndQualityFieldHelper(true);
+    }
+
+    @Test(description = "Tests an unmapped record with no sequence or quality fields")
+    public void unmappedWithNoSequenceAndQualityField() throws Exception {
+        unmappedSequenceAndQualityFieldHelper(false);
+    }
+
+    private void unmappedSequenceAndQualityFieldHelper(boolean unmappedHasBasesAndQualities) throws Exception {
+        List<SAMRecord> list = new ArrayList<SAMRecord>(2);
+        final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
+        if (builder.getHeader().getReadGroups().isEmpty()) {
+            throw new Exception("Read group expected in the header");
+        }
+
+        builder.setUnmappedHasBasesAndQualities(unmappedHasBasesAndQualities);
+
+        builder.addUnmappedFragment("test1");
+        builder.addUnmappedPair("test2");
+
+        list.addAll(builder.getRecords());
+
+        Collections.sort(list, new SAMRecordCoordinateComparator());
+
+        doTest(list);
+    }
+
+    private List<SAMRecord> createRecords(int count) {
+        List<SAMRecord> list = new ArrayList<SAMRecord>(count);
+        final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
+        if (builder.getHeader().getReadGroups().isEmpty()) {
+            throw new IllegalStateException("Read group expected in the header");
+        }
+
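+        // lay read pairs down at slowly increasing positions on the first reference; the list is
+        // coordinate-sorted explicitly below before being returned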
+        int posInRef = 1;
+        for (int i = 0; i < count / 2; i++) {
+            builder.addPair(Integer.toString(i), 0, posInRef += 1,
+                    posInRef += 3);
+        }
+        list.addAll(builder.getRecords());
+
+        Collections.sort(list, new SAMRecordCoordinateComparator());
+
+        return list;
+    }
+
+    private SAMFileHeader createSAMHeader(SAMFileHeader.SortOrder sortOrder) {
+        final SAMFileHeader header = new SAMFileHeader();
+        header.setSortOrder(sortOrder);
+        header.addSequence(new SAMSequenceRecord("chr1", 123));
+        SAMReadGroupRecord readGroupRecord = new SAMReadGroupRecord("1");
+        header.addReadGroup(readGroupRecord);
+        return header;
+    }
+
+    private ReferenceSource createReferenceSource() {
+        byte[] refBases = new byte[1024 * 1024];
+        Arrays.fill(refBases, (byte) 'A');
+        InMemoryReferenceSequenceFile rsf = new InMemoryReferenceSequenceFile();
+        rsf.add("chr1", refBases);
+        return new ReferenceSource(rsf);
+    }
+
+    private void writeRecordsToCRAM(CRAMFileWriter writer, List<SAMRecord> samRecords) {
+        for (SAMRecord record : samRecords) {
+            writer.addAlignment(record);
+        }
+        writer.close();
+    }
+
+    private void validateRecords(final List<SAMRecord> expectedRecords, ByteArrayInputStream is, ReferenceSource referenceSource) {
+        CRAMFileReader cReader = new CRAMFileReader(null, is, referenceSource);
+
+        SAMRecordIterator iterator2 = cReader.getIterator();
+        int index = 0;
+        while (iterator2.hasNext()) {
+            SAMRecord actualRecord = iterator2.next();
+            SAMRecord expectedRecord = expectedRecords.get(index++);
+
+            Assert.assertEquals(actualRecord.getReadName(), expectedRecord.getReadName());
+            Assert.assertEquals(actualRecord.getFlags(), expectedRecord.getFlags());
+            Assert.assertEquals(actualRecord.getAlignmentStart(), expectedRecord.getAlignmentStart());
+            Assert.assertEquals(actualRecord.getAlignmentEnd(), expectedRecord.getAlignmentEnd());
+            Assert.assertEquals(actualRecord.getReferenceName(), expectedRecord.getReferenceName());
+            Assert.assertEquals(actualRecord.getMateAlignmentStart(),
+                    expectedRecord.getMateAlignmentStart());
+            Assert.assertEquals(actualRecord.getMateReferenceName(),
+                    expectedRecord.getMateReferenceName());
+            Assert.assertEquals(actualRecord.getReadBases(), expectedRecord.getReadBases());
+            Assert.assertEquals(actualRecord.getBaseQualities(), expectedRecord.getBaseQualities());
+        }
+        cReader.close();
+    }
+
+    private void doTest(final List<SAMRecord> samRecords) {
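+        // round-trip check: write the records to an in-memory CRAM, then read them back and compare field by field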
+        final SAMFileHeader header = createSAMHeader(SAMFileHeader.SortOrder.coordinate);
+        final ReferenceSource refSource = createReferenceSource();
+        final ByteArrayOutputStream os = new ByteArrayOutputStream();
+
+        CRAMFileWriter writer = new CRAMFileWriter(os, refSource, header, null);
+        writeRecordsToCRAM(writer, samRecords);
+
+        validateRecords(samRecords, new ByteArrayInputStream(os.toByteArray()), refSource);
+    }
+
+    @Test(description = "Test CRAMWriter constructor with index stream")
+    public void testCRAMWriterWithIndex() {
+        final SAMFileHeader header = createSAMHeader(SAMFileHeader.SortOrder.coordinate);
+        final ReferenceSource refSource = createReferenceSource();
+        final ByteArrayOutputStream outStream = new ByteArrayOutputStream();
+        final ByteArrayOutputStream indexStream = new ByteArrayOutputStream();
+
+        final List<SAMRecord> samRecords = createRecords(100);
+        CRAMFileWriter writer = new CRAMFileWriter(outStream, indexStream, refSource, header, null);
+
+        writeRecordsToCRAM(writer, samRecords);
+        validateRecords(samRecords, new ByteArrayInputStream(outStream.toByteArray()), refSource);
+        Assert.assertTrue(indexStream.size() != 0);
+    }
+
+    @Test(description = "Test CRAMWriter constructor with presorted==false")
+    public void testCRAMWriterNotPresorted() {
+        final SAMFileHeader header = createSAMHeader(SAMFileHeader.SortOrder.coordinate);
+        final ReferenceSource refSource = createReferenceSource();
+        final ByteArrayOutputStream outStream = new ByteArrayOutputStream();
+        final ByteArrayOutputStream indexStream = new ByteArrayOutputStream();
+
+        CRAMFileWriter writer = new CRAMFileWriter(outStream, indexStream, false, refSource, header, null);
+
+        // force records to not be coordinate sorted to ensure we're relying on presorted=false
+        final List<SAMRecord> samRecords = createRecords(100);
+        Collections.sort(samRecords, new SAMRecordCoordinateComparator().reversed());
+
+        writeRecordsToCRAM(writer, samRecords);
+
+        // for validation, restore the sort order of the expected records so they match the order of the written records
+        Collections.sort(samRecords, new SAMRecordCoordinateComparator());
+        validateRecords(samRecords, new ByteArrayInputStream(outStream.toByteArray()), refSource);
+        Assert.assertTrue(indexStream.size() != 0);
+    }
+
+    @Test(description = "Test CRAMWriter constructor reference required 1", expectedExceptions = IllegalArgumentException.class)
+    public void testCRAMWriterConstructorRequiredReference_1() {
+        final SAMFileHeader header = createSAMHeader(SAMFileHeader.SortOrder.coordinate);
+        final ByteArrayOutputStream outStream = new ByteArrayOutputStream();
+        new CRAMFileWriter(outStream, null, header, null);
+    }
+
+    @Test(description = "Test CRAMWriter constructor reference required 2", expectedExceptions = IllegalArgumentException.class)
+    public void testCRAMWriterConstructorRequiredReference_2() {
+        final SAMFileHeader header = createSAMHeader(SAMFileHeader.SortOrder.coordinate);
+        final ByteArrayOutputStream outStream = new ByteArrayOutputStream();
+        new CRAMFileWriter(outStream, null, null, header, null);
+    }
+
+    @Test(description = "Test CRAMWriter constructor reference required 3", expectedExceptions = IllegalArgumentException.class)
+    public void testCRAMWriterConstructorRequiredReference_3() {
+        final SAMFileHeader header = createSAMHeader(SAMFileHeader.SortOrder.coordinate);
+        final ByteArrayOutputStream outStream = new ByteArrayOutputStream();
+        new CRAMFileWriter(outStream, null, true, null, header, null);
+    }
+
+    @Test
+    public void test_roundtrip_tlen_preserved() throws IOException {
+        SamReader reader = SamReaderFactory.make().open(new File("src/test/resources/htsjdk/samtools/cram_tlen_reads.sorted.sam"));
+        ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        final ReferenceSource source = new ReferenceSource(new File("src/test/resources/htsjdk/samtools/cram_tlen.fasta"));
+        CRAMFileWriter writer = new CRAMFileWriter(baos, source, reader.getFileHeader(), "test.cram");
+        SAMRecordIterator iterator = reader.iterator();
+        List<SAMRecord> records = new ArrayList<SAMRecord>();
+        while (iterator.hasNext()) {
+            final SAMRecord record = iterator.next();
+            writer.addAlignment(record);
+            records.add(record);
+        }
+        writer.close();
+
+        CRAMFileReader cramReader = new CRAMFileReader(new ByteArrayInputStream(baos.toByteArray()), (File) null, source, ValidationStringency.STRICT);
+        iterator = cramReader.getIterator();
+        int i = 0;
+        while (iterator.hasNext()) {
+            SAMRecord record1 = iterator.next();
+            SAMRecord record2 = records.get(i++);
+            Assert.assertEquals(record1.getInferredInsertSize(), record2.getInferredInsertSize(), record1.getReadName());
+        }
+        Assert.assertEquals(records.size(), i);
+    }
+
+    @Test
+    public void testCRAMQuerySort() throws IOException {
+        final File input = new File("src/test/resources/htsjdk/samtools/cram_query_sorted.cram");
+        final File reference = new File("src/test/resources/htsjdk/samtools/cram_query_sorted.fasta");
+        final File outputFile = File.createTempFile("tmp.", ".cram");
+
+        try (final SamReader reader = SamReaderFactory.makeDefault().referenceSequence(reference).open(input);
+             final SAMFileWriter writer = new SAMFileWriterFactory().makeWriter(reader.getFileHeader().clone(), false, outputFile, reference)) {
+            for (SAMRecord rec : reader) {
+                writer.addAlignment(rec);
+            }
+        }
+
+        try (final SamReader outReader = SamReaderFactory.makeDefault().referenceSequence(reference).open(outputFile)) {
+            String prevName = null;
+            for (final SAMRecord rec : outReader) {
+                if (prevName == null) {
+                    prevName = rec.getReadName();
+                    continue;
+                }
+                // test if the read names are sorted alphabetically:
+                Assert.assertTrue(rec.getReadName().compareTo(prevName) >= 0);
+                prevName = rec.getReadName();
+            }
+        }
+
+    }
+
+}
diff --git a/src/test/java/htsjdk/samtools/CRAMFileWriterWithIndexTest.java b/src/test/java/htsjdk/samtools/CRAMFileWriterWithIndexTest.java
new file mode 100644
index 0000000..b7e3eab
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/CRAMFileWriterWithIndexTest.java
@@ -0,0 +1,209 @@
+package htsjdk.samtools;
+
+import htsjdk.samtools.cram.CRAIIndex;
+import htsjdk.samtools.cram.ref.ReferenceSource;
+import htsjdk.samtools.reference.InMemoryReferenceSequenceFile;
+import htsjdk.samtools.seekablestream.ByteArraySeekableStream;
+import htsjdk.samtools.seekablestream.SeekableStream;
+import htsjdk.samtools.util.CloseableIterator;
+import htsjdk.samtools.util.Log;
+import org.testng.Assert;
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.Test;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Random;
+
+/**
+ * Created by vadim on 23/03/2015.
+ */
+public class CRAMFileWriterWithIndexTest {
+    private byte[] cramBytes;
+    private byte[] indexBytes;
+    private InMemoryReferenceSequenceFile rsf;
+    private ReferenceSource source;
+    private SAMFileHeader header;
+
+    @Test
+    public void test() throws IOException {
+        CRAMFileReader reader = new CRAMFileReader(new ByteArraySeekableStream(cramBytes), new ByteArraySeekableStream(indexBytes), source, ValidationStringency.SILENT);
+        for (SAMSequenceRecord sequenceRecord : reader.getFileHeader().getSequenceDictionary().getSequences()) {
+            final CloseableIterator<SAMRecord> iterator = reader.queryAlignmentStart(sequenceRecord.getSequenceName(), 1);
+            Assert.assertNotNull(iterator);
+            Assert.assertTrue(iterator.hasNext());
+            SAMRecord record = iterator.next();
+            Assert.assertEquals(record.getReferenceName(), sequenceRecord.getSequenceName());
+            Assert.assertEquals(record.getAlignmentStart(), 1);
+        }
+    }
+
+    private static class TabuRegionInputStream extends SeekableStream {
+        private SeekableStream delegate;
+        private List<Chunk> tabuChunks;
+
+        public TabuRegionInputStream(List<Chunk> tabuChunks, SeekableStream delegate) {
+            this.tabuChunks = tabuChunks;
+            this.delegate = delegate;
+        }
+
+        private boolean isTabu(long position) {
+
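+            // Chunk boundaries are BAM-style virtual file pointers; shifting right by 16 bits
+            // recovers the on-disk byte offset of the corresponding container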
+            for (Chunk chunk : tabuChunks) {
+                if ((chunk.getChunkStart() >> 16) < position && position < (chunk.getChunkEnd() >> 16)) {
+                    return true;
+                }
+            }
+
+            return false;
+        }
+
+        @Override
+        public long length() {
+            return delegate.length();
+        }
+
+        @Override
+        public long position() throws IOException {
+            return delegate.position();
+        }
+
+        @Override
+        public void seek(long position) throws IOException {
+            if (isTabu(position)) {
+                throw new TabuError();
+            }
+            delegate.seek(position);
+        }
+
+        @Override
+        public int read() throws IOException {
+            if (isTabu(position())) throw new TabuError();
+            return delegate.read();
+        }
+
+        @Override
+        public int read(byte[] buffer, int offset, int length) throws IOException {
+            for (long pos = position(); pos < position() + length; pos++)
+                if (isTabu(pos)) {
+                    throw new TabuError();
+                }
+            return delegate.read(buffer, offset, length);
+        }
+
+        @Override
+        public void close() throws IOException {
+            delegate.close();
+        }
+
+        @Override
+        public boolean eof() throws IOException {
+            return delegate.eof();
+        }
+
+        @Override
+        public String getSource() {
+            return delegate.getSource();
+        }
+    }
+
+    private static class TabuError extends RuntimeException {
+
+    }
+
+    /**
+     * This is to check that the indexing actually works and does not just skip records. The approach is to forbid reading of the first
+     * container and then try accessing reads from the first and the second containers. The first attempt should fail but the second should succeed.
+     *
+     * @throws IOException
+     */
+    @Test
+    public void testUnnecessaryIO() throws IOException {
+        final SeekableStream baiStream = SamIndexes.asBaiSeekableStreamOrNull(new ByteArraySeekableStream(indexBytes), header.getSequenceDictionary());
+
+        BAMIndex index = new CachingBAMFileIndex(baiStream, header.getSequenceDictionary());
+        int refID = 0;
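+        // forbid any read between the first file offsets of reference 0 and reference 1,
+        // which should cover (roughly) the container holding the first reference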
+        long start = index.getSpanOverlapping(refID, 1, Integer.MAX_VALUE).getFirstOffset();
+        long end = index.getSpanOverlapping(refID + 1, 1, Integer.MAX_VALUE).getFirstOffset();
+        TabuRegionInputStream tabuIS = new TabuRegionInputStream(Arrays.asList(new Chunk[]{new Chunk(start, end)}), new ByteArraySeekableStream(cramBytes));
+
+        CRAMFileReader reader = new CRAMFileReader(tabuIS, new ByteArraySeekableStream(indexBytes), source, ValidationStringency.SILENT);
+        try {
+            // the attempt to read the 1st container, which happens when the iterator is initialized, must throw
+            CloseableIterator<SAMRecord> it = reader.queryAlignmentStart(header.getSequence(refID).getSequenceName(), 1);
+            Assert.fail();
+        } catch (TabuError e) {
+
+        }
+
+        // reading after the 1st container should be ok:
+        refID = 2;
+        final CloseableIterator<SAMRecord> iterator = reader.queryAlignmentStart(header.getSequence(refID).getSequenceName(), 1);
+        Assert.assertNotNull(iterator);
+        Assert.assertTrue(iterator.hasNext());
+    }
+
+    @BeforeTest
+    public void beforeTest() throws Exception {
+        Log.setGlobalLogLevel(Log.LogLevel.ERROR);
+
+        header = new SAMFileHeader();
+        header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
+        SAMReadGroupRecord readGroupRecord = new SAMReadGroupRecord("1");
+
+        rsf = new InMemoryReferenceSequenceFile();
+        int nofSequencesInDictionary = 3;
+        int sequenceLength = 1024 * 1024;
+        for (int i = 0; i < nofSequencesInDictionary; i++)
+            addRandomSequence(header, sequenceLength, rsf);
+
+        source = new ReferenceSource(rsf);
+
+        final SAMRecordSetBuilder builder = new SAMRecordSetBuilder(false, SAMFileHeader.SortOrder.coordinate);
+        builder.setHeader(header);
+        builder.setReadGroup(readGroupRecord);
+        header.addReadGroup(readGroupRecord);
+
+        ByteArrayOutputStream os = new ByteArrayOutputStream();
+        ByteArrayOutputStream indexOS = new ByteArrayOutputStream();
+        CRAMFileWriter writer = new CRAMFileWriter(os, indexOS, source, header, null);
+
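+        // emit a full slice's worth of reads per reference so that each sequence should occupy its
+        // own container(s) and therefore produce distinct index entries (assumes default slice packing)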
+        int readPairsPerSequence = CRAMContainerStreamWriter.DEFAULT_RECORDS_PER_SLICE;
+
+        for (SAMSequenceRecord sequenceRecord : header.getSequenceDictionary().getSequences()) {
+            int alignmentStart = 1;
+            for (int i = 0; i < readPairsPerSequence / 2; i++) {
+                builder.addPair(Integer.toString(i), sequenceRecord.getSequenceIndex(), alignmentStart, alignmentStart + 2);
+                alignmentStart++;
+            }
+        }
+
+        List<SAMRecord> list = new ArrayList<SAMRecord>(readPairsPerSequence);
+        list.addAll(builder.getRecords());
+        Collections.sort(list, new SAMRecordCoordinateComparator());
+
+        for (SAMRecord record : list)
+            writer.addAlignment(record);
+
+        list.clear();
+        writer.close();
+        cramBytes = os.toByteArray();
+        indexBytes = indexOS.toByteArray();
+    }
+
+    private static void addRandomSequence(SAMFileHeader header, int length, InMemoryReferenceSequenceFile rsf) {
+        String name = String.valueOf(header.getSequenceDictionary().size() + 1);
+        header.addSequence(new SAMSequenceRecord(name, length));
+        byte[] refBases = new byte[length];
+        Random random = new Random();
+        byte[] alphabet = "ACGTN".getBytes();
+        for (int i = 0; i < refBases.length; i++)
+            refBases[i] = alphabet[random.nextInt(alphabet.length)];
+
+        rsf.add(name, refBases);
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/CRAMIndexQueryTest.java b/src/test/java/htsjdk/samtools/CRAMIndexQueryTest.java
new file mode 100644
index 0000000..df94310
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/CRAMIndexQueryTest.java
@@ -0,0 +1,708 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2016 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools;
+
+import htsjdk.samtools.seekablestream.SeekableFileStream;
+import htsjdk.samtools.util.CloseableIterator;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.StandardCopyOption;
+import java.util.function.Function;
+
+/**
+ * Test index query calls against a *SamReader* on a CRAM File, which will use
+ * whatever index format (.bai or .crai converted to .bai) is available for the
+ * target file.
+ */
+public class CRAMIndexQueryTest {
+
+    private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools/cram");
+
+    private static final File cramQueryWithBAI = new File(TEST_DATA_DIR, "cramQueryWithBAI.cram");
+    private static final File cramQueryWithCRAI = new File(TEST_DATA_DIR, "cramQueryWithCRAI.cram");
+    private static File cramQueryWithLocalCRAI = null; // generated by @BeforeClass from cramQueryWithCRAI
+    private static final File cramQueryReference = new File(TEST_DATA_DIR, "human_g1k_v37.20.21.10M-10M200k.fasta");
+
+    private static final File cramQueryReadsWithBAI = new File(TEST_DATA_DIR, "cramQueryTest.cram");
+    private static File cramQueryReadsWithLocalCRAI = null; // generated by @BeforeClass from cramQueryReadsWithBAI
+
+    private static final File cramQueryTestEmptyWithBAI = new File(TEST_DATA_DIR, "cramQueryTestEmpty.cram");
+    private static File cramQueryTestEmptyWithLocalCRAI = null; // generated by @BeforeClass from cramQueryTestEmptyWithBAI
+    private static final File cramQueryReadsReference = new File(TEST_DATA_DIR,"../hg19mini.fasta");
+
+
+    // htsjdk currently generates .bai index files instead of .crai due to https://github.com/samtools/htsjdk/issues/531;
+    // however, we still want to exercise the htsjdk CRAI creation code. In order to run these index query tests against
+    // CRAI files generated by htsjdk, we copy the checked-in test files into new CRAMs and then manually generate .crai
+    // files to use as additional test cases.
+    @BeforeClass
+    public void createLocallyGeneratedCRAIFiles() throws IOException {
+        cramQueryWithLocalCRAI = File.createTempFile("cramQueryWithLocalCRAI.", ".cram");
+        cramQueryWithLocalCRAI.deleteOnExit();
+        File tempCRAIOut = new File(cramQueryWithLocalCRAI.getAbsolutePath() + ".crai");
+        tempCRAIOut.deleteOnExit();
+        createLocalCRAMAndCRAI(
+                cramQueryWithCRAI,
+                cramQueryReference,
+                cramQueryWithLocalCRAI,
+                tempCRAIOut
+        );
+
+        cramQueryReadsWithLocalCRAI = File.createTempFile("cramQueryReadsWithLocalCRAI.", ".cram");
+        tempCRAIOut = new File(cramQueryReadsWithLocalCRAI.getAbsolutePath() + ".crai");
+        tempCRAIOut.deleteOnExit();
+        cramQueryReadsWithLocalCRAI.deleteOnExit();
+        createLocalCRAMAndCRAI(
+                cramQueryReadsWithBAI,
+                cramQueryReadsReference,
+                cramQueryReadsWithLocalCRAI,
+                tempCRAIOut
+        );
+
+        cramQueryTestEmptyWithLocalCRAI = File.createTempFile("cramQueryTestEmptyWithLocalCRAI.", ".cram");
+        tempCRAIOut = new File(cramQueryTestEmptyWithLocalCRAI.getAbsolutePath() +".crai");
+        tempCRAIOut.deleteOnExit();
+        cramQueryTestEmptyWithLocalCRAI.deleteOnExit();
+        createLocalCRAMAndCRAI(
+                cramQueryTestEmptyWithBAI,
+                cramQueryReference,
+                cramQueryTestEmptyWithLocalCRAI,
+                tempCRAIOut
+        );
+    }
+
+    private void createLocalCRAMAndCRAI(
+            final File inputCRAM,
+            final File reference,
+            final File outputCRAM,
+            final File outputCRAI) throws IOException
+    {
+        Files.copy(inputCRAM.toPath(), outputCRAM.toPath(), StandardCopyOption.REPLACE_EXISTING);
+
+        SAMFileHeader samHeader = null;
+        try (SamReader reader = SamReaderFactory.makeDefault().referenceSequence(reference).open(inputCRAM)) {
+            samHeader = reader.getFileHeader();
+        }
+        try (FileOutputStream bos = new FileOutputStream(outputCRAI)) {
+            CRAMCRAIIndexer craiIndexer = new CRAMCRAIIndexer(bos, samHeader);
+            craiIndexer.writeIndex(new SeekableFileStream(outputCRAM), bos);
+        }
+    }
+
+    @DataProvider(name = "singleIntervalOverlapping")
+    public Object[][] singleIntervalOverlapping() {
+        return new Object[][] {
+            {cramQueryWithCRAI, cramQueryReference, new QueryInterval(0, 100009, 100009), new String[]{"a", "b", "c"}},
+            {cramQueryWithLocalCRAI, cramQueryReference, new QueryInterval(0, 100009, 100009), new String[]{"a", "b", "c"}},
+            {cramQueryWithBAI, cramQueryReference, new QueryInterval(0, 100009, 100009), new String[]{"a", "b", "c"}},
+
+            {cramQueryWithCRAI, cramQueryReference, new QueryInterval(0, 100009, 100011), new String[]{"a", "b", "c", "d", "e"}},
+            {cramQueryWithLocalCRAI, cramQueryReference, new QueryInterval(0, 100009, 100011), new String[]{"a", "b", "c", "d", "e"}},
+            {cramQueryWithBAI, cramQueryReference, new QueryInterval(0, 100009, 100011), new String[]{"a", "b", "c", "d", "e"}},
+
+            // interval with 1 start
+            {cramQueryWithCRAI, cramQueryReference, new QueryInterval(0, 1, 100010), new String[]{"a", "b", "c", "d"}},
+            {cramQueryWithLocalCRAI, cramQueryReference, new QueryInterval(0, 1, 100010), new String[]{"a", "b", "c", "d"}},
+            {cramQueryWithBAI, cramQueryReference, new QueryInterval(0, 1, 100010), new String[]{"a", "b", "c", "d"}},
+
+            // interval with 0 end
+            {cramQueryWithCRAI, cramQueryReference, new QueryInterval(0, 100015, 0), new String[]{"a", "b", "c", "d", "e", "f"}},
+            {cramQueryWithLocalCRAI, cramQueryReference, new QueryInterval(0, 100015, 0), new String[]{"a", "b", "c", "d", "e", "f"}},
+            {cramQueryWithBAI, cramQueryReference, new QueryInterval(0, 100015, 0), new String[]{"a", "b", "c", "d", "e", "f"}},
+
+            // interval with 1 start and 0 end
+            {cramQueryWithCRAI, cramQueryReference, new QueryInterval(0, 1, 0), new String[]{"a", "b", "c", "d", "e", "f",  "f"}},
+            {cramQueryWithLocalCRAI, cramQueryReference, new QueryInterval(0, 1, 0), new String[]{"a", "b", "c", "d", "e", "f",  "f"}},
+            {cramQueryWithBAI, cramQueryReference, new QueryInterval(0, 1, 0), new String[]{"a", "b", "c", "d", "e", "f",  "f"}},
+
+            //totally empty cram file
+            {cramQueryTestEmptyWithBAI, cramQueryReadsReference, new QueryInterval(0, 1, 0), new String[]{}},
+            {cramQueryTestEmptyWithLocalCRAI, cramQueryReadsReference, new QueryInterval(0, 1, 0), new String[]{}},
+        };
+    }
+
+    @Test(dataProvider="singleIntervalOverlapping")
+    public void testQueryOverlappingSingleInterval(
+        final File cramFileName,
+        final File referenceFileName,
+        final QueryInterval interval,
+        final String[] expectedNames) throws IOException
+    {
+        doQueryTest(
+                reader -> reader.queryOverlapping(new QueryInterval[]{interval}),
+                cramFileName,
+                referenceFileName,
+                expectedNames
+        );
+    }
+
+    @Test(dataProvider="singleIntervalOverlapping")
+    public void testQueryOverlappingSequence(
+        final File cramFileName,
+        final File referenceFileName,
+        final QueryInterval interval,
+        final String[] expectedNames) throws IOException
+    {
+        doQueryTest(
+            reader -> reader.queryOverlapping(
+                reader.getFileHeader().getSequence(interval.referenceIndex).getSequenceName(),
+                interval.start,
+                interval.end
+            ),
+            cramFileName,
+            referenceFileName,
+            expectedNames
+        );
+    }
+
+    @Test(dataProvider="singleIntervalOverlapping")
+    public void testQuerySingleIntervalContainedFalse(
+        final File cramFileName,
+        final File referenceFileName,
+        final QueryInterval interval,
+        final String[] expectedNames) throws IOException
+    {
+        doQueryTest(
+            reader -> reader.query(new QueryInterval[]{interval}, false),
+            cramFileName,
+            referenceFileName,
+            expectedNames
+        );
+    }
+
+    @Test(dataProvider="singleIntervalOverlapping")
+    public void testQuerySequenceContainedFalse(
+        final File cramFileName,
+        final File referenceFileName,
+        final QueryInterval interval,
+        final String[] expectedNames) throws IOException
+    {
+        doQueryTest(
+            reader -> reader.query(
+                reader.getFileHeader().getSequence(interval.referenceIndex).getSequenceName(),
+                interval.start,
+                interval.end,
+                false
+            ),
+            cramFileName,
+            referenceFileName,
+            expectedNames
+        );
+    }
+
+    @DataProvider(name = "singleIntervalContained")
+    public Object[][] singleIntervalContained() {
+        return new Object[][] {
+            {cramQueryWithCRAI, cramQueryReference, new QueryInterval(0, 100013, 100070), new String[]{"f", "f",}},
+            {cramQueryWithLocalCRAI, cramQueryReference, new QueryInterval(0, 100013, 100070), new String[]{"f", "f",}},
+            {cramQueryWithBAI, cramQueryReference, new QueryInterval(0, 100013, 100070), new String[]{"f", "f"}},
+
+            // interval with 1 start
+            {cramQueryWithCRAI, cramQueryReference, new QueryInterval(0, 1, 100100), new String[]{"e", "f", "f"}},
+            {cramQueryWithLocalCRAI, cramQueryReference, new QueryInterval(0, 1, 100100), new String[]{"e", "f", "f"}},
+            {cramQueryWithBAI, cramQueryReference, new QueryInterval(0, 1, 100100), new String[]{"e", "f", "f"}},
+
+            // interval with 0 end
+            {cramQueryWithCRAI, cramQueryReference, new QueryInterval(0, 100010, 0), new String[]{"d", "e", "f", "f",}},
+            {cramQueryWithLocalCRAI, cramQueryReference, new QueryInterval(0, 100010, 0), new String[]{"d", "e", "f", "f",}},
+            {cramQueryWithBAI, cramQueryReference, new QueryInterval(0, 100010, 0), new String[]{"d", "e", "f", "f",}},
+
+            // interval with 1 start and 0 end
+            {cramQueryWithCRAI, cramQueryReference, new QueryInterval(0, 1, 0), new String[]{"a", "b", "c", "d", "e", "f",  "f"}},
+            {cramQueryWithLocalCRAI, cramQueryReference, new QueryInterval(0, 1, 0), new String[]{"a", "b", "c", "d", "e", "f",  "f"}},
+            {cramQueryWithBAI, cramQueryReference, new QueryInterval(0, 1, 0), new String[]{"a", "b", "c", "d", "e", "f",  "f"}},
+
+            //totally empty cram file
+            {cramQueryTestEmptyWithBAI, cramQueryReadsReference, new QueryInterval(0, 1, 0), new String[]{}},
+            {cramQueryTestEmptyWithLocalCRAI, cramQueryReadsReference, new QueryInterval(0, 1, 0), new String[]{}},
+        };
+    }
+
+    @Test(dataProvider="singleIntervalContained")
+    public void testQueryContainedSingleInterval(
+        final File cramFileName,
+        final File referenceFileName,
+        final QueryInterval interval,
+        final String[] expectedNames) throws IOException
+    {
+        doQueryTest(
+            reader -> reader.queryContained(new QueryInterval[]{interval}),
+            cramFileName,
+            referenceFileName,
+            expectedNames
+        );
+    }
+
+    @Test(dataProvider="singleIntervalContained")
+    public void testQueryContainedSequence(
+        final File cramFileName,
+        final File referenceFileName,
+        final QueryInterval interval,
+        final String[] expectedNames) throws IOException
+    {
+        doQueryTest(
+            reader -> reader.queryContained(
+                reader.getFileHeader().getSequence(interval.referenceIndex).getSequenceName(),
+                interval.start,
+                interval.end
+            ),
+            cramFileName,
+            referenceFileName,
+            expectedNames
+        );
+    }
+
+    @Test(dataProvider="singleIntervalContained")
+    public void testQuerySingleIntervalContainedTrue(
+            final File cramFileName,
+            final File referenceFileName,
+            final QueryInterval interval,
+            final String[] expectedNames) throws IOException
+    {
+        doQueryTest(
+            reader -> reader.query(new QueryInterval[]{interval}, true),
+            cramFileName,
+            referenceFileName,
+            expectedNames
+        );
+    }
+
+    @Test(dataProvider="singleIntervalContained")
+    public void testQuerySequenceContainedTrue(
+            final File cramFileName,
+            final File referenceFileName,
+            final QueryInterval interval,
+            final String[] expectedNames) throws IOException
+    {
+        doQueryTest(
+            reader -> reader.query(
+                reader.getFileHeader().getSequence(interval.referenceIndex).getSequenceName(),
+                interval.start,
+                interval.end,
+                true
+            ),
+            cramFileName,
+            referenceFileName,
+            expectedNames
+        );
+    }
+
+    @DataProvider(name = "multipleIntervalOverlapping")
+    public Object[][] multipleIntervalOverlapping() {
+        return new Object[][]{
+            {cramQueryWithCRAI, cramQueryReference,
+                    new QueryInterval[]{new QueryInterval(0, 100010, 100010), new QueryInterval(0, 100011, 100011)},
+                    new String[]{"a", "b", "c", "d", "e"}},
+            {cramQueryWithLocalCRAI, cramQueryReference,
+                    new QueryInterval[]{new QueryInterval(0, 100010, 100010), new QueryInterval(0, 100011, 100011)},
+                    new String[]{"a", "b", "c", "d", "e"}},
+            {cramQueryWithBAI, cramQueryReference,
+                    new QueryInterval[]{new QueryInterval(0, 100010, 100010), new QueryInterval(0, 100011, 100011)},
+                    new String[]{"a", "b", "c", "d", "e"}},
+            // no matching reads
+            {cramQueryReadsWithBAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 300, 310), new QueryInterval(1, 300, 310)},
+                    new String[]{}},
+            {cramQueryReadsWithLocalCRAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 300, 310), new QueryInterval(1, 300, 310)},
+                    new String[]{}},
+            // matching reads from first interval only
+            {cramQueryReadsWithBAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 205, 206), new QueryInterval(3, 300, 301)},
+                    new String[]{"a", "b"}},
+            {cramQueryReadsWithLocalCRAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 205, 206), new QueryInterval(3, 300, 301)},
+                    new String[]{"a", "b"}},
+            // matching reads from last interval only
+            {cramQueryReadsWithBAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 100, 101), new QueryInterval(3, 700, 701)},
+                    new String[]{"k"}},
+            {cramQueryReadsWithLocalCRAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 100, 101), new QueryInterval(3, 700, 701)},
+                    new String[]{"k"}},
+            //matching reads from each interval
+            {cramQueryReadsWithBAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 205, 206), new QueryInterval(3, 700, 701)},
+                    new String[]{"a", "b", "k"}},
+            {cramQueryReadsWithLocalCRAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 205, 206), new QueryInterval(3, 700, 701)},
+                    new String[]{"a", "b", "k"}},
+            //matching reads from each interval - 4 intervals
+            {cramQueryReadsWithBAI, cramQueryReadsReference,
+                    new QueryInterval[]{
+                            new QueryInterval(0, 200, 201), new QueryInterval(1, 500, 501),
+                            new QueryInterval(2, 300, 301), new QueryInterval(3, 700, 701)},
+                    new String[]{"a", "f", "i", "k"}},
+            {cramQueryReadsWithLocalCRAI, cramQueryReadsReference,
+                    new QueryInterval[]{
+                            new QueryInterval(0, 200, 201), new QueryInterval(1, 500, 501),
+                            new QueryInterval(2, 300, 301), new QueryInterval(3, 700, 701)},
+                    new String[]{"a", "f", "i", "k"}},
+            // first read is before the first interval
+            {cramQueryReadsWithBAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(2, 300, 301), new QueryInterval(3, 700, 701)},
+                    new String[]{"i", "k"}},
+            {cramQueryReadsWithLocalCRAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(2, 300, 301), new QueryInterval(3, 700, 701)},
+                    new String[]{"i", "k"}},
+            // first interval is before the first read
+            {cramQueryReadsWithBAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 100, 101), new QueryInterval(0, 200, 201)},
+                    new String[]{"a"}},
+            {cramQueryReadsWithLocalCRAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 100, 101), new QueryInterval(0, 200, 201)},
+                    new String[]{"a"}},
+            // intervals in reverse order
+            {cramQueryReadsWithBAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 100, 101), new QueryInterval(0, 200, 201)},
+                    new String[]{"a"}},
+            {cramQueryReadsWithLocalCRAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 100, 101), new QueryInterval(0, 200, 201)},
+                    new String[]{"a"}},
+        };
+    }
+
+    @Test(dataProvider="multipleIntervalOverlapping")
+    public void testQueryOverlappingMultipleIntervals(
+            final File cramFileName,
+            final File referenceFileName,
+            final QueryInterval[] intervals,
+            final String[] expectedNames) throws IOException
+    {
+        QueryInterval[] optimizedIntervals = QueryInterval.optimizeIntervals(intervals);
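+        // require that optimization leaves more than one interval so the multi-interval query path is exercised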
+        Assert.assertTrue(optimizedIntervals.length > 1);
+
+        doQueryTest(
+                reader -> reader.queryOverlapping(optimizedIntervals),
+                cramFileName,
+                referenceFileName,
+                expectedNames
+        );
+    }
+
+    @DataProvider(name = "otherMultipleIntervals")
+    public Object[][] otherMultipleIntervals() {
+        return new Object[][]{
+            // accept an empty QueryIntervalArray
+            {cramQueryWithBAI, cramQueryReference,
+                    new QueryInterval[]{},
+                    new String[]{}},
+            // intervals overlapping - optimized to a single interval
+            {cramQueryReadsWithBAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 1000, 1030), new QueryInterval(0, 1020, 1076)},
+                    new String[]{"d"}},
+            {cramQueryReadsWithLocalCRAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 1000, 1030), new QueryInterval(0, 1020, 1076)},
+                    new String[]{"d"}}
+        };
+    }
+
+    // these are tested separately because we want the normal multi-interval test to
+    // assert that the interval list size is > 1 post-optimization to ensure we're
+    // using more than one interval; these tests optimize down to 0 or 1 interval
+    @Test(dataProvider="otherMultipleIntervals")
+    public void testOtherMultipleIntervals(
+            final File cramFileName,
+            final File referenceFileName,
+            final QueryInterval[] intervals,
+            final String[] expectedNames) throws IOException
+    {
+        QueryInterval[] optimizedIntervals = QueryInterval.optimizeIntervals(intervals);
+        doQueryTest(
+                reader -> reader.queryContained(optimizedIntervals),
+                cramFileName,
+                referenceFileName,
+                expectedNames
+        );
+        doQueryTest(
+                reader -> reader.queryOverlapping(optimizedIntervals),
+                cramFileName,
+                referenceFileName,
+                expectedNames
+        );
+    }
+
+    @DataProvider(name = "multipleIntervalContained")
+    public Object[][] multipleIntervalContained() {
+        return new Object[][]{
+            {cramQueryWithCRAI, cramQueryReference,
+                    new QueryInterval[]{new QueryInterval(0, 100008, 100008), new QueryInterval(0, 100013, 0)},
+                    new String[]{"f", "f"}},
+            {cramQueryWithLocalCRAI, cramQueryReference,
+                    new QueryInterval[]{new QueryInterval(0, 100008, 100008), new QueryInterval(0, 100013, 0)},
+                    new String[]{"f", "f"}},
+            {cramQueryWithBAI, cramQueryReference,
+                    new QueryInterval[]{new QueryInterval(0, 100008, 100008), new QueryInterval(0, 100013, 0)},
+                    new String[]{"f", "f"}},
+            // no matching reads
+            {cramQueryReadsWithBAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 300, 310), new QueryInterval(1, 300, 310)},
+                    new String[]{}},
+            {cramQueryReadsWithLocalCRAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 300, 310), new QueryInterval(1, 300, 310)},
+                    new String[]{}},
+            // matching reads from first interval only
+            {cramQueryReadsWithBAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 205, 305), new QueryInterval(3, 300, 301)},
+                    new String[]{"b", "c"}},
+            {cramQueryReadsWithLocalCRAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 205, 305), new QueryInterval(3, 300, 301)},
+                    new String[]{"b", "c"}},
+            // matching reads from last interval only
+            {cramQueryReadsWithBAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 100, 101), new QueryInterval(3, 700, 776)},
+                    new String[]{"k"}},
+            {cramQueryReadsWithLocalCRAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 100, 101), new QueryInterval(3, 700, 776)},
+                    new String[]{"k"}},
+            //matching reads from each interval
+            {cramQueryReadsWithBAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 200, 281), new QueryInterval(3, 700, 776)},
+                    new String[]{"a", "b", "k"}},
+            {cramQueryReadsWithLocalCRAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 200, 281), new QueryInterval(3, 700, 776)},
+                    new String[]{"a", "b", "k"}},
+            //matching reads from each interval - 4 intervals
+            {cramQueryReadsWithBAI, cramQueryReadsReference,
+                    new QueryInterval[]{
+                            new QueryInterval(0, 200, 281), new QueryInterval(1, 500, 576),
+                            new QueryInterval(2, 300, 376), new QueryInterval(3, 700, 776)},
+                    new String[]{"a", "b", "f", "i", "k"}},
+                {cramQueryReadsWithLocalCRAI, cramQueryReadsReference,
+                    new QueryInterval[]{
+                            new QueryInterval(0, 200, 281), new QueryInterval(1, 500, 576),
+                            new QueryInterval(2, 300, 376), new QueryInterval(3, 700, 776)},
+                    new String[]{"a", "b", "f", "i", "k"}},
+            // first read is before the first interval
+            {cramQueryReadsWithBAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(2, 300, 301), new QueryInterval(3, 700, 776)},
+                    new String[]{"k"}},
+            {cramQueryReadsWithLocalCRAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(2, 300, 301), new QueryInterval(3, 700, 776)},
+                    new String[]{"k"}},
+            // first interval is before the first read
+            {cramQueryReadsWithBAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 100, 101), new QueryInterval(0, 200, 276)},
+                    new String[]{"a"}},
+            {cramQueryReadsWithLocalCRAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 100, 101), new QueryInterval(0, 200, 276)},
+                    new String[]{"a"}},
+            // intervals in reverse order
+            {cramQueryReadsWithBAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 200, 276), new QueryInterval(0, 100, 101)},
+                    new String[]{"a"}},
+            {cramQueryReadsWithLocalCRAI, cramQueryReadsReference,
+                    new QueryInterval[]{new QueryInterval(0, 200, 276), new QueryInterval(0, 100, 101)},
+                    new String[]{"a"}},
+        };
+    }
+
+    @Test(dataProvider="multipleIntervalContained")
+    public void testQueryContainedMultipleIntervals(
+            final File cramFileName,
+            final File referenceFileName,
+            final QueryInterval[] intervals,
+            final String[] expectedNames) throws IOException
+    {
+        QueryInterval[] optimizedIntervals = QueryInterval.optimizeIntervals(intervals);
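+        // as in the overlapping test, require more than one interval after optimization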
+        Assert.assertTrue(optimizedIntervals.length > 1);
+        doQueryTest(
+                reader -> reader.queryContained(optimizedIntervals),
+                cramFileName,
+                referenceFileName,
+                expectedNames
+        );
+    }
+
+    @DataProvider(name = "unmappedQueries")
+    public Object[][] unmappedQueries() {
+        return new Object[][] {
+            {cramQueryWithCRAI, cramQueryReference, new String[]{"g", "h", "h", "i", "i"}},
+            {cramQueryWithLocalCRAI, cramQueryReference, new String[]{"g", "h", "h", "i", "i"}},
+            {cramQueryWithBAI, cramQueryReference, new String[]{"g", "h", "h", "i", "i"}},
+            //no unmapped reads
+            {cramQueryReadsWithBAI, cramQueryReadsReference, new String[]{}},
+            {cramQueryReadsWithLocalCRAI, cramQueryReadsReference, new String[]{}}
+        };
+    }
+
+    @Test(dataProvider="unmappedQueries")
+    public void testQueryUnmapped(
+            final File cramFileName,
+            final File referenceFileName,
+            final String[] expectedNames) throws IOException
+    {
+        doQueryTest(
+                reader -> reader.queryUnmapped(),
+                cramFileName,
+                referenceFileName,
+                expectedNames
+        );
+    }
+
+    @DataProvider(name = "mateQueries")
+    public Object[][] mateQueries() {
+        return new Object[][] {
+                {cramQueryWithCRAI, cramQueryReference, "f"},
+                {cramQueryWithLocalCRAI, cramQueryReference, "f"},
+                {cramQueryWithBAI, cramQueryReference, "f"}
+        };
+    }
+
+    @Test(dataProvider="mateQueries")
+    public void testQueryMate(
+        final File cramFileName,
+        final File referenceFileName,
+        final String expectedName) throws IOException
+    {
+        SamReaderFactory factory = SamReaderFactory.makeDefault();
+        if (referenceFileName != null) {
+            factory = factory.referenceSequence(referenceFileName);
+        }
+        SAMRecord firstRecord = null;
+        SAMRecord secondRecord = null;
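+        // fetch the two records that start at 20:100013 so each can then be passed to queryMate()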
+        try (final SamReader reader = factory.open(cramFileName)) {
+            final CloseableIterator<SAMRecord> it = reader.queryAlignmentStart("20", 100013);
+            Assert.assertTrue(it.hasNext());
+            firstRecord = it.next();
+            Assert.assertTrue(it.hasNext());
+            secondRecord = it.next();
+            Assert.assertFalse(it.hasNext());
+        }
+
+        // get the mate for the first record
+        try (final SamReader reader = factory.open(cramFileName)) {
+            final SAMRecord samRecord = reader.queryMate(firstRecord);
+            Assert.assertEquals(samRecord, secondRecord);
+        }
+
+        // now query the mate's mate to ensure we get symmetric results
+        try (final SamReader reader = factory.open(cramFileName)) {
+            final SAMRecord samRecord = reader.queryMate(secondRecord);
+            Assert.assertEquals(samRecord, firstRecord);
+        }
+    }
+
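+    /**
+     * Opens the CRAM (with its reference, when one is given), runs the supplied query,
+     * and asserts that the returned reads match the expected names in order.
+     */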
+    private void doQueryTest(
+        final Function<SamReader, CloseableIterator <SAMRecord>> getIterator,
+        final File cramFileName,
+        final File referenceFileName,
+        final String[] expectedNames) throws IOException
+    {
+        SamReaderFactory factory = SamReaderFactory.makeDefault();
+        if (referenceFileName != null) {
+            factory = factory.referenceSequence(referenceFileName);
+        }
+        try (final SamReader reader = factory.open(cramFileName);
+             final CloseableIterator<SAMRecord> it = getIterator.apply(reader)) {
+            int count = 0;
+            while (it.hasNext()) {
+                SAMRecord samRec = it.next();
+                Assert.assertTrue(count < expectedNames.length);
+                Assert.assertEquals(samRec.getReadName(), expectedNames[count]);
+                count++;
+            }
+            Assert.assertEquals(count, expectedNames.length);
+        }
+    }
+
+
+    @DataProvider(name = "iteratorStateTests")
+    public Object[][] iteratorStateQueries() {
+        return new Object[][] {
+                {cramQueryWithCRAI, cramQueryReference},
+                {cramQueryWithLocalCRAI, cramQueryReference},
+                {cramQueryWithBAI, cramQueryReference}
+        };
+    }
+
+    // The current CRAMFileReader implementation allows multiple iterators to exist on a
+    // CRAM reader at the same time, but they're not properly isolated from each other. When
+    // CRAMFileReader is changed to support the SamReader contract of one-iterator-at-a-time
+    // (https://github.com/samtools/htsjdk/issues/563), these can be re-enabled.
+    //
+    @Test(dataProvider="iteratorStateTests", expectedExceptions=SAMException.class, enabled=false)
+    public void testIteratorState(
+            final File cramFileName,
+            final File referenceFileName) throws IOException
+    {
+        SamReaderFactory factory = SamReaderFactory.makeDefault();
+        if (referenceFileName != null) {
+            factory = factory.referenceSequence(referenceFileName);
+        }
+
+        try (final SamReader reader = factory.open(cramFileName)) {
+            final CloseableIterator<SAMRecord> origIt = reader.iterator();
+
+            // opening the second iterator should throw
+            final CloseableIterator<SAMRecord> overlapIt = reader.queryOverlapping("20", 100013, 100070);
+        }
+    }
+
+    @DataProvider(name = "unmappedSliceTest")
+    public Object[][] unmappedMultiSliceTest() {
+        return new Object[][] {
+            // the main test feature of these files is that they have several mapped reads followed by
+            // some number of unmapped reads, each created with seqs_per_slice = 100 to force the unmapped
+            // reads to be distributed over multiple slices (at least for large numbers of unmapped reads)
+            // tests the fix to https://github.com/samtools/htsjdk/issues/562
+            {new File(TEST_DATA_DIR, "NA12878.20.21.1-100.100-SeqsPerSlice.0-unMapped.cram"),
+                    new File(TEST_DATA_DIR, "human_g1k_v37.20.21.1-100.fasta"), 0},
+            {new File(TEST_DATA_DIR, "NA12878.20.21.1-100.100-SeqsPerSlice.1-unMapped.cram"),
+                    new File(TEST_DATA_DIR, "human_g1k_v37.20.21.1-100.fasta"), 1},
+            {new File(TEST_DATA_DIR, "NA12878.20.21.1-100.100-SeqsPerSlice.500-unMapped.cram"),
+                    new File(TEST_DATA_DIR, "human_g1k_v37.20.21.1-100.fasta"), 500},
+        };
+    }
+
+    @Test(dataProvider = "unmappedSliceTest")
+    public void testUnmappedMultiSlice(
+            final File cramFileName,
+            final File referenceFileName,
+            final int expectedCount) throws IOException
+    {
+        SamReaderFactory factory = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT);
+        factory = factory.referenceSequence(referenceFileName);
+
+        int count = 0;
+        try (final SamReader reader = factory.open(cramFileName);
+             final CloseableIterator<SAMRecord> it = reader.queryUnmapped())
+        {
+            while (it.hasNext()) {
+                it.next();
+                count++;
+            }
+        }
+        Assert.assertEquals(count, expectedCount);
+    }
+
+}
diff --git a/src/tests/java/htsjdk/samtools/ChunkTest.java b/src/test/java/htsjdk/samtools/ChunkTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/ChunkTest.java
rename to src/test/java/htsjdk/samtools/ChunkTest.java
diff --git a/src/tests/java/htsjdk/samtools/CigarCodecTest.java b/src/test/java/htsjdk/samtools/CigarCodecTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/CigarCodecTest.java
rename to src/test/java/htsjdk/samtools/CigarCodecTest.java
diff --git a/src/tests/java/htsjdk/samtools/CigarTest.java b/src/test/java/htsjdk/samtools/CigarTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/CigarTest.java
rename to src/test/java/htsjdk/samtools/CigarTest.java
diff --git a/src/test/java/htsjdk/samtools/DownsamplingIteratorTests.java b/src/test/java/htsjdk/samtools/DownsamplingIteratorTests.java
new file mode 100644
index 0000000..e84ee2e
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/DownsamplingIteratorTests.java
@@ -0,0 +1,87 @@
+package htsjdk.samtools;
+
+import htsjdk.samtools.DownsamplingIteratorFactory.Strategy;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.EnumMap;
+import java.util.List;
+import java.util.Random;
+
+/**
+ * Tests for the downsampling iterator class.
+ * @author Tim Fennell
+ */
+public class DownsamplingIteratorTests {
+    final int NUM_TEMPLATES = 50000;
+    final EnumMap<Strategy, Double> ACCURACY = new EnumMap<Strategy,Double>(Strategy.class){{
+        put(Strategy.HighAccuracy, 0.001);
+        put(Strategy.Chained, 0.005);
+        put(Strategy.ConstantMemory, 0.01);
+    }};
+
+    private static Random getRandom(){
+        // this test is probably too strict in its tolerances;
+        // not every random seed works: 10000, for example, is rejected
+        return new Random(10001);
+    }
+
+    @Test
+    public void testBasicFunction() {
+        final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
+        final Random r = getRandom();
+        for (int i=0; i<NUM_TEMPLATES; ++i) {
+            builder.addPair("pair" + r.nextInt(), r.nextInt(24), r.nextInt(1000000), r.nextInt(1000000));
+        }
+        final Collection<SAMRecord> recs = builder.getRecords();
+
+        runTests("testBasicFunction", recs);
+    }
+
+    private void runTests(final String name, final Collection<SAMRecord> recs) {
+        for (final DownsamplingIteratorFactory.Strategy strategy : DownsamplingIteratorFactory.Strategy.values()) {
+            final double accuracy = ACCURACY.get(strategy);
+
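+            // downsample at several retention probabilities and check that the accepted fraction
+            // stays within the accuracy bound advertised for this strategy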
+            for (final double p : new double[]{0, 0.01, 0.1, 0.5, 0.9, 1}) {
+                final DownsamplingIterator iterator = DownsamplingIteratorFactory.make(recs.iterator(), strategy, p, accuracy, 42);
+                final List<SAMRecord> out = new ArrayList<SAMRecord>();
+                while (iterator.hasNext()) out.add(iterator.next());
+
+                final String testcase = name + ": strategy=" + strategy.name() + ", p=" + p + ", accuracy=" + accuracy;
+
+                final double readFraction = iterator.getAcceptedFraction();
+                Assert.assertEquals(out.size(), iterator.getAcceptedCount(), "Mismatched sizes with " + testcase);
+                Assert.assertTrue(readFraction > p - accuracy && readFraction < p + accuracy, "Read fraction " + readFraction + " out of bounds in " + testcase);
+            }
+        }
+    }
+
+    @Test
+    public void testMixOfPairsAndFrags() {
+        final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
+        final Random r = getRandom();
+        for (int i=0; i<NUM_TEMPLATES; ++i) {
+            builder.addFrag("frag" + r.nextInt(), r.nextInt(24), r.nextInt(1000000), false);
+            builder.addPair("pair" + r.nextInt(), r.nextInt(24), r.nextInt(1000000), r.nextInt(1000000));
+        }
+
+        final Collection<SAMRecord> recs = builder.getRecords();
+        runTests("testMixOfPairsAndFrags", recs);
+    }
+
+    @Test
+    public void testSecondaryAlignments() {
+        final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
+        final Random r = getRandom();
+        for (int i=0; i<NUM_TEMPLATES; ++i) {
+            final int x = r.nextInt();
+            builder.addPair("pair" + x, r.nextInt(24), r.nextInt(1000000), r.nextInt(1000000));
+            builder.addPair("pair" + x, r.nextInt(24), r.nextInt(24), r.nextInt(1000000), r.nextInt(1000000), false, false, "50M", "50M", false, true, true, true, 20);
+        }
+
+        final Collection<SAMRecord> recs = builder.getRecords();
+        runTests("testSecondaryAlignments", recs);
+    }
+}
diff --git a/src/tests/java/htsjdk/samtools/DuplicateSetIteratorTest.java b/src/test/java/htsjdk/samtools/DuplicateSetIteratorTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/DuplicateSetIteratorTest.java
rename to src/test/java/htsjdk/samtools/DuplicateSetIteratorTest.java
diff --git a/src/tests/java/htsjdk/samtools/GenomicIndexUtilTest.java b/src/test/java/htsjdk/samtools/GenomicIndexUtilTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/GenomicIndexUtilTest.java
rename to src/test/java/htsjdk/samtools/GenomicIndexUtilTest.java
diff --git a/src/tests/java/htsjdk/samtools/MergingSamRecordIteratorGroupCollisionTest.java b/src/test/java/htsjdk/samtools/MergingSamRecordIteratorGroupCollisionTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/MergingSamRecordIteratorGroupCollisionTest.java
rename to src/test/java/htsjdk/samtools/MergingSamRecordIteratorGroupCollisionTest.java
diff --git a/src/tests/java/htsjdk/samtools/MergingSamRecordIteratorTest.java b/src/test/java/htsjdk/samtools/MergingSamRecordIteratorTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/MergingSamRecordIteratorTest.java
rename to src/test/java/htsjdk/samtools/MergingSamRecordIteratorTest.java
diff --git a/src/tests/java/htsjdk/samtools/ProgramRecordChainingTest.java b/src/test/java/htsjdk/samtools/ProgramRecordChainingTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/ProgramRecordChainingTest.java
rename to src/test/java/htsjdk/samtools/ProgramRecordChainingTest.java
diff --git a/src/tests/java/htsjdk/samtools/SAMBinaryTagAndValueUnitTest.java b/src/test/java/htsjdk/samtools/SAMBinaryTagAndValueUnitTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/SAMBinaryTagAndValueUnitTest.java
rename to src/test/java/htsjdk/samtools/SAMBinaryTagAndValueUnitTest.java
diff --git a/src/tests/java/htsjdk/samtools/SAMCloneTest.java b/src/test/java/htsjdk/samtools/SAMCloneTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/SAMCloneTest.java
rename to src/test/java/htsjdk/samtools/SAMCloneTest.java
diff --git a/src/test/java/htsjdk/samtools/SAMFileReaderTest.java b/src/test/java/htsjdk/samtools/SAMFileReaderTest.java
new file mode 100644
index 0000000..b291011
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/SAMFileReaderTest.java
@@ -0,0 +1,170 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2010 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools;
+
+import htsjdk.samtools.cram.CRAMException;
+import htsjdk.samtools.util.CloseableIterator;
+import htsjdk.samtools.util.CloserUtil;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+
+public class SAMFileReaderTest {
+    private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools");
+
+    @Test(dataProvider = "variousFormatReaderTestCases")
+    public void variousFormatReaderTest(final String inputFile) {
+        final File input = new File(TEST_DATA_DIR, inputFile);
+        final SamReader reader = SamReaderFactory.makeDefault().open(input);
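+        // the loop body is intentionally empty; we only check that every record can be decoded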
+        for (final SAMRecord rec : reader) {
+        }
+        CloserUtil.close(reader);
+    }
+
+    @DataProvider(name = "variousFormatReaderTestCases")
+    public Object[][] variousFormatReaderTestCases() {
+        final Object[][] scenarios = new Object[][]{
+                {"block_compressed.sam.gz"},
+                {"uncompressed.sam"},
+                {"compressed.sam.gz"},
+                {"compressed.bam"},
+        };
+        return scenarios;
+    }
+
+    // tests for CRAM indexing
+
+    @Test(dataProvider = "SmallCRAMTest")
+    public void CRAMIndexTest(final String inputFile, final String referenceFile, QueryInterval queryInterval, String expectedReadName) {
+        final File input = new File(TEST_DATA_DIR, inputFile);
+        final File reference = new File(TEST_DATA_DIR, referenceFile);
+        final SamReader reader = SamReaderFactory.makeDefault().referenceSequence(reference).open(input);
+        Assert.assertTrue(reader.hasIndex());
+
+        final CloseableIterator<SAMRecord> iterator = reader.query(new QueryInterval[]{queryInterval}, false);
+        Assert.assertTrue(iterator.hasNext());
+        SAMRecord r1 = iterator.next();
+        Assert.assertEquals(r1.getReadName(), expectedReadName);
+
+        CloserUtil.close(reader);
+    }
+
+    @DataProvider(name = "SmallCRAMTest")
+    public Object[][] CRAMIndexTestData() {
+        final Object[][] testFiles = new Object[][]{
+                {"cram/test.cram", "cram/auxf.fa", new QueryInterval(0, 12, 13), "Jim"},
+                {"cram_with_bai_index.cram", "hg19mini.fasta", new QueryInterval(3, 700, 0), "k"},
+                {"cram_with_crai_index.cram", "hg19mini.fasta", new QueryInterval(2, 350, 0), "i"},
+        };
+        return testFiles;
+    }
+
+    @Test(dataProvider = "NoIndexCRAMTest")
+    public void CRAMNoIndexTest(final String inputFile, final String referenceFile) {
+        final File input = new File(TEST_DATA_DIR, inputFile);
+        final File reference = new File(TEST_DATA_DIR, referenceFile);
+        final SamReader reader = SamReaderFactory.makeDefault().referenceSequence(reference).open(input);
+        Assert.assertFalse(reader.hasIndex());
+        CloserUtil.close(reader);
+    }
+
+    @DataProvider(name = "NoIndexCRAMTest")
+    public Object[][] CRAMNoIndexTestData() {
+        final Object[][] testFiles = new Object[][]{
+                {"cram/test2.cram", "cram/auxf.fa"},
+        };
+        return testFiles;
+    }
+
+    // Tests for the SAMRecordFactory usage
+    class SAMRecordFactoryTester extends DefaultSAMRecordFactory {
+        int samRecordsCreated;
+        int bamRecordsCreated;
+
+        public SAMRecord createSAMRecord(final SAMFileHeader header) {
+            ++samRecordsCreated;
+            return super.createSAMRecord(header);
+        }
+
+        public BAMRecord createBAMRecord(final SAMFileHeader header, final int referenceSequenceIndex, final int alignmentStart, final short readNameLength, final short mappingQuality, final int indexingBin, final int cigarLen, final int flags, final int readLen, final int mateReferenceSequenceIndex, final int mateAlignmentStart, final int insertSize, final byte[] variableLengthBlock) {
+            ++bamRecordsCreated;
+            return super.createBAMRecord(header, referenceSequenceIndex, alignmentStart, readNameLength, mappingQuality, indexingBin, cigarLen, flags, readLen, mateReferenceSequenceIndex, mateAlignmentStart, insertSize, variableLengthBlock);
+        }
+    }
+
+    @Test(dataProvider = "variousFormatReaderTestCases")
+    public void samRecordFactoryTest(final String inputFile) {
+        final File input = new File(TEST_DATA_DIR, inputFile);
+        final SAMRecordFactoryTester factory = new SAMRecordFactoryTester();
+        final SamReader reader = SamReaderFactory.makeDefault().samRecordFactory(factory).open(input);
+
+        int i = 0;
+        for (final SAMRecord rec : reader) {
+            ++i;
+        }
+        CloserUtil.close(reader);
+
+        Assert.assertTrue(i > 0);
+        if (inputFile.endsWith(".sam") || inputFile.endsWith(".sam.gz")) Assert.assertEquals(factory.samRecordsCreated, i);
+        else if (inputFile.endsWith(".bam")) Assert.assertEquals(factory.bamRecordsCreated, i);
+    }
+
+    @Test(dataProvider = "cramTestCases", expectedExceptions=IllegalStateException.class)
+    public void testReferenceRequiredForCRAM(final String inputFile, final String ignoredReferenceFile) {
+        final File input = new File(TEST_DATA_DIR, inputFile);
+        final SamReader reader = SamReaderFactory.makeDefault().open(input);
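+        // no reference was supplied, so iterating the CRAM should fail with an IllegalStateException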
+        for (final SAMRecord rec : reader) {
+        }
+        CloserUtil.close(reader);
+    }
+
+    @DataProvider(name = "cramTestCases")
+    public Object[][] cramTestPositiveCases() {
+        final Object[][] scenarios = new Object[][]{
+                {"cram_with_bai_index.cram", "hg19mini.fasta"},
+                {"cram_with_crai_index.cram", "hg19mini.fasta"},
+        };
+        return scenarios;
+    }
+
+    @Test(dataProvider = "cramTestCases")
+    public void testIterateCRAMWithIndex(final String inputFile, final String referenceFile) {
+        final File input = new File(TEST_DATA_DIR, inputFile);
+        final File reference = new File(TEST_DATA_DIR, referenceFile);
+        final SamReader reader = SamReaderFactory.makeDefault().referenceSequence(reference).open(input);
+        for (final SAMRecord rec : reader) {
+        }
+        CloserUtil.close(reader);
+    }
+
+    @Test
+    public void samRecordFactoryNullHeaderTest() {
+        final SAMRecordFactory factory = new DefaultSAMRecordFactory();
+        final SAMRecord samRec = factory.createSAMRecord(null);
+        Assert.assertNull(samRec.getHeader());
+    }
+
+}
diff --git a/src/test/java/htsjdk/samtools/SAMFileWriterFactoryTest.java b/src/test/java/htsjdk/samtools/SAMFileWriterFactoryTest.java
new file mode 100644
index 0000000..dc7a6f3
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/SAMFileWriterFactoryTest.java
@@ -0,0 +1,307 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2010 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools;
+
+import htsjdk.samtools.cram.build.CramIO;
+import htsjdk.samtools.cram.ref.ReferenceSource;
+import htsjdk.samtools.util.IOUtil;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.*;
+
+public class SAMFileWriterFactoryTest {
+
+    private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools");
+
+    /** PIC-442: Confirm that writing to a special file does not cause an exception when writing additional files. */
+    @Test(groups={"unix"})
+    public void specialFileWriterTest() {
+        createSmallBam(new File("/dev/null"));
+    }
+
+    @Test()
+    public void ordinaryFileWriterTest() throws Exception {
+        final File outputFile = File.createTempFile("tmp.", BamFileIoUtils.BAM_FILE_EXTENSION);
+        outputFile.delete();
+        outputFile.deleteOnExit();
+        createSmallBam(outputFile);
+        final File indexFile = SamFiles.findIndex(outputFile);
+        indexFile.deleteOnExit();
+        final File md5File = new File(outputFile.getParent(), outputFile.getName() + ".md5");
+        md5File.deleteOnExit();
+        Assert.assertTrue(outputFile.length() > 0);
+        Assert.assertTrue(indexFile.length() > 0);
+        Assert.assertTrue(md5File.length() > 0);
+    }
+
+    @Test(description="create a BAM in memory,  should start with GZipInputStream.GZIP_MAGIC")
+    public void inMemoryBam()  throws Exception  {
+        final ByteArrayOutputStream os = new ByteArrayOutputStream();
+        createSmallBamToOutputStream(os, true);
+        os.flush();
+        os.close();
+        final byte[] blob = os.toByteArray();
+        Assert.assertTrue(blob.length > 2);
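+        // reassemble the first two bytes little-endian and compare against the gzip magic number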
+        final int head = ((int) blob[0] & 0xff) | ((blob[1] << 8) & 0xff00);
+        Assert.assertTrue(java.util.zip.GZIPInputStream.GZIP_MAGIC == head);
+    }
+
+    @Test(description="create a SAM in memory,  should start with '@HD'")
+    public void inMemorySam()  throws Exception  {
+        final ByteArrayOutputStream os = new ByteArrayOutputStream();
+        createSmallBamToOutputStream(os, false);
+        os.flush();
+        os.close();
+        final String sam = new String(os.toByteArray());
+        Assert.assertFalse(sam.isEmpty());
+        Assert.assertTrue(sam.startsWith("@HD\t"),"SAM: bad prefix");
+    }
+
+    @Test(description="Read and then write SAM to verify header attribute ordering does not change depending on JVM version")
+    public void samRoundTrip()  throws Exception  {
+        final File input = new File(TEST_DATA_DIR, "roundtrip.sam");
+
+        final SamReader reader = SamReaderFactory.makeDefault().open(input);
+        final File outputFile = File.createTempFile("roundtrip-out", ".sam");
+        outputFile.delete();
+        outputFile.deleteOnExit();
+        FileOutputStream os = new FileOutputStream(outputFile);
+        final SAMFileWriterFactory factory = new SAMFileWriterFactory();
+        final SAMFileWriter writer = factory.makeSAMWriter(reader.getFileHeader(), false, os);
+        for (SAMRecord rec : reader) {
+            writer.addAlignment(rec);
+        }
+        writer.close();
+        os.close();
+
+        InputStream is = new FileInputStream(input);
+        String originalsam = IOUtil.readFully(is);
+        is.close();
+
+        is = new FileInputStream(outputFile);
+        String writtensam = IOUtil.readFully(is);
+        is.close();
+
+        Assert.assertEquals(writtensam, originalsam);
+    }
+
+    @Test(description="Write SAM records with null SAMFileHeader")
+    public void samNullHeaderRoundTrip()  throws Exception  {
+        final File input = new File(TEST_DATA_DIR, "roundtrip.sam");
+
+        final SamReader reader = SamReaderFactory.makeDefault().open(input);
+        final File outputFile = File.createTempFile("nullheader-out", ".sam");
+        outputFile.delete();
+        outputFile.deleteOnExit();
+        FileOutputStream os = new FileOutputStream(outputFile);
+        final SAMFileWriterFactory factory = new SAMFileWriterFactory();
+        final SAMFileWriter writer = factory.makeSAMWriter(reader.getFileHeader(), false, os);
+        for (SAMRecord rec : reader) {
+            rec.setHeader(null);
+            writer.addAlignment(rec);
+        }
+        writer.close();
+        os.close();
+
+        InputStream is = new FileInputStream(input);
+        String originalsam = IOUtil.readFully(is);
+        is.close();
+
+        is = new FileInputStream(outputFile);
+        String writtensam = IOUtil.readFully(is);
+        is.close();
+
+        Assert.assertEquals(writtensam, originalsam);
+    }
+
+    private void createSmallBam(final File outputFile) {
+        final SAMFileWriterFactory factory = new SAMFileWriterFactory();
+        factory.setCreateIndex(true);
+        factory.setCreateMd5File(true);
+        final SAMFileHeader header = new SAMFileHeader();
+        // index only created if coordinate sorted
+        header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
+        header.addSequence(new SAMSequenceRecord("chr1", 123));
+        final SAMFileWriter writer = factory.makeBAMWriter(header, false, outputFile);
+        fillSmallBam(writer);
+        writer.close();
+    }
+
+
+    private void createSmallBamToOutputStream(final OutputStream outputStream, final boolean binary) {
+        final SAMFileWriterFactory factory = new SAMFileWriterFactory();
+        factory.setCreateIndex(false);
+        factory.setCreateMd5File(false);
+        final SAMFileHeader header = new SAMFileHeader();
+        // index only created if coordinate sorted
+        header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
+        header.addSequence(new SAMSequenceRecord("chr1", 123));
+        final SAMFileWriter writer = binary
+                ? factory.makeBAMWriter(header, false, outputStream)
+                : factory.makeSAMWriter(header, false, outputStream);
+        fillSmallBam(writer);
+        writer.close();
+    }
+
+    private int fillSmallBam(final SAMFileWriter writer) {
+        final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
+        builder.addUnmappedFragment("HiMom!");
+        final int numRecs = builder.getRecords().size();
+        for (final SAMRecord rec : builder.getRecords()) {
+            writer.addAlignment(rec);
+        }
+        return numRecs;
+    }
+
+    private File prepareOutputFile(String extension) throws IOException {
+        final File outputFile = File.createTempFile("tmp.", extension);
+        outputFile.delete();
+        outputFile.deleteOnExit();
+        return outputFile;
+    }
+
+    // Create a writer factory that creates an index and an md5 file, and set the header to coordinate sorted
+    private SAMFileWriterFactory createWriterFactoryWithOptions(SAMFileHeader header) {
+        final SAMFileWriterFactory factory = new SAMFileWriterFactory();
+        factory.setCreateIndex(true);
+        factory.setCreateMd5File(true);
+        // index only created if coordinate sorted
+        header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
+        header.addSequence(new SAMSequenceRecord("chr1", 123));
+        header.addReadGroup(new SAMReadGroupRecord("1"));
+        return factory;
+    }
+
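+    // Re-open the output file (checking its index and md5 companions when requested) and verify the record count.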
+    private void verifyWriterOutput(File outputFile, ReferenceSource refSource, int nRecs, boolean verifySupplementalFiles) {
+        if (verifySupplementalFiles) {
+            final File indexFile = SamFiles.findIndex(outputFile);
+            indexFile.deleteOnExit();
+            final File md5File = new File(outputFile.getParent(), outputFile.getName() + ".md5");
+            md5File.deleteOnExit();
+            Assert.assertTrue(indexFile.length() > 0);
+            Assert.assertTrue(md5File.length() > 0);
+        }
+
+        SamReaderFactory factory =  SamReaderFactory.makeDefault().validationStringency(ValidationStringency.LENIENT);
+        if (refSource != null) {
+            factory.referenceSource(refSource);
+        }
+        SamReader reader = factory.open(outputFile);
+        SAMRecordIterator it = reader.iterator();
+        int count = 0;
+        for (; it.hasNext(); it.next()) {
+            count++;
+        }
+
+        Assert.assertEquals(count, nRecs);
+    }
+
+    @DataProvider(name="bamOrCramWriter")
+    public Object[][] bamOrCramWriter() {
+        return new Object[][] {
+                { BamFileIoUtils.BAM_FILE_EXTENSION, },
+                { CramIO.CRAM_FILE_EXTENSION }
+        };
+    }
+
+    @Test(dataProvider="bamOrCramWriter")
+    public void testMakeWriter(String extension) throws Exception {
+        final File outputFile = prepareOutputFile(extension);
+        final SAMFileHeader header = new SAMFileHeader();
+        final SAMFileWriterFactory factory = createWriterFactoryWithOptions(header);
+        final File referenceFile = new File(TEST_DATA_DIR, "hg19mini.fasta");
+
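+        // makeWriter chooses the writer type (BAM or CRAM here) from the output file's extension
+        // and uses the reference for CRAM output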
+        final SAMFileWriter samWriter = factory.makeWriter(header, false, outputFile, referenceFile);
+        int nRecs = fillSmallBam(samWriter);
+        samWriter.close();
+
+        verifyWriterOutput(outputFile, new ReferenceSource(referenceFile), nRecs, true);
+    }
+
+    @Test
+    public void testMakeCRAMWriterWithOptions() throws Exception {
+        final File outputFile = prepareOutputFile(CramIO.CRAM_FILE_EXTENSION);
+        final SAMFileHeader header = new SAMFileHeader();
+        final SAMFileWriterFactory factory = createWriterFactoryWithOptions(header);
+        final File referenceFile = new File(TEST_DATA_DIR, "hg19mini.fasta");
+
+        final SAMFileWriter samWriter = factory.makeCRAMWriter(header, false, outputFile, referenceFile);
+        int nRecs = fillSmallBam(samWriter);
+        samWriter.close();
+
+        verifyWriterOutput(outputFile, new ReferenceSource(referenceFile), nRecs, true);
+    }
+
+    @Test
+    public void testMakeCRAMWriterIgnoresOptions() throws Exception {
+        final File outputFile = prepareOutputFile(CramIO.CRAM_FILE_EXTENSION);
+        final SAMFileHeader header = new SAMFileHeader();
+        final SAMFileWriterFactory factory = createWriterFactoryWithOptions(header);
+        final File referenceFile = new File(TEST_DATA_DIR, "hg19mini.fasta");
+
+        // Note: does not honor factory settings for CREATE_MD5 or CREATE_INDEX.
+        final SAMFileWriter samWriter = factory.makeCRAMWriter(header, new FileOutputStream(outputFile), referenceFile);
+        int nRecs = fillSmallBam(samWriter);
+        samWriter.close();
+
+        verifyWriterOutput(outputFile, new ReferenceSource(referenceFile), nRecs, false);
+    }
+
+    @Test
+    public void testMakeCRAMWriterPresortedDefault() throws Exception {
+        final File outputFile = prepareOutputFile(CramIO.CRAM_FILE_EXTENSION);
+        final SAMFileHeader header = new SAMFileHeader();
+        final SAMFileWriterFactory factory = createWriterFactoryWithOptions(header);
+        final File referenceFile = new File(TEST_DATA_DIR, "hg19mini.fasta");
+
+        // Defaults to preSorted==true
+        final SAMFileWriter samWriter = factory.makeCRAMWriter(header, outputFile, referenceFile);
+        int nRecs = fillSmallBam(samWriter);
+        samWriter.close();
+
+        verifyWriterOutput(outputFile, new ReferenceSource(referenceFile), nRecs, true);
+    }
+
+    @Test
+    public void testAsync() throws IOException {
+        final SAMFileWriterFactory builder = new SAMFileWriterFactory();
+
+        final File outputFile = prepareOutputFile(BamFileIoUtils.BAM_FILE_EXTENSION);
+        final SAMFileHeader header = new SAMFileHeader();
+        final SAMFileWriterFactory factory = createWriterFactoryWithOptions(header);
+        final File referenceFile = new File(TEST_DATA_DIR, "hg19mini.fasta");
+
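+        // with no explicit async setting, the factory follows Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS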
+        SAMFileWriter writer = builder.makeWriter(header, false, outputFile, referenceFile);
+        Assert.assertEquals(writer instanceof AsyncSAMFileWriter, Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS, "testAsync default");
+
+        writer = builder.setUseAsyncIo(true).makeWriter(header, false, outputFile, referenceFile);
+        Assert.assertTrue(writer instanceof AsyncSAMFileWriter, "testAsync option=set");
+
+        writer = builder.setUseAsyncIo(false).makeWriter(header, false, outputFile, referenceFile);
+        Assert.assertFalse(writer instanceof AsyncSAMFileWriter, "testAsync option=unset");
+    }
+}
diff --git a/src/tests/java/htsjdk/samtools/SAMFlagTest.java b/src/test/java/htsjdk/samtools/SAMFlagTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/SAMFlagTest.java
rename to src/test/java/htsjdk/samtools/SAMFlagTest.java
diff --git a/src/test/java/htsjdk/samtools/SAMIntegerTagTest.java b/src/test/java/htsjdk/samtools/SAMIntegerTagTest.java
new file mode 100644
index 0000000..2d78a78
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/SAMIntegerTagTest.java
@@ -0,0 +1,329 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools;
+
+import htsjdk.samtools.cram.ref.ReferenceSource;
+import htsjdk.samtools.util.BinaryCodec;
+import htsjdk.samtools.util.CloserUtil;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Confirm that integer tag types are stored and retrieved properly.
+ *
+ * @author alecw at broadinstitute.org
+ */
+public class SAMIntegerTagTest {
+    private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools/SAMIntegerTagTest");
+
+    private static final String BYTE_TAG = "BY";
+    private static final String SHORT_TAG = "SH";
+    private static final String INTEGER_TAG = "IN";
+    private static final String UNSIGNED_INTEGER_TAG = "UI";
+    private static final String STRING_TAG = "ST";
+
+    private static final long TOO_LARGE_UNSIGNED_INT_VALUE = BinaryCodec.MAX_UINT + 1L;
+
+    enum FORMAT {SAM, BAM, CRAM}
+
+    @Test
+    public void testBAM() throws Exception {
+        final SAMRecord rec = writeAndReadSamRecord("bam");
+        Assert.assertTrue(rec.getAttribute(BYTE_TAG) instanceof Integer);
+        Assert.assertEquals(((Number) rec.getAttribute(BYTE_TAG)).intValue(), 1);
+        Assert.assertTrue(rec.getAttribute(SHORT_TAG) instanceof Integer);
+        Assert.assertEquals(((Number) rec.getAttribute(SHORT_TAG)).intValue(), 1);
+        Assert.assertTrue(rec.getAttribute(INTEGER_TAG) instanceof Integer);
+        Assert.assertEquals(((Number) rec.getAttribute(INTEGER_TAG)).intValue(), 1);
+    }
+
+    @Test
+    public void testSAM() throws Exception {
+        final SAMRecord rec = writeAndReadSamRecord("sam");
+        Assert.assertTrue(rec.getAttribute(BYTE_TAG) instanceof Integer);
+        Assert.assertEquals(((Number) rec.getAttribute(BYTE_TAG)).intValue(), 1);
+        Assert.assertTrue(rec.getAttribute(SHORT_TAG) instanceof Integer);
+        Assert.assertEquals(((Number) rec.getAttribute(SHORT_TAG)).intValue(), 1);
+        Assert.assertTrue(rec.getAttribute(INTEGER_TAG) instanceof Integer);
+        Assert.assertEquals(((Number) rec.getAttribute(INTEGER_TAG)).intValue(), 1);
+    }
+
+    @Test
+    public void testUnsignedIntegerSAM() throws Exception {
+        final SAMRecord rec = createSamRecord();
+        final long val = 1L + Integer.MAX_VALUE;
+        rec.setAttribute(UNSIGNED_INTEGER_TAG, val);
+        final Object roundTripValue = rec.getAttribute(UNSIGNED_INTEGER_TAG);
+        Assert.assertTrue(roundTripValue instanceof Long);
+        Assert.assertEquals(((Long)roundTripValue).longValue(), val);
+    }
+
+    @Test
+    public void testGetTypedAttributeMethods() throws Exception {
+        final SAMRecord rec = writeAndReadSamRecord("bam");
+        Assert.assertEquals(rec.getByteAttribute(INTEGER_TAG).intValue(), 1);
+        Assert.assertEquals(rec.getShortAttribute(INTEGER_TAG).intValue(), 1);
+        Assert.assertEquals(rec.getIntegerAttribute(INTEGER_TAG).intValue(), 1);
+    }
+
+    /**
+     * Should be an exception if a typed attribute call is made for the wrong type.
+     */
+    @Test(expectedExceptions = RuntimeException.class)
+    public void testGetTypedAttributeForWrongType() throws Exception {
+        final SAMRecord rec = createSamRecord();
+        rec.setAttribute(STRING_TAG, "Hello, World!");
+        writeAndReadSamRecord("bam", rec);
+        rec.getIntegerAttribute(STRING_TAG);
+        Assert.fail("Exception should have been thrown.");
+    }
+
+    /**
+     * Should be an exception if a typed attribute call is made for a value that cannot
+     * be coerced into the correct type.
+     * This test is a little lame because a RuntimeException could be thrown for some other reason.
+     */
+    @Test(expectedExceptions = RuntimeException.class)
+    public void testGetTypedAttributeOverflow() throws Exception {
+        final SAMRecord rec = createSamRecord();
+        rec.setAttribute(INTEGER_TAG, Integer.MAX_VALUE);
+        writeAndReadSamRecord("bam", rec);
+        rec.getShortAttribute(INTEGER_TAG);
+        Assert.fail("Exception should have been thrown.");
+    }
+
+    /**
+     * Should be an exception if a typed attribute call is made for a value that cannot
+     * be coerced into the correct type.
+     * This test is a little lame because a RuntimeException could be thrown for some other reason.
+     */
+    @Test(expectedExceptions = RuntimeException.class)
+    public void testGetTypedAttributeUnderflow() throws Exception {
+        final SAMRecord rec = createSamRecord();
+        rec.setAttribute(INTEGER_TAG, Integer.MIN_VALUE);
+        writeAndReadSamRecord("bam", rec);
+        rec.getShortAttribute(INTEGER_TAG);
+        Assert.fail("Exception should have been thrown.");
+    }
+
+    /**
+     * Create a SAMRecord with integer tags of various sizes, write to a file, and read it back.
+     *
+     * @param format "sam" or "bam".
+     * @return The record after having being read from file.
+     */
+    private SAMRecord writeAndReadSamRecord(final String format) throws IOException {
+        SAMRecord rec = createSamRecord();
+        rec.setAttribute(BYTE_TAG, (byte) 1);
+        rec.setAttribute(SHORT_TAG, (short) 1);
+        rec.setAttribute(INTEGER_TAG, 1);
+        rec = writeAndReadSamRecord(format, rec);
+        return rec;
+    }
+
+    /**
+     * Write a SAMRecord to a SAM file in the given format, and read it back.
+     *
+     * @param format "sam" or "bam".
+     * @param rec    The record to write.
+     * @return The same record, after having being written and read back.
+     */
+    private SAMRecord writeAndReadSamRecord(final String format, SAMRecord rec) throws IOException {
+        final File bamFile = File.createTempFile("htsjdk-writeAndReadSamRecord.", "." + format);
+        final SAMFileWriter bamWriter = new SAMFileWriterFactory().makeSAMOrBAMWriter(rec.getHeader(), false, bamFile);
+        bamWriter.addAlignment(rec);
+        bamWriter.close();
+        final SamReader reader = SamReaderFactory.makeDefault().open(bamFile);
+        rec = reader.iterator().next();
+        reader.close();
+        bamFile.delete();
+        return rec;
+    }
+
+    private SAMRecord createSamRecord() {
+        final SAMRecordSetBuilder builder = new SAMRecordSetBuilder(false, SAMFileHeader.SortOrder.unsorted);
+        builder.addFrag("readA", 20, 140, false);
+        return builder.iterator().next();
+    }
+
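+    /** Builds an in-memory SAM with a single unmapped record carrying an integer "UI" attribute set to the given value. */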
+    private static SamInputResource createSamForIntAttr(long value) {
+        ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        PrintStream ps = new PrintStream(baos);
+        ps.println("@HD\tVN:1.0");
+        ps.print("1\t4\t*\t0\t0\t*\t*\t0\t0\tA\t<\tUI:i:");
+        ps.println(value);
+        ps.close();
+
+        return new SamInputResource(new InputStreamInputResource(new ByteArrayInputStream(baos.toByteArray())));
+    }
+
+    @Test
+    public void testGoodSamStrict() throws IOException {
+        final SamReaderFactory factory = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.STRICT);
+
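+        // values that fit in a signed int should come back as Integers; values above Integer.MAX_VALUE
+        // (up to the unsigned-int maximum) should come back as Longs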
+        Assert.assertEquals(0, ((Number) factory.open(createSamForIntAttr(0)).iterator().next().getAttribute("UI")).intValue());
+        Assert.assertEquals(-1, ((Number) factory.open(createSamForIntAttr(-1)).iterator().next().getAttribute("UI")).intValue());
+        Assert.assertEquals(Integer.MIN_VALUE, ((Number) factory.open(createSamForIntAttr(Integer.MIN_VALUE)).iterator().next().getAttribute("UI")).intValue());
+        Assert.assertEquals(Integer.MAX_VALUE, ((Number) factory.open(createSamForIntAttr(Integer.MAX_VALUE)).iterator().next().getAttribute("UI")).intValue());
+        Assert.assertEquals(1L + (long) Integer.MAX_VALUE, ((Number) factory.open(createSamForIntAttr(1L + (long) Integer.MAX_VALUE)).iterator().next().getAttribute("UI")).longValue());
+        Assert.assertEquals(BinaryCodec.MAX_UINT, ((Number) factory.open(createSamForIntAttr(BinaryCodec.MAX_UINT)).iterator().next().getAttribute("UI")).longValue());
+    }
+
+    @Test(expectedExceptions = SAMException.class)
+    public void testBadSamStrict() throws IOException {
+        final SamReader reader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.STRICT).open(createSamForIntAttr(BinaryCodec.MAX_UINT + 1L));
+        reader.iterator().next();
+    }
+
+    @Test
+    public void testBadSamSilent() throws IOException {
+        final SamReader reader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT).open(createSamForIntAttr(BinaryCodec.MAX_UINT + 1L));
+        reader.iterator().next();
+    }
+
+    @DataProvider(name = "legalIntegerAttributesFiles")
+    public Object[][] getLegalIntegerAttributesFiles() {
+        return new Object[][] {
+                { new File(TEST_DATA_DIR, "variousAttributes.sam") },
+                { new File(TEST_DATA_DIR, "variousAttributes.bam") }
+        };
+    }
+
+    @Test(dataProvider = "legalIntegerAttributesFiles")
+    public void testLegalIntegerAttributesFilesStrict( final File inputFile ) {
+        final SamReader reader = SamReaderFactory.makeDefault()
+                .enable(SamReaderFactory.Option.EAGERLY_DECODE)
+                .validationStringency(ValidationStringency.STRICT)
+                .open(inputFile);
+
+        final SAMRecord rec = reader.iterator().next();
+        final Map<String, Number> expectedTags = new HashMap<String, Number>();
+        expectedTags.put("SB", -128);
+        expectedTags.put("UB", 129);
+        expectedTags.put("SS", 32767);
+        expectedTags.put("US", 65535);
+        expectedTags.put("SI", 2147483647);
+        expectedTags.put("I2", -2147483647);
+        expectedTags.put("UI", 4294967295L);
+        for (final Map.Entry<String, Number> entry : expectedTags.entrySet()) {
+            final Object value = rec.getAttribute(entry.getKey());
+            Assert.assertTrue(((Number) value).longValue() == entry.getValue().longValue());
+        }
+        CloserUtil.close(reader);
+    }
+
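+    // Data providers for the round-trip tests below: "valid_set" pairs every format and validation
+    // stringency with values representable as a signed 32-bit int or as an unsigned 32-bit int
+    // (up to BinaryCodec.MAX_UINT); "invalid_set" uses values outside that combined range.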
+    @DataProvider(name = "valid_set")
+    public static Object[][] valid_set() {
+        List<Object[]> params = new ArrayList<Object[]>();
+        for (FORMAT format:FORMAT.values()) {
+            for (ValidationStringency stringency:ValidationStringency.values()) {
+                params.add(new Object[]{0, format, stringency});
+                params.add(new Object[]{1, format, stringency});
+                params.add(new Object[]{-1, format, stringency});
+                params.add(new Object[]{Integer.MIN_VALUE, format, stringency});
+                params.add(new Object[]{Integer.MAX_VALUE, format, stringency});
+
+                params.add(new Object[]{1L, format, stringency});
+                params.add(new Object[]{-1L, format, stringency});
+                params.add(new Object[]{(long)Integer.MAX_VALUE+1L, format, stringency});
+                params.add(new Object[]{BinaryCodec.MAX_UINT, format, stringency});
+            }
+        }
+
+        return params.toArray(new Object[params.size()][]);
+    }
+
+    @DataProvider(name = "invalid_set")
+    public static Object[][] invalid_set() {
+        List<Object[]> params = new ArrayList<Object[]>();
+        for (FORMAT format:FORMAT.values()) {
+            for (ValidationStringency stringency:ValidationStringency.values()) {
+                params.add(new Object[]{(long)Integer.MIN_VALUE -1L, format, stringency});
+                params.add(new Object[]{TOO_LARGE_UNSIGNED_INT_VALUE, format, stringency});
+            }
+        }
+
+        return params.toArray(new Object[params.size()][]);
+    }
+
+    @Test(dataProvider = "valid_set")
+    public void testValidIntegerAttributeRoundtrip(final long value, final FORMAT format, ValidationStringency validationStringency) throws IOException {
+        testRoundtripIntegerAttribute(value, format, validationStringency);
+    }
+
+    @Test(dataProvider = "invalid_set", expectedExceptions = RuntimeException.class)
+    public void testInvalidIntegerAttributeRoundtrip(final long value, final FORMAT format, ValidationStringency validationStringency) throws IOException {
+        testRoundtripIntegerAttribute(value, format, validationStringency);
+    }
+
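+    // Writes a single unmapped record carrying the "UI" attribute in the requested format, reads it
+    // back from memory, and asserts that the attribute value survives the round trip.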
+    private void testRoundtripIntegerAttribute(final Number value, final FORMAT format, ValidationStringency validationStringency) throws IOException {
+        final SAMFileHeader header = new SAMFileHeader();
+        ByteArrayOutputStream baos = new ByteArrayOutputStream();
+
+        final SAMFileWriter w;
+        switch (format) {
+            case SAM:
+                w = new SAMFileWriterFactory().makeSAMWriter(header, false, baos);
+                break;
+            case BAM:
+                w = new SAMFileWriterFactory().makeBAMWriter(header, false, baos);
+                break;
+            case CRAM:
+                w = new SAMFileWriterFactory().makeCRAMWriter(header, baos, null);
+                break;
+            default:
+                throw new RuntimeException("Unknown format: " + format);
+        }
+
+        final SAMRecord record = new SAMRecord(header);
+        record.setAttribute("UI", value);
+        record.setReadName("1");
+        record.setReadUnmappedFlag(true);
+        record.setReadBases("A".getBytes());
+        record.setBaseQualityString("!");
+        Assert.assertEquals(value, record.getAttribute("UI"));
+
+        w.addAlignment(record);
+        w.close();
+
+        final SamReader reader = SamReaderFactory.make().validationStringency(validationStringency).referenceSource(new ReferenceSource((File)null)).
+                open(SamInputResource.of(new ByteArrayInputStream(baos.toByteArray())));
+        final SAMRecordIterator iterator = reader.iterator();
+        Assert.assertTrue(iterator.hasNext());
+        final SAMRecord record2 = iterator.next();
+        final Number returnedValue = (Number) record2.getAttribute("UI");
+        Assert.assertEquals(value.longValue(), returnedValue.longValue());
+    }
+}
diff --git a/src/tests/java/htsjdk/samtools/SAMRecordDuplicateComparatorTest.java b/src/test/java/htsjdk/samtools/SAMRecordDuplicateComparatorTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/SAMRecordDuplicateComparatorTest.java
rename to src/test/java/htsjdk/samtools/SAMRecordDuplicateComparatorTest.java
diff --git a/src/test/java/htsjdk/samtools/SAMRecordUnitTest.java b/src/test/java/htsjdk/samtools/SAMRecordUnitTest.java
new file mode 100644
index 0000000..a8f06e4
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/SAMRecordUnitTest.java
@@ -0,0 +1,963 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2014 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+
+package htsjdk.samtools;
+
+import htsjdk.samtools.util.BinaryCodec;
+import htsjdk.samtools.util.TestUtil;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.*;
+import java.util.Arrays;
+import java.util.List;
+
+public class SAMRecordUnitTest {
+
+    @DataProvider(name = "serializationTestData")
+    public Object[][] getSerializationTestData() {
+        return new Object[][] {
+                { new File("src/test/resources/htsjdk/samtools/serialization_test.sam") },
+                { new File("src/test/resources/htsjdk/samtools/serialization_test.bam") }
+        };
+    }
+
+    @Test(dataProvider = "serializationTestData")
+    public void testSAMRecordSerialization( final File inputFile ) throws Exception {
+        final SamReader reader = SamReaderFactory.makeDefault().open(inputFile);
+        final SAMRecord initialSAMRecord = reader.iterator().next();
+        reader.close();
+
+        final SAMRecord deserializedSAMRecord = TestUtil.serializeAndDeserialize(initialSAMRecord);
+
+        Assert.assertEquals(deserializedSAMRecord, initialSAMRecord, "Deserialized SAMRecord not equal to original SAMRecord");
+    }
+
+    @DataProvider
+    public Object [][] offsetAtReferenceData() {
+        return new Object[][]{
+                {"3S9M",   7, 10, false},
+                {"3S9M",   0,  0, false},
+                {"3S9M",  -1,  0, false},
+                {"3S9M",  13,  0, false},
+                {"4M1D6M", 4,  4, false},
+                {"4M1D6M", 4,  4, true},
+                {"4M1D6M", 5,  0, false},
+                {"4M1D6M", 5,  4, true},
+                {"4M1I6M", 5,  6, false},
+                {"4M1I6M", 11, 0, false},
+        };
+    }
+
+    @Test(dataProvider = "offsetAtReferenceData")
+    public void testOffsetAtReference(String cigar, int posInReference, int expectedPosInRead, boolean returnLastBaseIfDeleted) {
+
+            SAMRecord sam = new SAMRecordSetBuilder().addFrag("test", 0, 1, false, false, cigar, null, 2);
+            Assert.assertEquals(SAMRecord.getReadPositionAtReferencePosition(sam, posInReference, returnLastBaseIfDeleted), expectedPosInRead);
+    }
+
+    @DataProvider
+    public Object [][] referenceAtReadData() {
+        return new Object[][]{
+                {"3S9M", 7, 10},
+                {"3S9M", 0, 0},
+                {"3S9M", 0, 13},
+                {"4M1D6M", 4, 4},
+                {"4M1D6M", 6, 5},
+                {"4M1I6M", 0, 5},
+                {"4M1I6M", 5, 6},
+        };
+    }
+
+    @Test(dataProvider = "referenceAtReadData")
+    public void testOffsetAtRead(String cigar, int expectedReferencePos, int posInRead) {
+
+            SAMRecord sam = new SAMRecordSetBuilder().addFrag("test", 0, 1, false, false, cigar, null, 2);
+            Assert.assertEquals(sam.getReferencePositionAtReadPosition(posInRead), expectedReferencePos);
+    }
+
+    @DataProvider(name = "deepCopyTestData")
+    public Object [][] deepCopyTestData() {
+        return new Object[][]{
+                { new SAMRecordSetBuilder().addFrag("test", 0, 1, false, false, "3S9M", null, 2) },
+                { new SAMRecordSetBuilder().addFrag("test", 0, 1, false, false, "4M1I6M", null, 2) }
+        };
+    }
+
+    @Test(dataProvider = "deepCopyTestData")
+    public void testDeepCopyBasic(final SAMRecord sam) {
+        testDeepCopy(sam);
+    }
+
+    @Test(dataProvider = "deepCopyTestData")
+    public void testDeepCopyCigar(SAMRecord sam) {
+        sam.setCigar(sam.getCigar());
+        final SAMRecord deepCopy = sam.deepCopy();
+        Assert.assertTrue(sam.equals(deepCopy));
+    }
+
+    @Test(dataProvider = "deepCopyTestData")
+    public void testDeepCopyGetCigarString(SAMRecord sam) {
+        sam.setCigarString(sam.getCigarString());
+        final SAMRecord deepCopy = sam.deepCopy();
+        Assert.assertTrue(sam.equals(deepCopy));
+    }
+
+    @Test(dataProvider = "deepCopyTestData")
+    public void testDeepCopyGetCigar(final SAMRecord sam)
+    {
+        testDeepCopy(sam);
+        sam.setCigarString(sam.getCigarString());
+        sam.getCigar(); // force cigar elements to be resolved for equals
+        testDeepCopy(sam);
+    }
+
+    @Test(dataProvider = "deepCopyTestData")
+    public void testDeepCopyMutate(final SAMRecord sam) {
+        final byte[] initialBaseQualityCopy = Arrays.copyOf(sam.getBaseQualities(), sam.getBaseQualities().length);
+        final int initialStart = sam.getAlignmentStart();
+
+        final SAMRecord deepCopy = testDeepCopy(sam);
+        Assert.assertTrue(Arrays.equals(sam.getBaseQualities(), deepCopy.getBaseQualities()));
+        Assert.assertTrue(sam.getAlignmentStart() == deepCopy.getAlignmentStart());
+
+        // mutate copy and make sure original remains unchanged
+        final byte[] copyBaseQuals = deepCopy.getBaseQualities();
+        for (int i = 0; i < copyBaseQuals.length; i++) {
+            copyBaseQuals[i]++;
+        }
+        deepCopy.setBaseQualities(copyBaseQuals);
+        deepCopy.setAlignmentStart(initialStart + 1);
+        Assert.assertTrue(Arrays.equals(sam.getBaseQualities(), initialBaseQualityCopy));
+        Assert.assertTrue(sam.getAlignmentStart() == initialStart);
+    }
+
+    @Test(dataProvider = "deepCopyTestData")
+    public void testDeepByteAttributes( final SAMRecord sam ) throws Exception {
+        SAMRecord deepCopy = testDeepCopy(sam);
+
+        final byte bytes[] = { -2, -1, 0, 1, 2 };
+        sam.setAttribute("BY", bytes);
+        deepCopy = sam.deepCopy();
+        Assert.assertEquals(sam, deepCopy);
+
+        // validate reference inequality and content equality
+        final byte samBytes[] = sam.getByteArrayAttribute("BY");
+        final byte copyBytes[] = deepCopy.getByteArrayAttribute("BY");
+        Assert.assertFalse(copyBytes == samBytes);
+        Assert.assertTrue(Arrays.equals(copyBytes, samBytes));
+
+        // validate mutation independence
+        final byte testByte = -1;
+        Assert.assertTrue(samBytes[2] != testByte);  // ensure initial test condition
+        Assert.assertTrue(copyBytes[2] != testByte); // ensure initial test condition
+        samBytes[2] = testByte;                      // mutate original
+        Assert.assertTrue(samBytes[2] == testByte);
+        Assert.assertTrue(copyBytes[2] != testByte);
+        sam.setAttribute("BY", samBytes);
+        Assert.assertTrue(sam.getByteArrayAttribute("BY")[2] != deepCopy.getByteArrayAttribute("BY")[2]);
+
+        // now unsigned...
+        sam.setUnsignedArrayAttribute("BY", bytes);
+        deepCopy = sam.deepCopy();
+        Assert.assertEquals(sam, deepCopy);
+        final byte samUBytes[] = sam.getUnsignedByteArrayAttribute("BY");
+        final byte copyUBytes[] = deepCopy.getUnsignedByteArrayAttribute("BY");
+        Assert.assertFalse(copyUBytes == bytes);
+        Assert.assertTrue(Arrays.equals(copyUBytes, samUBytes));
+
+        // validate mutation independence
+        final byte uByte = 1;
+        Assert.assertTrue(samUBytes[2] != uByte); //  ensure initial test condition
+        Assert.assertTrue(copyUBytes[2] != uByte); //  ensure initial test condition
+        samUBytes[2] = uByte;  // mutate original
+        Assert.assertTrue(samUBytes[2] == uByte);
+        Assert.assertTrue(copyUBytes[2] != uByte);
+        sam.setUnsignedArrayAttribute("BY", samBytes);
+        Assert.assertTrue(sam.getUnsignedByteArrayAttribute("BY")[2] != deepCopy.getUnsignedByteArrayAttribute("BY")[2]);
+    }
+
+    @Test(dataProvider = "deepCopyTestData")
+    public void testDeepShortAttributes( final SAMRecord sam ) throws Exception {
+        SAMRecord deepCopy = testDeepCopy(sam);
+
+        final short shorts[] = { -20, -10, 0, 10, 20 };
+        sam.setAttribute("SH", shorts);
+        deepCopy = sam.deepCopy();
+        Assert.assertEquals(sam, deepCopy);
+
+        // validate reference inequality, content equality
+        final short samShorts[] = sam.getSignedShortArrayAttribute("SH");
+        final short copyShorts[] = deepCopy.getSignedShortArrayAttribute("SH");
+        Assert.assertFalse(copyShorts == samShorts);
+        Assert.assertTrue(Arrays.equals(copyShorts, samShorts));
+
+        // validate mutation independence
+        final short testShort = -1;
+        Assert.assertTrue(samShorts[2] != testShort); //  ensure initial test condition
+        Assert.assertTrue(copyShorts[2] != testShort); //  ensure initial test condition
+        samShorts[2] = testShort;  // mutate original
+        Assert.assertTrue(samShorts[2] == testShort);
+        Assert.assertTrue(copyShorts[2] != testShort);
+        sam.setAttribute("SH", samShorts);
+        Assert.assertTrue(sam.getSignedShortArrayAttribute("SH")[2] != deepCopy.getSignedShortArrayAttribute("SH")[2]);
+
+        // now unsigned...
+        sam.setUnsignedArrayAttribute("SH", shorts);
+        deepCopy = sam.deepCopy();
+        Assert.assertEquals(sam, deepCopy);
+
+        final short samUShorts[] = sam.getUnsignedShortArrayAttribute("SH");
+        final short copyUShorts[] = deepCopy.getUnsignedShortArrayAttribute("SH");
+        Assert.assertFalse(copyUShorts == shorts);
+        Assert.assertTrue(Arrays.equals(copyUShorts, samUShorts));
+
+        // validate mutation independence
+        final short uShort = 1;
+        Assert.assertTrue(samUShorts[2] != uShort); //  ensure initial test condition
+        Assert.assertTrue(copyUShorts[2] != uShort); //  ensure initial test condition
+        samUShorts[2] = uShort;  // mutate original
+        Assert.assertTrue(samUShorts[2] == uShort);
+        Assert.assertTrue(copyUShorts[2] != uShort);
+        sam.setUnsignedArrayAttribute("SH", samShorts);
+        Assert.assertTrue(sam.getUnsignedShortArrayAttribute("SH")[2] != deepCopy.getUnsignedShortArrayAttribute("SH")[2]);
+    }
+
+    @Test(dataProvider = "deepCopyTestData")
+    public void testDeepIntAttributes( final SAMRecord sam ) throws Exception {
+        SAMRecord deepCopy = testDeepCopy(sam);
+
+        final int ints[] = { -200, -100, 0, 100, 200 };
+        sam.setAttribute("IN", ints);
+        deepCopy = sam.deepCopy();
+        Assert.assertEquals(sam, deepCopy);
+
+        // validate reference inequality and content equality
+        final  int samInts[] = sam.getSignedIntArrayAttribute("IN");
+        final  int copyInts[] = deepCopy.getSignedIntArrayAttribute("IN");
+        Assert.assertFalse(copyInts == ints);
+        Assert.assertTrue(Arrays.equals(copyInts, samInts));
+
+        // validate mutation independence
+        final int testInt = -1;
+        Assert.assertTrue(samInts[2] != testInt); //  ensure initial test condition
+        Assert.assertTrue(copyInts[2] != testInt); //  ensure initial test condition
+        samInts[2] = testInt;  // mutate original
+        Assert.assertTrue(samInts[2] == testInt);
+        Assert.assertTrue(copyInts[2] != testInt);
+        sam.setAttribute("IN", samInts);
+        Assert.assertTrue(sam.getSignedIntArrayAttribute("IN")[2] != deepCopy.getSignedIntArrayAttribute("IN")[2]);
+
+        // now unsigned...
+        sam.setUnsignedArrayAttribute("IN", ints);
+        deepCopy = sam.deepCopy();
+        Assert.assertEquals(sam, deepCopy);
+
+        final int samUInts[] = sam.getUnsignedIntArrayAttribute("IN");
+        final int copyUInts[] = deepCopy.getUnsignedIntArrayAttribute("IN");
+        Assert.assertFalse(copyUInts == ints);
+        Assert.assertTrue(Arrays.equals(copyUInts, samUInts));
+
+        // validate mutation independence
+        final int uInt = 1;
+        Assert.assertTrue(samUInts[2] != uInt); //  ensure initial test condition
+        Assert.assertTrue(copyUInts[2] != uInt); //  ensure initial test condition
+        samUInts[2] = uInt;  // mutate original
+        Assert.assertTrue(samUInts[2] == uInt);
+        Assert.assertTrue(copyUInts[2] != uInt);
+        sam.setUnsignedArrayAttribute("IN", samInts);
+        Assert.assertTrue(sam.getUnsignedIntArrayAttribute("IN")[2] != deepCopy.getUnsignedIntArrayAttribute("IN")[2]);
+    }
+
+    @Test(dataProvider = "deepCopyTestData")
+    public void testDeepFloatAttributes( final SAMRecord sam ) throws Exception {
+        SAMRecord deepCopy = testDeepCopy(sam);
+
+        final float floats[] = { -2.4f, -1.2f, 0, 2.3f, 4.6f };
+        sam.setAttribute("FL", floats);
+        deepCopy = sam.deepCopy();
+        Assert.assertEquals(sam, deepCopy);
+
+        // validate reference inequality and content equality
+        final float samFloats[] = sam.getFloatArrayAttribute("FL");
+        final float copyFloats[] = deepCopy.getFloatArrayAttribute("FL");
+        Assert.assertFalse(copyFloats == floats);
+        Assert.assertFalse(copyFloats == samFloats);
+        Assert.assertTrue(Arrays.equals(copyFloats, samFloats));
+
+        // validate mutation independence
+        final float testFloat = -1.0f;
+        Assert.assertTrue(samFloats[2] != testFloat); //  ensure initial test condition
+        Assert.assertTrue(copyFloats[2] != testFloat); //  ensure initial test condition
+        samFloats[2] = testFloat;  // mutate original
+        Assert.assertTrue(samFloats[2] == testFloat);
+        Assert.assertTrue(copyFloats[2] != testFloat);
+        sam.setAttribute("FL", samFloats);
+        Assert.assertTrue(sam.getFloatArrayAttribute("FL")[2] != deepCopy.getFloatArrayAttribute("FL")[2]);
+    }
+
+    private SAMRecord testDeepCopy(SAMRecord sam) {
+        final SAMRecord deepCopy = sam.deepCopy();
+        Assert.assertTrue(sam.equals(deepCopy));
+        return deepCopy;
+    }
+
+    @Test
+    public void test_getUnsignedIntegerAttribute_valid() {
+        final String stringTag = "UI";
+        final short binaryTag = SAMTagUtil.getSingleton().makeBinaryTag(stringTag);
+        SAMFileHeader header = new SAMFileHeader();
+        SAMRecord record = new SAMRecord(header);
+        Assert.assertNull(record.getUnsignedIntegerAttribute(stringTag));
+        Assert.assertNull(record.getUnsignedIntegerAttribute(binaryTag));
+
+        record.setAttribute("UI", (long) 0L);
+        Assert.assertEquals(new Long(0L), record.getUnsignedIntegerAttribute(stringTag));
+        Assert.assertEquals(new Long(0L), record.getUnsignedIntegerAttribute(binaryTag));
+
+        record.setAttribute("UI", BinaryCodec.MAX_UINT);
+        Assert.assertEquals(new Long(BinaryCodec.MAX_UINT), record.getUnsignedIntegerAttribute(stringTag));
+        Assert.assertEquals(new Long(BinaryCodec.MAX_UINT), record.getUnsignedIntegerAttribute(binaryTag));
+
+        final SAMBinaryTagAndValue tv_zero = new SAMBinaryTagAndValue(binaryTag, 0L);
+        record = new SAMRecord(header){
+            {
+                setAttributes(tv_zero);
+            }
+        };
+        Assert.assertEquals(new Long(0L), record.getUnsignedIntegerAttribute(stringTag));
+        Assert.assertEquals(new Long(0L), record.getUnsignedIntegerAttribute(binaryTag));
+
+        final SAMBinaryTagAndValue tv_max = new SAMBinaryTagAndValue(binaryTag, BinaryCodec.MAX_UINT);
+        record = new SAMRecord(header){
+            {
+                setAttributes(tv_max);
+            }
+        };
+        Assert.assertEquals(new Long(BinaryCodec.MAX_UINT), record.getUnsignedIntegerAttribute(stringTag));
+        Assert.assertEquals(new Long(BinaryCodec.MAX_UINT), record.getUnsignedIntegerAttribute(binaryTag));
+    }
+
+    /**
+     * This is an alternative to test_getUnsignedIntegerAttribute_valid().
+     * This is required for testing invalid (out of range) unsigned integer value.
+     */
+    @Test
+    public void test_getUnsignedIntegerAttribute_valid_alternative() {
+        final short tag = SAMTagUtil.getSingleton().makeBinaryTag("UI");
+        SAMFileHeader header = new SAMFileHeader();
+        SAMRecord record;
+
+        record = new SAMRecord(header);
+        record.setAttribute("UI", 0L);
+        Assert.assertEquals(new Long(0L), record.getUnsignedIntegerAttribute(tag));
+
+        record = new SAMRecord(header);
+        record.setAttribute("UI", BinaryCodec.MAX_UINT);
+        Assert.assertEquals(new Long(BinaryCodec.MAX_UINT), record.getUnsignedIntegerAttribute("UI"));
+    }
+
+    @Test(expectedExceptions = SAMException.class)
+    public void test_getUnsignedIntegerAttribute_negative() {
+        SAMFileHeader header = new SAMFileHeader();
+        SAMRecord record = new SAMRecord(header);
+        record.setAttribute("UI", -1L);
+        record.getUnsignedIntegerAttribute("UI");
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void test_setUnsignedIntegerAttributeTooLarge() {
+        SAMFileHeader header = new SAMFileHeader();
+        SAMRecord record = new SAMRecord(header);
+        record.setAttribute("UI", BinaryCodec.MAX_UINT + 1);
+    }
+
+    // NOTE: SAMRecord.isAllowedAttributeValue is deprecated, as it has been moved into
+    // SAMBinaryTagAndValue, but we'll leave this test here until the code is removed.
+    @Test
+    public void test_isAllowedAttributeDataType() {
+        Assert.assertTrue(SAMRecord.isAllowedAttributeValue(new Byte((byte) 0)));
+        Assert.assertTrue(SAMRecord.isAllowedAttributeValue(new Short((short) 0)));
+        Assert.assertTrue(SAMRecord.isAllowedAttributeValue(new Integer(0)));
+        Assert.assertTrue(SAMRecord.isAllowedAttributeValue("a string"));
+        Assert.assertTrue(SAMRecord.isAllowedAttributeValue(new Character('C')));
+        Assert.assertTrue(SAMRecord.isAllowedAttributeValue(new Float(0.1F)));
+        Assert.assertTrue(SAMRecord.isAllowedAttributeValue(new byte[]{0}));
+        Assert.assertTrue(SAMRecord.isAllowedAttributeValue(new short[]{0}));
+        Assert.assertTrue(SAMRecord.isAllowedAttributeValue(new int[]{0}));
+        Assert.assertTrue(SAMRecord.isAllowedAttributeValue(new float[]{0.1F}));
+
+        // unsigned integers:
+        Assert.assertTrue(SAMRecord.isAllowedAttributeValue(new Long(0)));
+        Assert.assertTrue(SAMRecord.isAllowedAttributeValue(new Long(BinaryCodec.MAX_UINT)));
+        Assert.assertTrue(SAMRecord.isAllowedAttributeValue(new Long(-1L)));
+        Assert.assertFalse(SAMRecord.isAllowedAttributeValue(new Long(BinaryCodec.MAX_UINT + 1L)));
+        Assert.assertFalse(SAMRecord.isAllowedAttributeValue(new Long(Integer.MIN_VALUE - 1L)));
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void test_setAttribute_unsigned_int_negative() {
+        SAMFileHeader header = new SAMFileHeader();
+        SAMRecord record = new SAMRecord(header);
+        Assert.assertNull(record.getUnsignedIntegerAttribute("UI"));
+        record.setAttribute("UI", (long) Integer.MIN_VALUE - 1L);
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void test_setAttribute_unsigned_int_tooLarge() {
+        SAMFileHeader header = new SAMFileHeader();
+        SAMRecord record = new SAMRecord(header);
+        Assert.assertNull(record.getUnsignedIntegerAttribute("UI"));
+        record.setAttribute("UI", (long) BinaryCodec.MAX_UINT + 1L);
+    }
+
+    @Test
+    public void test_setAttribute_null_removes_tag() {
+        final short tag = SAMTagUtil.getSingleton().makeBinaryTag("UI");
+        SAMFileHeader header = new SAMFileHeader();
+        SAMRecord record = new SAMRecord(header);
+        Assert.assertNull(record.getUnsignedIntegerAttribute(tag));
+
+        record.setAttribute(tag, BinaryCodec.MAX_UINT);
+        Assert.assertEquals(new Long(BinaryCodec.MAX_UINT), record.getUnsignedIntegerAttribute(tag));
+
+        record.setAttribute(tag, null);
+        Assert.assertNull(record.getUnsignedIntegerAttribute(tag));
+    }
+
+    private SAMRecord createTestRecordHelper() {
+        return new SAMRecordSetBuilder().addFrag("test", 0, 1, false, false, "3S9M", null, 2);
+    }
+
+    @Test
+    public void testReferenceName() {
+        SAMRecord sam = createTestRecordHelper();
+
+        // NO_ALIGNMENT_NAME
+        sam.setReferenceName(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME);
+        Assert.assertTrue(sam.getReferenceName().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME));
+        Assert.assertTrue(sam.getReferenceIndex().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX));
+
+        // valid reference name
+        sam = createTestRecordHelper();
+        sam.setReferenceName("chr4");
+        Assert.assertTrue(sam.getReferenceName().equals("chr4"));
+        Assert.assertTrue(sam.getReferenceIndex().equals(3));
+
+        // invalid reference name sets name but leaves ref index invalid
+        sam = createTestRecordHelper();
+        sam.setReferenceName("unresolvableName");
+        Assert.assertTrue(sam.getReferenceName().equals("unresolvableName"));
+        Assert.assertTrue(sam.getReferenceIndex().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX));
+    }
+
+    @Test
+    public void testReferenceIndex() {
+        // NO_ALIGNMENT_REFERENCE
+        SAMRecord sam = createTestRecordHelper();
+        sam.setReferenceIndex(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
+        Assert.assertTrue(sam.getReferenceIndex().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX));
+        Assert.assertTrue(sam.getReferenceName().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME));
+
+        // valid reference
+        sam = createTestRecordHelper();
+        sam.setReferenceIndex(3);
+        Assert.assertTrue(sam.getReferenceIndex().equals(3));
+        Assert.assertTrue(sam.getReferenceName().equals("chr4"));
+    }
+
+    @Test(expectedExceptions=IllegalArgumentException.class)
+    public void testInvalidReferenceIndex() {
+        // unresolvable reference
+        final SAMRecord sam = createTestRecordHelper();
+        sam.setReferenceIndex(9999);
+    }
+
+    @Test
+    public void testMateReferenceName() {
+        // NO_ALIGNMENT_NAME
+        SAMRecord sam = createTestRecordHelper();
+        sam.setMateReferenceName(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME);
+        Assert.assertTrue(sam.getMateReferenceName().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME));
+        Assert.assertTrue(sam.getMateReferenceIndex().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX));
+
+        // valid reference
+        sam = createTestRecordHelper();
+        sam.setMateReferenceName("chr4");
+        Assert.assertTrue(sam.getMateReferenceName().equals("chr4"));
+        Assert.assertTrue(sam.getMateReferenceIndex().equals(3));
+
+        // unresolvable reference
+        sam = createTestRecordHelper();
+        sam.setMateReferenceName("unresolvableName");
+        Assert.assertTrue(sam.getMateReferenceName().equals("unresolvableName"));
+        Assert.assertTrue(sam.getMateReferenceIndex().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX));
+    }
+
+    @Test
+    public void testMateReferenceIndex() {
+        // NO_ALIGNMENT_REFERENCE
+        SAMRecord sam = createTestRecordHelper();
+        sam.setMateReferenceIndex(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
+        Assert.assertTrue(sam.getMateReferenceIndex().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX));
+        Assert.assertTrue(sam.getMateReferenceName().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME));
+
+        // valid reference
+        sam = createTestRecordHelper();
+        sam.setMateReferenceIndex(3);
+        Assert.assertTrue(sam.getMateReferenceIndex().equals(3));
+        Assert.assertTrue(sam.getMateReferenceName().equals("chr4"));
+    }
+
+    @Test(expectedExceptions=IllegalArgumentException.class)
+    public void testInvalidMateReferenceIndex() {
+        // unresolvable reference
+        final SAMRecord sam = createTestRecordHelper();
+        sam.setMateReferenceIndex(9999);
+    }
+
+    @Test
+    public void testRecordValidation() {
+        final SAMRecord sam = createTestRecordHelper();
+        List<SAMValidationError> validationErrors = sam.isValid(false);
+        Assert.assertTrue(validationErrors == null);
+    }
+
+    @Test
+    public void testInvalidAlignmentStartValidation() {
+        final SAMRecord sam = createTestRecordHelper();
+        sam.setAlignmentStart(0);
+        List<SAMValidationError> validationErrors = sam.isValid(false);
+        Assert.assertTrue(validationErrors != null && validationErrors.size() == 1);
+    }
+
+    // ----------------- NULL header tests ---------------------
+
+    @Test
+    public void testNullHeaderReferenceName() {
+        final SAMRecord sam = createTestRecordHelper();
+        final SAMFileHeader samHeader = sam.getHeader();
+        Assert.assertTrue(null != samHeader);
+        final String originalRefName = sam.getReferenceName();
+
+        // setting header to null retains the previously assigned ref name
+        sam.setHeader(null);
+        Assert.assertTrue(originalRefName.equals(sam.getReferenceName()));
+
+        // null header allows reference name to be set to NO_ALIGNMENT_REFERENCE_NAME
+        sam.setReferenceName(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME);
+        Assert.assertTrue(sam.getReferenceName().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME));
+        Assert.assertTrue(sam.getReferenceIndex().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX));
+
+        // null header allows reference name to be reset to a valid name
+        sam.setReferenceName(originalRefName);
+        Assert.assertTrue(sam.getReferenceName().equals(originalRefName));
+    }
+
+    @Test
+    public void testNullHeaderReferenceIndex() {
+        SAMRecord sam = createTestRecordHelper();
+        final SAMFileHeader samHeader = sam.getHeader();
+        int originalRefIndex = sam.getReferenceIndex();
+        Assert.assertTrue(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX != originalRefIndex);
+
+        // setting header to null resets the reference index to null
+        sam.setHeader(null);
+        Assert.assertTrue(null == sam.mReferenceIndex);
+        // restoring the header restores the reference index to the original value
+        sam.setHeader(samHeader);
+        Assert.assertTrue(sam.getReferenceIndex().equals(originalRefIndex));
+
+        // setting the header to null allows setting the reference index to NO_ALIGNMENT_REFERENCE_INDEX
+        sam.setHeader(null);
+        sam.setReferenceIndex(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
+        Assert.assertTrue(sam.getReferenceIndex().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX));
+        Assert.assertTrue(sam.getReferenceName().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME));
+
+        // force the internal SAMRecord reference index value to (null) initial state
+        sam = new SAMRecord(null);
+        Assert.assertTrue(null == sam.mReferenceIndex);
+        Assert.assertTrue(sam.getReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
+
+        // an unresolvable reference name doesn't throw
+        final String unresolvableRefName = "unresolvable";
+        sam.setReferenceName(unresolvableRefName);
+        // now force the SAMRecord to try to resolve the unresolvable name
+        sam.setHeader(samHeader);
+        Assert.assertTrue(null == sam.mReferenceIndex);
+        Assert.assertTrue(sam.getReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
+    }
+
+    @Test(expectedExceptions=IllegalStateException.class)
+    public void testNullHeaderSetReferenceIndex() {
+        final SAMRecord sam = createTestRecordHelper();
+        sam.setHeader(null);
+        // setReferenceIndex with null header throws
+        sam.setReferenceIndex(3);
+    }
+
+    @Test(expectedExceptions=IllegalStateException.class)
+    public void testNullHeaderGetReferenceIndex() {
+        final SAMRecord sam = createTestRecordHelper();
+        sam.setHeader(null);
+        // getReferenceIndex with null header throws
+        sam.getReferenceIndex();
+    }
+
+    @Test(expectedExceptions=IllegalStateException.class)
+    public void testNullHeaderForceIndexResolutionFailure() {
+        // force the internal SAMRecord reference index value to null initial state
+        final SAMRecord sam = new SAMRecord(null);
+        sam.setReferenceName("unresolvable");
+        sam.getReferenceIndex();
+    }
+
+    @Test
+    public void testNullHeaderMateReferenceName() {
+        final SAMRecord sam = createTestRecordHelper();
+        final SAMFileHeader samHeader = sam.getHeader();
+        Assert.assertTrue(null != samHeader);
+        final String originalMateRefName = sam.getMateReferenceName();
+
+        // setting header to null retains the previously assigned mate ref name
+        sam.setHeader(null);
+        Assert.assertTrue(originalMateRefName.equals(sam.getMateReferenceName()));
+
+        // null header allows mate reference name to be set to NO_ALIGNMENT_REFERENCE_NAME
+        sam.setMateReferenceName(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME);
+        Assert.assertTrue(sam.getMateReferenceName().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME));
+        Assert.assertTrue(sam.getMateReferenceIndex().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX));
+
+        // null header allows mate reference name to be reset to a valid name
+        sam.setMateReferenceName(originalMateRefName);
+        Assert.assertTrue(sam.getMateReferenceName().equals(originalMateRefName));
+    }
+
+    @Test
+    public void testNullHeaderMateReferenceIndex() {
+        SAMRecord sam = createTestRecordHelper();
+        final SAMFileHeader samHeader = sam.getHeader();
+        sam.setMateReferenceName("chr1");
+        int originalMateRefIndex = sam.getMateReferenceIndex();
+        Assert.assertTrue(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX != originalMateRefIndex);
+
+        // setting header to null resets the mate reference index to null
+        sam.setHeader(null);
+        Assert.assertTrue(null == sam.mMateReferenceIndex);
+        // restoring the header restores the mate reference index to the original value
+        sam.setHeader(samHeader);
+        Assert.assertTrue(sam.getMateReferenceIndex().equals(originalMateRefIndex));
+
+        // setting the header to null allows setting the mate reference index to NO_ALIGNMENT_REFERENCE_INDEX
+        sam.setHeader(null);
+        sam.setMateReferenceIndex(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
+        Assert.assertTrue(sam.getMateReferenceIndex().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX));
+        Assert.assertTrue(sam.getMateReferenceName().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME));
+
+        // force the internal SAMRecord mate reference index value to (null) initial state
+        sam = new SAMRecord(null);
+        Assert.assertTrue(null == sam.mMateReferenceIndex);
+        Assert.assertTrue(sam.getMateReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
+
+        // an unresolvable mate reference name doesn't throw
+        final String unresolvableRefName = "unresolvable";
+        sam.setMateReferenceName(unresolvableRefName);
+        // now force the SAMRecord to try to resolve the unresolvable mate reference name
+        sam.setHeader(samHeader);
+        Assert.assertTrue(null == sam.mMateReferenceIndex);
+        Assert.assertTrue(sam.getMateReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
+    }
+
+    @Test(expectedExceptions=IllegalStateException.class)
+    public void testNullHeaderSetMateReferenceIndex() {
+        final SAMRecord sam = createTestRecordHelper();
+        sam.setHeader(null);
+        sam.setMateReferenceIndex(3);
+    }
+
+    @Test(expectedExceptions=IllegalStateException.class)
+    public void testNullHeaderGetMateReferenceIndex() {
+        final SAMRecord sam = createTestRecordHelper();
+        sam.setMateReferenceName("chr1");
+        sam.setHeader(null);
+        // getMateReferenceIndex with null header throws
+        sam.getMateReferenceIndex();
+    }
+
+    @Test(expectedExceptions=IllegalStateException.class)
+    public void testNullHeaderForceMateIndexResolutionFailure() {
+        // force the internal SAMRecord reference index value to null initial state
+        final SAMRecord sam = new SAMRecord(null);
+        sam.setMateReferenceName("unresolvable");
+        sam.getMateReferenceIndex();
+    }
+
+    @Test
+    public void testNullHeaderGetReadGroup() {
+        final SAMRecord sam = createTestRecordHelper();
+        Assert.assertTrue(null != sam.getHeader());
+
+        Assert.assertTrue(null != sam.getReadGroup() && sam.getReadGroup().getId().equals("1"));
+        sam.setHeader(null);
+        Assert.assertNull(sam.getReadGroup());
+    }
+
+    @Test(dataProvider = "serializationTestData")
+    public void testNullHeaderSerialization(final File inputFile) throws Exception {
+        final SamReader reader = SamReaderFactory.makeDefault().open(inputFile);
+        final SAMRecord initialSAMRecord = reader.iterator().next();
+        reader.close();
+
+        initialSAMRecord.setHeader(null);
+        final SAMRecord deserializedSAMRecord = TestUtil.serializeAndDeserialize(initialSAMRecord);
+        Assert.assertEquals(deserializedSAMRecord, initialSAMRecord, "Deserialized SAMRecord not equal to original SAMRecord");
+    }
+
+
+    @Test
+    public void testValidateNonsenseCigar(){
+        // Create nonsense record
+        SAMRecord rec = createTestRecordHelper();
+        rec.setCigarString("nonsense");
+
+        // The default validation stringency of a SAMRecord is SILENT.
+        rec.setValidationStringency(ValidationStringency.STRICT);
+        // Validate record
+        List<SAMValidationError> err = rec.validateCigar(-1);
+
+        Assert.assertNotNull(err);
+        Assert.assertEquals(err.size(), 1);
+        Assert.assertEquals(err.get(0).getType(), SAMValidationError.Type.INVALID_CIGAR);
+    }
+
+    @Test
+    public void testNullHeaderRecordValidation() {
+        final SAMRecord sam = createTestRecordHelper();
+        sam.setHeader(null);
+        List<SAMValidationError> validationErrors = sam.isValid(false);
+        Assert.assertTrue(validationErrors == null);
+    }
+
+    @Test
+    public void testNullHeaderDeepCopy() {
+        SAMRecord sam = createTestRecordHelper();
+        sam.setHeader(null);
+        final SAMRecord deepCopy = sam.deepCopy();
+
+        Assert.assertTrue(sam.equals(deepCopy));
+    }
+
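+    // Resets the cigar from its string form on a record with no header attached, then forces
+    // getCigar() to re-decode it under STRICT stringency and checks the decoded cigar matches.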
+    private void testNullHeaderCigar(SAMRecord rec) {
+        Cigar origCigar = rec.getCigar();
+        Assert.assertNotNull(origCigar);
+        String originalCigarString = rec.getCigarString();
+
+        // set the cigar to null and then reset the cigar string in order to force getCigar to decode it
+        rec.setCigar(null);
+        Assert.assertNull(rec.getCigar());
+        rec.setCigarString(originalCigarString);
+        rec.setValidationStringency(ValidationStringency.STRICT);
+        rec.setHeader(null);
+        Assert.assertTrue(rec.getValidationStringency() == ValidationStringency.STRICT);
+
+        // force getCigar to decode the cigar string, validate that SAMRecord doesn't try to validate the cigar
+        Cigar cig = rec.getCigar();
+        Assert.assertNotNull(cig);
+        String cigString = TextCigarCodec.encode(cig);
+        Assert.assertEquals(cigString, originalCigarString);
+    }
+
+    @Test
+    public void testNullHeaderGetCigarSAM() {
+        SAMRecord sam = createTestRecordHelper();
+        testNullHeaderCigar(sam);
+    }
+
+    @Test
+    public void testNullHeaderGetCigarBAM() {
+        SAMRecord sam = createTestRecordHelper();
+        SAMRecordFactory factory = new DefaultSAMRecordFactory();
+        BAMRecord bamRec = factory.createBAMRecord(
+                sam.getHeader(),
+                sam.getReferenceIndex(),
+                sam.getAlignmentStart(),
+                (short) sam.getReadNameLength(),
+                (short) sam.getMappingQuality(),
+                0,
+                sam.getCigarLength(),
+                sam.getFlags(),
+                sam.getReadLength(),
+                sam.getMateReferenceIndex(),
+                sam.getMateAlignmentStart(),
+                0, null);
+
+        bamRec.setCigarString(sam.getCigarString());
+
+        testNullHeaderCigar(bamRec);
+    }
+
+    @Test
+    public void testSetHeaderStrictValid() {
+        SAMRecord sam = createTestRecordHelper();
+        final SAMFileHeader samHeader = sam.getHeader();
+        Integer originalRefIndex = sam.getReferenceIndex();
+        Assert.assertTrue(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX != originalRefIndex);
+
+        // force re-resolution of the reference name
+        sam.setHeaderStrict(samHeader);
+        Assert.assertEquals(sam.getReferenceIndex(), originalRefIndex);
+    }
+
+    @Test
+    public void testSetHeaderStrictValidHeaderless() {
+        SAMRecord sam = createTestRecordHelper();
+        final SAMFileHeader samHeader = sam.getHeader();
+        Integer originalRefIndex = sam.getReferenceIndex();
+        Assert.assertTrue(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX != originalRefIndex);
+
+        sam.setHeader(null);
+        // force re-resolution of the reference name
+        sam.setHeaderStrict(samHeader);
+        Assert.assertEquals(sam.getReferenceIndex(), originalRefIndex);
+    }
+
+    @Test
+    public void testSetHeaderStrictValidNewHeader() {
+        final SAMRecord sam = createTestRecordHelper();
+        final String origSequenceName = sam.getContig();
+
+        final SAMFileHeader origSamHeader = sam.getHeader();
+        final int origSequenceLength = origSamHeader.getSequence(origSequenceName).getSequenceLength();
+        final SAMFileHeader newHeader = new SAMFileHeader();
+        newHeader.addSequence(new SAMSequenceRecord(origSequenceName, origSequenceLength));
+
+        // force re-resolution of the reference name against the new header
+        sam.setHeaderStrict(newHeader);
+        Assert.assertEquals(sam.getReferenceIndex(), new Integer(0));
+    }
+
+    @Test(expectedExceptions=IllegalArgumentException.class)
+    public void testSetHeaderStrictInvalidReference() {
+        SAMRecord sam = createTestRecordHelper();
+        final SAMFileHeader samHeader = sam.getHeader();
+
+        sam.setReferenceName("unresolvable");
+        Assert.assertEquals(new Integer(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX), sam.getReferenceIndex());
+
+        // throw on force re-resolution of the unresolvable reference name
+        sam.setHeaderStrict(samHeader);
+    }
+
+    @Test(expectedExceptions=IllegalArgumentException.class)
+    public void testSetHeaderStrictInvalidMateReference() {
+        SAMRecord sam = createTestRecordHelper();
+        final SAMFileHeader samHeader = sam.getHeader();
+
+        sam.setMateReferenceName("unresolvable");
+        Assert.assertEquals(new Integer(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX), sam.getMateReferenceIndex());
+
+        // throw on force re-resolution of the unresolvable mate reference name
+        sam.setHeaderStrict(samHeader);
+    }
+
+    @Test
+    public void testSetHeaderStrictNull() {
+        SAMRecord sam = createTestRecordHelper();
+        Assert.assertNotNull(sam.getHeader());
+        sam.setHeaderStrict(null);
+        Assert.assertNull(sam.getHeader());
+        Assert.assertNull(sam.mReferenceIndex);
+    }
+
+    // resolveIndexFromName
+
+    @Test
+    public void testResolveIndexResolvable() {
+        final SAMRecord sam = createTestRecordHelper();
+        final SAMFileHeader samHeader = sam.getHeader();
+        final String contigName = sam.getContig();
+        Assert.assertEquals(SAMRecord.resolveIndexFromName(contigName, samHeader, true), new Integer(samHeader.getSequenceIndex(contigName)));
+    }
+
+    @Test(expectedExceptions=IllegalStateException.class)
+    public void testResolveIndexUnresolvableNullHeader() {
+        SAMRecord.resolveIndexFromName("unresolvable", null, false);
+    }
+
+    @Test(expectedExceptions=IllegalArgumentException.class)
+    public void testResolveIndexUnresolvableStrict() {
+        final SAMFileHeader samHeader = new SAMFileHeader();
+        SAMRecord.resolveIndexFromName("unresolvable", samHeader, true);
+    }
+
+    @Test
+    public void testResolveIndexUnresolvableNotStrict() {
+        final SAMFileHeader samHeader = new SAMFileHeader();
+        Assert.assertEquals(SAMRecord.resolveIndexFromName("unresolvable", samHeader, false), null);
+    }
+
+    @Test
+    public void testResolveIndexNoAlignment() {
+        final SAMFileHeader samHeader = new SAMFileHeader();
+        Assert.assertEquals(SAMRecord.resolveIndexFromName(
+                SAMRecord.NO_ALIGNMENT_REFERENCE_NAME, samHeader, true), new Integer(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX));
+    }
+
+    @Test(expectedExceptions=IllegalStateException.class)
+    public void testResolveIndexNullHeader() {
+        SAMRecord.resolveIndexFromName("unresolvable", null, true);
+    }
+
+    // resolveNameFromIndex
+
+    @Test
+    public void testResolveNameResolvable() {
+        final SAMRecord sam = createTestRecordHelper();
+        final SAMFileHeader samHeader = sam.getHeader();
+        final String contigName = sam.getContig();
+        final Integer contigIndex = samHeader.getSequenceIndex(contigName);
+        Assert.assertEquals(SAMRecord.resolveNameFromIndex(contigIndex, samHeader), contigName);
+    }
+
+    @Test(expectedExceptions=IllegalArgumentException.class)
+    public void testResolveNameUnresolvable() {
+        final SAMFileHeader samHeader = new SAMFileHeader();
+        SAMRecord.resolveNameFromIndex(99, samHeader);
+    }
+
+    @Test
+    public void testResolveNameNoAlignment() {
+        final SAMFileHeader samHeader = new SAMFileHeader();
+        Assert.assertEquals(SAMRecord.resolveNameFromIndex(
+                SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX, samHeader), SAMRecord.NO_ALIGNMENT_REFERENCE_NAME);
+    }
+
+    @Test(expectedExceptions=IllegalStateException.class)
+    public void testResolveNameNullHeader() {
+        SAMRecord.resolveNameFromIndex(1, null);
+    }
+
+}
diff --git a/src/tests/java/htsjdk/samtools/SAMSequenceDictionaryTest.java b/src/test/java/htsjdk/samtools/SAMSequenceDictionaryTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/SAMSequenceDictionaryTest.java
rename to src/test/java/htsjdk/samtools/SAMSequenceDictionaryTest.java
diff --git a/src/tests/java/htsjdk/samtools/SAMTextReaderTest.java b/src/test/java/htsjdk/samtools/SAMTextReaderTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/SAMTextReaderTest.java
rename to src/test/java/htsjdk/samtools/SAMTextReaderTest.java
diff --git a/src/test/java/htsjdk/samtools/SAMTextWriterTest.java b/src/test/java/htsjdk/samtools/SAMTextWriterTest.java
new file mode 100644
index 0000000..123ab6b
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/SAMTextWriterTest.java
@@ -0,0 +1,130 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools;
+
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+public class SAMTextWriterTest {
+
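+    // Despite its name, this helper returns a SAMRecordSetBuilder holding two read pairs and two
+    // fragments; the tests obtain both the records and a SamReader view from it.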
+    private SAMRecordSetBuilder getSAMReader(final boolean sortForMe, final SAMFileHeader.SortOrder sortOrder) {
+        final SAMRecordSetBuilder ret = new SAMRecordSetBuilder(sortForMe, sortOrder);
+        ret.addPair("readB", 20, 200, 300);
+        ret.addPair("readA", 20, 100, 150);
+        ret.addFrag("readC", 20, 140, true);
+        ret.addFrag("readD", 20, 140, false);
+        return ret;
+    }
+
+    @Test
+    public void testNullHeader() throws Exception {
+        final SAMRecordSetBuilder recordSetBuilder = getSAMReader(true, SAMFileHeader.SortOrder.coordinate);
+        for (final SAMRecord rec : recordSetBuilder.getRecords()) {
+            rec.setHeader(null);
+        }
+        doTest(recordSetBuilder);
+    }
+
+    @Test
+    public void testBasic() throws Exception {
+        doTest(SamFlagField.DECIMAL);
+    }
+
+    @Test
+    public void testBasicHexFlag() throws Exception {
+        doTest(SamFlagField.HEXADECIMAL);
+    }
+
+    @Test
+    public void testBasicOctalFlag() throws Exception {
+        doTest(SamFlagField.OCTAL);
+    }
+
+    @Test
+    public void testBasicStringFlag() throws Exception {
+        doTest(SamFlagField.STRING);
+    }
+
+    private void doTest(final SAMRecordSetBuilder recordSetBuilder) throws Exception {
+        doTest(recordSetBuilder, SamFlagField.DECIMAL);
+    }
+
+    private void doTest(final SamFlagField samFlagField) throws Exception {
+        doTest(getSAMReader(true, SAMFileHeader.SortOrder.coordinate), samFlagField);
+    }
+
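+    // Round-trip check: writes the builder's records to a temporary SAM file using the given
+    // flag-field encoding, reads the file back, and asserts the header and every record match the input.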
+    private void doTest(final SAMRecordSetBuilder recordSetBuilder, final SamFlagField samFlagField) throws Exception {
+        SamReader inputSAM = recordSetBuilder.getSamReader();
+        final File samFile = File.createTempFile("tmp.", ".sam");
+        samFile.deleteOnExit();
+        final Map<String, Object> tagMap = new HashMap<String, Object>();
+        tagMap.put("XC", new Character('q'));
+        tagMap.put("XI", 12345);
+        tagMap.put("XF", 1.2345f);
+        tagMap.put("XS", "Hi,Mom!");
+        for (final Map.Entry<String, Object> entry : tagMap.entrySet()) {
+            inputSAM.getFileHeader().setAttribute(entry.getKey(), entry.getValue().toString());
+        }
+        final SAMFileWriter samWriter = new SAMFileWriterFactory().setSamFlagFieldOutput(samFlagField).makeSAMWriter(inputSAM.getFileHeader(), false, samFile);
+        for (final SAMRecord samRecord : inputSAM) {
+            samWriter.addAlignment(samRecord);
+        }
+        samWriter.close();
+
+        // Read it back in and confirm that it matches the input
+        inputSAM = recordSetBuilder.getSamReader();
+        // Stuff in the attributes again since this has been created again.
+        for (final Map.Entry<String, Object> entry : tagMap.entrySet()) {
+            inputSAM.getFileHeader().setAttribute(entry.getKey(), entry.getValue().toString());
+        }
+
+        final SamReader newSAM = SamReaderFactory.makeDefault().open(samFile);
+        Assert.assertEquals(newSAM.getFileHeader(), inputSAM.getFileHeader());
+        final Iterator<SAMRecord> inputIt = inputSAM.iterator();
+        final Iterator<SAMRecord> newSAMIt = newSAM.iterator();
+        while (inputIt.hasNext()) {
+            Assert.assertTrue(newSAMIt.hasNext());
+            final SAMRecord inputSAMRecord = inputIt.next();
+            final SAMRecord newSAMRecord = newSAMIt.next();
+
+            // Force reference index attributes to be populated
+            inputSAMRecord.getReferenceIndex();
+            newSAMRecord.getReferenceIndex();
+            inputSAMRecord.getMateReferenceIndex();
+            newSAMRecord.getMateReferenceIndex();
+
+            // Force these to be equal
+            newSAMRecord.setIndexingBin(inputSAMRecord.getIndexingBin());
+
+            Assert.assertEquals(newSAMRecord, inputSAMRecord);
+        }
+        Assert.assertFalse(newSAMIt.hasNext());
+        inputSAM.close();
+    }
+}
diff --git a/src/tests/java/htsjdk/samtools/SAMUtilsTest.java b/src/test/java/htsjdk/samtools/SAMUtilsTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/SAMUtilsTest.java
rename to src/test/java/htsjdk/samtools/SAMUtilsTest.java
diff --git a/src/test/java/htsjdk/samtools/SamFileHeaderMergerTest.java b/src/test/java/htsjdk/samtools/SamFileHeaderMergerTest.java
new file mode 100644
index 0000000..6e4fd75
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/SamFileHeaderMergerTest.java
@@ -0,0 +1,265 @@
+/**
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ **/
+
+
+package htsjdk.samtools;
+
+import htsjdk.samtools.util.CloserUtil;
+import htsjdk.samtools.util.IOUtil;
+import htsjdk.samtools.util.SequenceUtil;
+import htsjdk.samtools.util.StringUtil;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.BufferedReader;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+import static org.testng.Assert.assertEquals;
+
+
+/**
+ * @author aaron
+ * @version 1.0
+ * @date May 20, 2009
+ * <p/>
+ * Class SamFileHeaderMergerTest
+ * <p/>
+ * Tests the ability of the SamFileHeaderMerger class to merge sequence dictionaries.
+ */
+public class SamFileHeaderMergerTest {
+
+    private static File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools");
+
+    /** Tests that if sequence-dictionary merging is disabled, a SequenceListsDifferException is thrown for BAMs with different dictionaries. */
+    @Test(expectedExceptions = SequenceUtil.SequenceListsDifferException.class)
+    public void testMergedException() {
+        final File[] INPUT = {new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/Chromosome1to10.bam"),
+                new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/Chromosome5to9.bam")};
+        final List<SAMFileHeader> headers = new ArrayList<SAMFileHeader>();
+        for (final File inFile : INPUT) {
+            IOUtil.assertFileIsReadable(inFile);
+            headers.add(SamReaderFactory.makeDefault().getFileHeader(inFile));
+        }
+        new SamFileHeaderMerger(SAMFileHeader.SortOrder.unsorted, headers, false);
+    }
+
+    /** Tests that we can successfully merge two files with different sequence dictionaries when merging is enabled. */
+    @Test
+    public void testMerging() {
+        final File[] INPUT = {new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/Chromosome1to10.bam"),
+                new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/Chromosome5to9.bam")};
+        final List<SamReader> readers = new ArrayList<SamReader>();
+        final List<SAMFileHeader> headers = new ArrayList<SAMFileHeader>();
+        for (final File inFile : INPUT) {
+            IOUtil.assertFileIsReadable(inFile);
+            // We are now checking for zero-length reads, so suppress complaint about that.
+            final SamReader in = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT).open(inFile);
+
+            readers.add(in);
+            headers.add(in.getFileHeader());
+        }
+        final MergingSamRecordIterator iterator;
+        final SamFileHeaderMerger headerMerger = new SamFileHeaderMerger(SAMFileHeader.SortOrder.unsorted, headers, true);
+        iterator = new MergingSamRecordIterator(headerMerger, readers, false);
+        headerMerger.getMergedHeader();
+
+        // count the total reads, and record read counts for each sequence
+        Map<Integer, Integer> seqCounts = new HashMap<Integer, Integer>();
+        int totalCount = 0;
+
+        while (iterator.hasNext()) {
+            SAMRecord r = iterator.next();
+            if (seqCounts.containsKey(r.getReferenceIndex())) {
+                seqCounts.put(r.getReferenceIndex(), seqCounts.get(r.getReferenceIndex()) + 1);
+            } else {
+                seqCounts.put(r.getReferenceIndex(), 1);
+            }
+            ++totalCount;
+        }
+        assertEquals(totalCount, 1500);
+        for (Integer i : seqCounts.keySet()) {
+            if (i < 4 || i > 8) {
+                // sequences outside 5 - 9 (indices below 4 or above 8) should have 100 reads
+                assertEquals(seqCounts.get(i).intValue(), 100);
+            } else {
+                // sequences 5 - 9 (indices 4 - 8) should have 200 reads
+                assertEquals(seqCounts.get(i).intValue(), 200);
+            }
+        }
+        CloserUtil.close(readers);
+    }
+
+    private static final String sq1 = "@SQ\tSN:chr1\tLN:1000\n";
+    private static final String sq2 = "@SQ\tSN:chr2\tLN:1000\n";
+    private static final String sq3 = "@SQ\tSN:chr3\tLN:1000\n";
+    private static final String sq4 = "@SQ\tSN:chr4\tLN:1000\n";
+    private static final String sq5 = "@SQ\tSN:chr5\tLN:1000\n";
+
+    @Test
+    public void testSequenceDictionaryMerge() {
+        final String sd1 = sq1 + sq2 + sq5;
+        final String sd2 = sq2 + sq3 + sq4;
+        SamReader reader1 = SamReaderFactory.makeDefault().open(SamInputResource.of(new ByteArrayInputStream(StringUtil.stringToBytes(sd1))));
+        SamReader reader2 = SamReaderFactory.makeDefault().open(SamInputResource.of(new ByteArrayInputStream(StringUtil.stringToBytes(sd2))));
+        final List<SAMFileHeader> inputHeaders = Arrays.asList(reader1.getFileHeader(), reader2.getFileHeader());
+        SamFileHeaderMerger merger = new SamFileHeaderMerger(SAMFileHeader.SortOrder.coordinate, inputHeaders, true);
+        final SAMFileHeader mergedHeader = merger.getMergedHeader();
+        for (final SAMFileHeader inputHeader : inputHeaders) {
+            int prevTargetIndex = -1;
+            for (final SAMSequenceRecord sequenceRecord : inputHeader.getSequenceDictionary().getSequences()) {
+                final int targetIndex = mergedHeader.getSequenceIndex(sequenceRecord.getSequenceName());
+                Assert.assertNotSame(targetIndex, -1);
+                Assert.assertTrue(prevTargetIndex < targetIndex);
+                prevTargetIndex = targetIndex;
+            }
+        }
+        CloserUtil.close(reader1);
+        CloserUtil.close(reader2);
+    }
+
+    @Test(dataProvider = "data")
+    public void testProgramGroupAndReadGroupMerge(final File[] inputFiles, final File expectedOutputFile) throws IOException {
+
+        BufferedReader reader = new BufferedReader(new FileReader(expectedOutputFile));
+
+        String line;
+        String expected_output = "";
+        while ((line = reader.readLine()) != null) {
+            expected_output += line + "\n";
+        }
+
+        final List<SamReader> readers = new ArrayList<SamReader>();
+        final List<SAMFileHeader> headers = new ArrayList<SAMFileHeader>();
+        for (final File inFile : inputFiles) {
+            IOUtil.assertFileIsReadable(inFile);
+
+            // We are now checking for zero-length reads, so suppress complaint about that.
+            final SamReader in = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT).open(inFile);
+            readers.add(in);
+            headers.add(in.getFileHeader());
+        }
+        final MergingSamRecordIterator iterator;
+
+        final SamFileHeaderMerger headerMerger = new SamFileHeaderMerger(SAMFileHeader.SortOrder.coordinate, headers, true);
+        iterator = new MergingSamRecordIterator(headerMerger, readers, false);
+
+        ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        SAMFileWriter writer = new SAMFileWriterFactory().makeSAMWriter(headerMerger.getMergedHeader(), true, baos);
+        while (iterator.hasNext()) {
+            writer.addAlignment(iterator.next());
+        }
+        writer.close();
+
+        String actual_output = StringUtil.bytesToString(baos.toByteArray());
+
+        List<String> actual = Arrays.asList(actual_output.split("\\n"));
+        List<String> expected = Arrays.asList(expected_output.split("\\n"));
+        for (int i = 0; i < expected.size(); i++) {
+            if (expected.get(i).startsWith("@")) {
+                Assert.assertTrue(headersEquivalent(actual.get(i), expected.get(i)));
+            } else {
+                List<String> expectedSamParts = Arrays.asList(expected.get(i).split("\\s*"));
+                List<String> actualSamParts = Arrays.asList(actual.get(i).split("\\s*"));
+                for (String exp : expectedSamParts) {
+                    Assert.assertTrue(actualSamParts.contains(exp));
+                }
+                for (String act : actualSamParts) {
+                    Assert.assertTrue(expectedSamParts.contains(act));
+                }
+            }
+        }
+        CloserUtil.close(readers);
+    }
+
+    private static final boolean headersEquivalent(String a, String b) {
+        if (a.length() != b.length()) return false;
+        List<String> remaining = new LinkedList<String>(Arrays.asList(a.split("\\t")));
+        for (final String item : b.split("\\t")) {
+            if (!remaining.remove(item)) return false;
+        }
+        return remaining.isEmpty();
+    }
+
+    @DataProvider(name = "data")
+    private Object[][] getProgramGroupAndReadGroupMergeData() {
+
+        return new Object[][]{
+                {
+
+                        new File[]{
+                                new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/case1/chr11sub_file1.sam"),
+                                new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/case1/chr11sub_file2.sam")},
+                        new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/case1/expected_output.sam")
+                }, {
+                new File[]{
+                        new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/case2/chr11sub_file1.sam"),
+                        new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/case2/chr11sub_file2.sam"),
+                        new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/case2/chr11sub_file3.sam"),
+                        new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/case2/chr11sub_file4.sam")},
+                new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/case2/expected_output.sam")
+        }
+        };
+    }
+
+    @Test(expectedExceptions = {SAMException.class})
+    public void testUnmergeableSequenceDictionary() {
+        final String sd1 = sq1 + sq2 + sq5;
+        final String sd2 = sq2 + sq3 + sq4 + sq1;
+        final SamReader reader1 = SamReaderFactory.makeDefault().open(SamInputResource.of(new ByteArrayInputStream(StringUtil.stringToBytes(sd1))));
+        final SamReader reader2 = SamReaderFactory.makeDefault().open(SamInputResource.of(new ByteArrayInputStream(StringUtil.stringToBytes(sd2))));
+        final List<SAMFileHeader> inputHeaders = Arrays.asList(reader1.getFileHeader(), reader2.getFileHeader());
+        new SamFileHeaderMerger(SAMFileHeader.SortOrder.coordinate, inputHeaders, true);
+        CloserUtil.close(reader1);
+        CloserUtil.close(reader2);
+    }
+
+    @DataProvider(name = "fourDigitBase36StrPositiveData")
+    public Object[][] positiveFourDigitBase36StrData() {
+        return new Object[][]{
+                {0, "0"},
+                {15, "F"},
+                {36, "10"},
+                {1200000, "PPXC"},
+                {36 * 36 * 36 * 36 - 2, "ZZZY"},
+                {36 * 36 * 36 * 36 - 1, "ZZZZ"},
+        };
+    }
+
+    @Test(dataProvider = "fourDigitBase36StrPositiveData")
+    public void fourDigitBase36StrPositiveTest(final int toConvert, final String expectedValue) {
+        final SamFileHeaderMerger headerMerger = new SamFileHeaderMerger(SAMFileHeader.SortOrder.coordinate, new ArrayList<SAMFileHeader>(), true);
+        Assert.assertEquals(expectedValue, headerMerger.positiveFourDigitBase36Str(toConvert));
+    }
+}
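
As a companion to the tests above, here is a minimal sketch of the merge path they exercise: collect the headers of several readers, hand them to SamFileHeaderMerger with dictionary merging enabled, and stream the records through MergingSamRecordIterator. Not part of the patch; the input file names are hypothetical.

import htsjdk.samtools.*;
import htsjdk.samtools.util.CloserUtil;

import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class HeaderMergeSketch {
    public static void main(final String[] args) {
        final List<SamReader> readers = new ArrayList<SamReader>();
        final List<SAMFileHeader> headers = new ArrayList<SAMFileHeader>();
        for (final File f : Arrays.asList(new File("a.bam"), new File("b.bam"))) {   // hypothetical inputs
            final SamReader reader = SamReaderFactory.makeDefault().open(f);
            readers.add(reader);
            headers.add(reader.getFileHeader());
        }

        // true => merge the sequence dictionaries; the merger throws if they cannot be reconciled.
        final SamFileHeaderMerger merger =
                new SamFileHeaderMerger(SAMFileHeader.SortOrder.coordinate, headers, true);
        final MergingSamRecordIterator iterator = new MergingSamRecordIterator(merger, readers, false);
        while (iterator.hasNext()) {
            final SAMRecord rec = iterator.next();
            // Reference indices in rec are relative to merger.getMergedHeader().
        }
        CloserUtil.close(readers);
    }
}
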
diff --git a/src/test/java/htsjdk/samtools/SamFilesTest.java b/src/test/java/htsjdk/samtools/SamFilesTest.java
new file mode 100644
index 0000000..443a4d1
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/SamFilesTest.java
@@ -0,0 +1,91 @@
+package htsjdk.samtools;
+
+import java.nio.file.Path;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * Test valid combinations of bam/cram vs bai/crai files.
+ * Created by vadim on 10/08/2015.
+ */
+public class SamFilesTest {
+    private static final String TEST_DATA = "src/test/resources/htsjdk/samtools/BAMFileIndexTest/";
+    private static final File BAM_FILE = new File(TEST_DATA + "index_test.bam");
+
+    @DataProvider(name = "FindIndexParams")
+    public static Object[][] paramsFindIndexForSuffixes() {
+        return new Object[][]{
+                // no index available sanity checks:
+                {".tmp", null, null},
+                {".bam", null, null},
+                {".cram", null, null},
+
+                // legit cases for BAM files:
+                {".bam", ".bai", ".bai"},
+                {".bam", ".bam.bai", ".bam.bai"},
+
+                // legit cases for CRAM files:
+                {".cram", ".cram.bai", ".cram.bai"},
+                {".cram", ".cram.crai", ".cram.crai"},
+
+                // special prohibited cases:
+                {".bam", ".crai", null},
+                {".tmp", ".crai", null},
+        };
+    }
+
+    @Test(dataProvider = "FindIndexParams")
+    public void testFindIndexForSuffixes(final String dataFileSuffix, final String indexFileSuffix, final String expectIndexSuffix) throws IOException {
+        final File dataFile = File.createTempFile("test", dataFileSuffix);
+        dataFile.deleteOnExit();
+        Assert.assertNull(SamFiles.findIndex(dataFile));
+        Assert.assertNull(SamFiles.findIndex(dataFile.toPath()));
+
+        File indexFile = null;
+        if (indexFileSuffix != null) {
+            indexFile = new File(dataFile.getAbsolutePath().replaceFirst("\\.\\S+$", indexFileSuffix));
+            indexFile.createNewFile();
+            indexFile.deleteOnExit();
+        }
+
+        final File foundIndexFile = SamFiles.findIndex(dataFile);
+        if (expectIndexSuffix == null) {
+            Assert.assertNull(foundIndexFile);
+        } else {
+            Assert.assertNotNull(foundIndexFile);
+            Assert.assertTrue(foundIndexFile.getName().endsWith(expectIndexSuffix));
+        }
+
+        final Path foundIndexPath = SamFiles.findIndex(dataFile.toPath());
+        if (expectIndexSuffix == null) {
+            Assert.assertNull(foundIndexPath);
+        } else {
+            Assert.assertNotNull(foundIndexPath);
+            Assert.assertTrue(foundIndexPath.getFileName().toString().endsWith(expectIndexSuffix));
+        }
+    }
+
+    @DataProvider(name = "filesAndIndicies")
+    public Object[][] getFilesAndIndicies() throws IOException {
+
+        final File REAL_INDEX_FILE = new File(BAM_FILE + ".bai"); //test regular file
+        final File SYMLINKED_BAM_WITH_SYMLINKED_INDEX = new File(TEST_DATA, "symlink_with_index.bam");
+
+        return new Object[][]{
+                {BAM_FILE, REAL_INDEX_FILE},
+                {SYMLINKED_BAM_WITH_SYMLINKED_INDEX, new File(SYMLINKED_BAM_WITH_SYMLINKED_INDEX + ".bai")},
+                {new File(TEST_DATA, "symlink_without_linked_index.bam"), REAL_INDEX_FILE.getCanonicalFile()},
+                {new File(TEST_DATA, "FileThatDoesntExist"), null}
+        };
+    }
+
+    @Test(dataProvider = "filesAndIndices")
+    public void testIndexSymlinking(File bam, File expected_index) {
+        Assert.assertEquals(SamFiles.findIndex(bam), expected_index);
+        Assert.assertEquals(SamFiles.findIndex(bam.toPath()), expected_index == null ? null : expected_index.toPath());
+    }
+}
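
The behaviour covered above boils down to a single lookup; a minimal sketch with a hypothetical path, for reference:

import htsjdk.samtools.SamFiles;

import java.io.File;

public class FindIndexSketch {
    public static void main(final String[] args) {
        final File bam = new File("reads.bam");        // hypothetical; symlinks are resolved as tested above
        final File index = SamFiles.findIndex(bam);    // e.g. reads.bai or reads.bam.bai, or null if absent
        System.out.println(index == null ? "no companion index found" : "index: " + index);
    }
}
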
diff --git a/src/test/java/htsjdk/samtools/SamFlagFieldTest.java b/src/test/java/htsjdk/samtools/SamFlagFieldTest.java
new file mode 100644
index 0000000..f09e636
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/SamFlagFieldTest.java
@@ -0,0 +1,150 @@
+package htsjdk.samtools;
+
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+/**
+ * @author nhomer
+ */
+public class SamFlagFieldTest {
+
+    @Test
+    public void testAllFlags() {
+        int flagAsInteger = 0;
+        for (final SAMFlag samFlag : SAMFlag.values()) {
+            flagAsInteger |= samFlag.flag;
+        }
+        final String flagAsString = SamFlagField.STRING.format(flagAsInteger);
+
+        Assert.assertEquals(flagAsString, "urURpP12sSxd");
+        Assert.assertEquals(SamFlagField.STRING.parse(flagAsString), flagAsInteger);
+    }
+
+    @Test
+    public void testAllFlagsReverseOrder() {
+        int flagAsInteger = 0;
+        for (final SAMFlag samFlag : SAMFlag.values()) {
+            flagAsInteger |= samFlag.flag;
+        }
+        final String flagAsString = new StringBuilder("urURpP12sSxd").reverse().toString();
+
+        Assert.assertEquals(flagAsInteger, SamFlagField.STRING.parse(flagAsString));
+        Assert.assertEquals(SamFlagField.STRING.parse(flagAsString), flagAsInteger);
+    }
+
+    @Test
+    public void testForwardStrandFlags() {
+        final int flagAsInteger = SamFlagField.STRING.parse("f");
+        final String flagAsString = SamFlagField.STRING.format(flagAsInteger);
+
+        Assert.assertEquals(flagAsString, "mf");
+        Assert.assertEquals(SamFlagField.STRING.parse(flagAsString), flagAsInteger);
+    }
+
+    @Test
+    public void testPairedForwardStrandFlags() {
+        final int flagAsInteger = SamFlagField.STRING.parse("mfMFp");
+        final String flagAsString = SamFlagField.STRING.format(flagAsInteger);
+
+        Assert.assertEquals(flagAsString, "mfMFp");
+        Assert.assertEquals(SamFlagField.STRING.parse(flagAsString), flagAsInteger);
+    }
+
+    @Test
+    public void testMappedFlags() {
+        final int flagAsInteger = SamFlagField.STRING.parse("m");
+        final String flagAsString = SamFlagField.STRING.format(flagAsInteger);
+
+        Assert.assertEquals(flagAsString, "mf");
+        Assert.assertEquals(SamFlagField.STRING.parse(flagAsString), flagAsInteger);
+    }
+
+    @Test
+    public void testPairedMappedFlags() {
+        final int flagAsInteger = SamFlagField.STRING.parse("pmM");
+        final String flagAsString = SamFlagField.STRING.format(flagAsInteger);
+
+        Assert.assertEquals(flagAsString, "mfMFp");
+        Assert.assertEquals(SamFlagField.STRING.parse(flagAsString), flagAsInteger);
+    }
+
+    @Test
+    public void testMateMappedNotOnFragmentFlags() {
+        final int flagAsInteger = SAMFlag.MATE_UNMAPPED.flag;
+        final String flagAsString = SamFlagField.STRING.format(flagAsInteger);
+
+        Assert.assertEquals(flagAsString, "mfU");
+        Assert.assertEquals(SamFlagField.STRING.parse(flagAsString), flagAsInteger);
+    }
+
+    @Test
+    public void testMateMappedOnlyOnPairsFlags() {
+        final int flagAsInteger = SAMFlag.MATE_UNMAPPED.flag | SAMFlag.READ_PAIRED.flag;
+        final String flagAsString = SamFlagField.STRING.format(flagAsInteger);
+
+        Assert.assertEquals(flagAsString, "mfUFp");
+        Assert.assertEquals(SamFlagField.STRING.parse(flagAsString), flagAsInteger);
+    }
+
+    @Test
+    public void testMateForwardStrandNotOnFragmentFlags() {
+        final int flagAsInteger = 0;
+        final String flagAsString = SamFlagField.STRING.format(flagAsInteger);
+
+        Assert.assertEquals(flagAsString, "mf");
+        Assert.assertEquals(SamFlagField.STRING.parse(flagAsString), flagAsInteger);
+    }
+
+    @Test
+    public void testMateForwardStrandOnlyOnPairsFlags() {
+        final int flagAsInteger = SAMFlag.READ_PAIRED.flag;
+        final String flagAsString = SamFlagField.STRING.format(flagAsInteger);
+
+        Assert.assertEquals(flagAsString, "mfMFp");
+        Assert.assertEquals(SamFlagField.STRING.parse(flagAsString), flagAsInteger);
+    }
+
+    @Test
+    public void testFlagTypesParsing() {
+        Assert.assertEquals(SamFlagField.of("0"), SamFlagField.DECIMAL);
+        Assert.assertEquals(SamFlagField.of("1234"), SamFlagField.DECIMAL);
+        Assert.assertEquals(SamFlagField.of("0xDOESNOTMATTER"), SamFlagField.HEXADECIMAL);
+        Assert.assertEquals(SamFlagField.of("0x"), SamFlagField.HEXADECIMAL);
+        Assert.assertEquals(SamFlagField.of("0[^x]DOESNOTMATTER"), SamFlagField.OCTAL);
+        Assert.assertEquals(SamFlagField.of("0a"), SamFlagField.OCTAL);
+        Assert.assertEquals(SamFlagField.of("DOESNOTMATTER"), SamFlagField.STRING);
+    }
+
+    @Test
+    public void testFlagTypesFormatting() {
+
+        Assert.assertEquals(SamFlagField.DECIMAL.format(1), "1");
+        Assert.assertEquals(SamFlagField.DECIMAL.format(124), "124");
+
+        Assert.assertEquals(SamFlagField.HEXADECIMAL.format(1), "0x1");
+        Assert.assertEquals(SamFlagField.HEXADECIMAL.format(9), "0x9");
+        Assert.assertEquals(SamFlagField.HEXADECIMAL.format(10), "0xa");
+        Assert.assertEquals(SamFlagField.HEXADECIMAL.format(16), "0x10");
+
+        Assert.assertEquals(SamFlagField.OCTAL.format(1), "01");
+        Assert.assertEquals(SamFlagField.OCTAL.format(124), "0174");
+
+        Assert.assertEquals(SamFlagField.STRING.format(337), "mrMFp1s");
+    }
+    
+    @Test(expectedExceptions = SAMFormatException.class)
+    public void testIllegalStringFlagCharacter(){
+        SamFlagField.STRING.parse("HELLO WORLD");
+    }
+
+    @Test(expectedExceptions = SAMFormatException.class)
+    public void testIllegalHexadecimalFlagCharacter(){
+        SamFlagField.HEXADECIMAL.parse("HELLO WORLD");
+    }
+
+    @Test(expectedExceptions = SAMFormatException.class)
+    public void testIllegalStringFlagCharacterExclamation(){
+        SamFlagField.STRING.parse("pmMr!F1s");
+    }
+}
\ No newline at end of file
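
To make the expectations above easier to read: SamFlagField converts the integer FLAG field to and from its decimal, hexadecimal, octal and character-string representations. A minimal sketch (not part of the patch) follows; the printed values are taken from the formatting rules the tests above assert.

import htsjdk.samtools.SAMFlag;
import htsjdk.samtools.SamFlagField;

public class FlagFieldSketch {
    public static void main(final String[] args) {
        final int flags = SAMFlag.READ_PAIRED.flag | SAMFlag.READ_REVERSE_STRAND.flag;   // 1 | 16 = 17

        System.out.println(SamFlagField.DECIMAL.format(flags));      // "17"
        System.out.println(SamFlagField.HEXADECIMAL.format(flags));  // "0x11"
        System.out.println(SamFlagField.OCTAL.format(flags));        // "021"
        System.out.println(SamFlagField.STRING.format(flags));       // character form, see the tests above

        // Parsing is the inverse of formatting, whichever representation was used.
        final int parsed = SamFlagField.STRING.parse(SamFlagField.STRING.format(flags));
        System.out.println(parsed == flags);                          // true
    }
}
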
diff --git a/src/tests/java/htsjdk/samtools/SamHeaderRecordComparatorTest.java b/src/test/java/htsjdk/samtools/SamHeaderRecordComparatorTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/SamHeaderRecordComparatorTest.java
rename to src/test/java/htsjdk/samtools/SamHeaderRecordComparatorTest.java
diff --git a/src/test/java/htsjdk/samtools/SamIndexesTest.java b/src/test/java/htsjdk/samtools/SamIndexesTest.java
new file mode 100644
index 0000000..d13001f
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/SamIndexesTest.java
@@ -0,0 +1,193 @@
+package htsjdk.samtools;
+
+import htsjdk.samtools.cram.CRAIEntry;
+import htsjdk.samtools.cram.CRAIIndex;
+import htsjdk.samtools.seekablestream.SeekableFileStream;
+import htsjdk.samtools.seekablestream.SeekableMemoryStream;
+import htsjdk.samtools.seekablestream.SeekableStream;
+import htsjdk.samtools.util.IOUtil;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.zip.GZIPOutputStream;
+
+public class SamIndexesTest {
+
+    @Test
+    public void testEmptyBai() throws IOException {
+        final File baiFile = File.createTempFile("test", ".bai");
+        baiFile.deleteOnExit();
+        final FileOutputStream fos = new FileOutputStream(baiFile);
+        fos.write(SamIndexes.BAI.magic);
+        fos.close();
+
+
+        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        baos.write(SamIndexes.BAI.magic);
+        baos.close();
+
+        final InputStream inputStream = SamIndexes.asBaiStreamOrNull(new ByteArrayInputStream(baos.toByteArray()), null);
+        for (final byte b : SamIndexes.BAI.magic) {
+            Assert.assertEquals(inputStream.read(), 0xFF & b);
+        }
+    }
+
+    @Test(expectedExceptions = NullPointerException.class)
+    public void testCraiRequiresDictionary() throws IOException {
+        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        final GZIPOutputStream gos = new GZIPOutputStream(baos);
+        gos.close();
+
+        SamIndexes.asBaiStreamOrNull(new ByteArrayInputStream(baos.toByteArray()), null);
+    }
+
+    @Test
+    public void testEmptyCraiReadAsBai() throws IOException {
+        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        final GZIPOutputStream gos = new GZIPOutputStream(baos);
+        gos.close();
+
+        final SAMSequenceDictionary dictionary = new SAMSequenceDictionary();
+        dictionary.addSequence(new SAMSequenceRecord("1", 100));
+        final InputStream inputStream = SamIndexes.asBaiStreamOrNull(new ByteArrayInputStream(baos.toByteArray()), dictionary);
+        for (final byte b : SamIndexes.BAI.magic) {
+            Assert.assertEquals(inputStream.read(), 0xFF & b);
+        }
+    }
+
+    @Test
+    public void testCraiInMemory() throws IOException {
+        ByteArrayOutputStream baos = new ByteArrayOutputStream();
+
+        SAMFileHeader header = new SAMFileHeader();
+        header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
+        CRAMCRAIIndexer indexer = new CRAMCRAIIndexer(baos, header);
+        final CRAIEntry entry = new CRAIEntry();
+        entry.sequenceId = 0;
+        entry.alignmentStart = 1;
+        entry.alignmentSpan = 2;
+        entry.sliceOffset = 3;
+        entry.sliceSize = 4;
+        entry.containerStartOffset = 5;
+        indexer.addEntry(entry);
+        indexer.finish();
+        baos.close();
+
+        final SAMSequenceDictionary dictionary = new SAMSequenceDictionary();
+        dictionary.addSequence(new SAMSequenceRecord("1", 100));
+
+        final InputStream baiStream = SamIndexes.asBaiStreamOrNull(new ByteArrayInputStream(baos.toByteArray()), dictionary);
+        Assert.assertNotNull(baiStream);
+
+        baos = new ByteArrayOutputStream();
+        IOUtil.copyStream(baiStream, baos);
+        final CachingBAMFileIndex bamIndex = new CachingBAMFileIndex(new SeekableMemoryStream(baos.toByteArray(), null), dictionary);
+        final BAMFileSpan span = bamIndex.getSpanOverlapping(entry.sequenceId, entry.alignmentStart, entry.alignmentStart);
+        Assert.assertNotNull(span);
+        final long[] coordinateArray = span.toCoordinateArray();
+        Assert.assertEquals(coordinateArray.length, 2);
+        Assert.assertEquals(coordinateArray[0] >> 16, entry.containerStartOffset);
+        Assert.assertEquals(coordinateArray[1] & 0xFFFF, 1);
+    }
+
+    @Test
+    public void testCraiFromFile() throws IOException {
+        final File file = File.createTempFile("test", ".crai");
+        file.deleteOnExit();
+        final FileOutputStream fos = new FileOutputStream(file);
+
+        SAMFileHeader header = new SAMFileHeader();
+        header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
+        CRAMCRAIIndexer indexer = new CRAMCRAIIndexer(fos, header);
+        final CRAIEntry entry = new CRAIEntry();
+        entry.sequenceId = 0;
+        entry.alignmentStart = 1;
+        entry.alignmentSpan = 2;
+        entry.sliceOffset = 3;
+        entry.sliceSize = 4;
+        entry.containerStartOffset = 5;
+        indexer.addEntry(entry);
+        indexer.finish();
+        fos.close();
+
+        final SAMSequenceDictionary dictionary = new SAMSequenceDictionary();
+        dictionary.addSequence(new SAMSequenceRecord("1", 100));
+
+        final SeekableStream baiStream = SamIndexes.asBaiSeekableStreamOrNull(new SeekableFileStream(file), dictionary);
+        Assert.assertNotNull(baiStream);
+
+        final CachingBAMFileIndex bamIndex = new CachingBAMFileIndex(baiStream, dictionary);
+        final BAMFileSpan span = bamIndex.getSpanOverlapping(entry.sequenceId, entry.alignmentStart, entry.alignmentStart);
+        Assert.assertNotNull(span);
+        final long[] coordinateArray = span.toCoordinateArray();
+        Assert.assertEquals(coordinateArray.length, 2);
+        Assert.assertEquals(coordinateArray[0] >> 16, entry.containerStartOffset);
+        Assert.assertEquals(coordinateArray[1] & 0xFFFF, 1);
+    }
+
+    @Test(expectedExceptions = NullPointerException.class)
+    public void testOpenIndexFileAsBaiOrNull_NPE() throws IOException {
+        final SAMSequenceDictionary dictionary = new SAMSequenceDictionary();
+        dictionary.addSequence(new SAMSequenceRecord("1", 100));
+        Assert.assertNull(SamIndexes.openIndexFileAsBaiOrNull(null, dictionary));
+    }
+
+    @Test
+    public void testOpenIndexFileAsBaiOrNull_ReturnsNull() throws IOException {
+        final SAMSequenceDictionary dictionary = new SAMSequenceDictionary();
+        dictionary.addSequence(new SAMSequenceRecord("1", 100));
+        File file = File.createTempFile("test", ".notbai");
+        file.deleteOnExit();
+        Assert.assertNull(SamIndexes.openIndexFileAsBaiOrNull(file, dictionary));
+        file.delete();
+
+        file = File.createTempFile("test", ".notcrai");
+        file.deleteOnExit();
+        Assert.assertNull(SamIndexes.openIndexFileAsBaiOrNull(file, dictionary));
+        file.delete();
+    }
+
+    @Test
+    public void testOpenIndexUrlAsBaiOrNull() throws IOException {
+        final SAMSequenceDictionary dictionary = new SAMSequenceDictionary();
+        dictionary.addSequence(new SAMSequenceRecord("1", 100));
+
+        final File file = File.createTempFile("test", ".crai");
+        file.deleteOnExit();
+        final FileOutputStream fos = new FileOutputStream(file);
+        SAMFileHeader header = new SAMFileHeader();
+        header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
+        CRAMCRAIIndexer indexer = new CRAMCRAIIndexer(fos, header);
+        final CRAIEntry entry = new CRAIEntry();
+        entry.sequenceId = 0;
+        entry.alignmentStart = 1;
+        entry.alignmentSpan = 2;
+        entry.sliceOffset = 3;
+        entry.sliceSize = 4;
+        entry.containerStartOffset = 5;
+        indexer.addEntry(entry);
+        indexer.finish();
+        fos.close();
+
+        final InputStream baiStream = SamIndexes.openIndexUrlAsBaiOrNull(file.toURI().toURL(), dictionary);
+        Assert.assertNotNull(baiStream);
+
+        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        IOUtil.copyStream(baiStream, baos);
+        final CachingBAMFileIndex bamIndex = new CachingBAMFileIndex(new SeekableMemoryStream(baos.toByteArray(), null), dictionary);
+        final BAMFileSpan span = bamIndex.getSpanOverlapping(entry.sequenceId, entry.alignmentStart, entry.alignmentStart);
+        Assert.assertNotNull(span);
+        final long[] coordinateArray = span.toCoordinateArray();
+        Assert.assertEquals(coordinateArray.length, 2);
+        Assert.assertEquals(coordinateArray[0] >> 16, entry.containerStartOffset);
+        Assert.assertEquals(coordinateArray[1] & 0xFFFF, 1);
+    }
+}
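
For context, the conversion these tests exercise can be sketched as follows: SamIndexes exposes a CRAI index as a BAI-style stream (the sequence dictionary of the corresponding CRAM is required), which the regular BAM index code can then consume. Sketch only; the .crai path is hypothetical, and the sketch is placed in the htsjdk.samtools package, like the test above, since some of these types may not be visible from outside it.

package htsjdk.samtools;

import htsjdk.samtools.seekablestream.SeekableFileStream;
import htsjdk.samtools.seekablestream.SeekableStream;

import java.io.File;
import java.io.IOException;

public class CraiAsBaiSketch {
    public static void main(final String[] args) throws IOException {
        // Dictionary of the CRAM the index belongs to; normally taken from the CRAM header.
        final SAMSequenceDictionary dictionary = new SAMSequenceDictionary();
        dictionary.addSequence(new SAMSequenceRecord("1", 100));

        final SeekableStream bai = SamIndexes.asBaiSeekableStreamOrNull(
                new SeekableFileStream(new File("example.crai")), dictionary);   // hypothetical file
        if (bai != null) {
            final CachingBAMFileIndex index = new CachingBAMFileIndex(bai, dictionary);
            final BAMFileSpan span = index.getSpanOverlapping(0, 1, 100);
            System.out.println(span);
        }
    }
}
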
diff --git a/src/tests/java/htsjdk/samtools/SamPairUtilTest.java b/src/test/java/htsjdk/samtools/SamPairUtilTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/SamPairUtilTest.java
rename to src/test/java/htsjdk/samtools/SamPairUtilTest.java
diff --git a/src/test/java/htsjdk/samtools/SamReaderFactoryTest.java b/src/test/java/htsjdk/samtools/SamReaderFactoryTest.java
new file mode 100644
index 0000000..74adbf1
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/SamReaderFactoryTest.java
@@ -0,0 +1,437 @@
+package htsjdk.samtools;
+
+import htsjdk.samtools.cram.ref.ReferenceSource;
+import htsjdk.samtools.seekablestream.ISeekableStreamFactory;
+import htsjdk.samtools.seekablestream.SeekableFileStream;
+import htsjdk.samtools.seekablestream.SeekableHTTPStream;
+import htsjdk.samtools.seekablestream.SeekableStreamFactory;
+import htsjdk.samtools.util.Iterables;
+import htsjdk.samtools.util.Log;
+import htsjdk.samtools.util.RuntimeIOException;
+import htsjdk.samtools.util.StopWatch;
+
+import java.nio.file.Path;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+import java.util.function.BiFunction;
+
+public class SamReaderFactoryTest {
+    private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools");
+
+    private static final Log LOG = Log.getInstance(SamReaderFactoryTest.class);
+
+    @Test(dataProvider = "variousFormatReaderTestCases")
+    public void variousFormatReaderTest(final String inputFile) throws IOException {
+        final File input = new File(TEST_DATA_DIR, inputFile);
+        final SamReader reader = SamReaderFactory.makeDefault().open(input);
+        for (final SAMRecord ignored : reader) {
+        }
+        reader.close();
+    }
+
+    private int countRecordsInQueryInterval(final SamReader reader, final QueryInterval query) {
+        final SAMRecordIterator iter = reader.queryOverlapping(new QueryInterval[] { query });
+        int count = 0;
+        while (iter.hasNext()) {
+            iter.next();
+            count++;
+        }
+        iter.close();
+        return count;
+    }
+
+    private int countRecords(final SamReader reader) {
+        int count = 0;
+        try (final SAMRecordIterator iter = reader.iterator()) {
+            while (iter.hasNext()) {
+                iter.next();
+                count++;
+            }
+        }
+        return count;
+    }
+
+    // See https://github.com/samtools/htsjdk/issues/76
+    @Test(dataProvider = "queryIntervalIssue76TestCases")
+    public void queryIntervalIssue76(final String sequenceName, final int start, final int end, final int expectedCount) throws IOException {
+        final File input = new File(TEST_DATA_DIR, "issue76.bam");
+        final SamReader reader = SamReaderFactory.makeDefault().open(input);
+        final QueryInterval interval = new QueryInterval(reader.getFileHeader().getSequence(sequenceName).getSequenceIndex(), start, end);
+        Assert.assertEquals(countRecordsInQueryInterval(reader, interval), expectedCount);
+        reader.close();
+    }
+
+    @DataProvider(name = "queryIntervalIssue76TestCases")
+    public Object[][] queryIntervalIssue76TestCases() {
+        return new Object[][]{
+                {"1", 11966, 11966, 2},
+                {"1", 11966, 11967, 2},
+                {"1", 11967, 11967, 1}
+        };
+    }
+
+    @DataProvider(name = "variousFormatReaderTestCases")
+    public Object[][] variousFormatReaderTestCases() {
+        return new Object[][]{
+                {"block_compressed.sam.gz"},
+                {"uncompressed.sam"},
+                {"compressed.sam.gz"},
+                {"compressed.bam"},
+                {"unsorted.sam"}
+        };
+    }
+
+    // Tests for the SAMRecordFactory usage
+    class SAMRecordFactoryTester extends DefaultSAMRecordFactory {
+        int samRecordsCreated;
+        int bamRecordsCreated;
+
+        public SAMRecord createSAMRecord(final SAMFileHeader header) {
+            ++samRecordsCreated;
+            return super.createSAMRecord(header);
+        }
+
+        public BAMRecord createBAMRecord(final SAMFileHeader header, final int referenceSequenceIndex, final int alignmentStart, final short readNameLength, final short mappingQuality, final int indexingBin, final int cigarLen, final int flags, final int readLen, final int mateReferenceSequenceIndex, final int mateAlignmentStart, final int insertSize, final byte[] variableLengthBlock) {
+            ++bamRecordsCreated;
+            return super.createBAMRecord(header, referenceSequenceIndex, alignmentStart, readNameLength, mappingQuality, indexingBin, cigarLen, flags, readLen, mateReferenceSequenceIndex, mateAlignmentStart, insertSize, variableLengthBlock);
+        }
+    }
+
+    @Test(dataProvider = "variousFormatReaderTestCases")
+    public void samRecordFactoryTest(final String inputFile) throws IOException {
+        final File input = new File(TEST_DATA_DIR, inputFile);
+
+        final SAMRecordFactoryTester recordFactory = new SAMRecordFactoryTester();
+        final SamReaderFactory readerFactory = SamReaderFactory.makeDefault().samRecordFactory(recordFactory);
+        final SamReader reader = readerFactory.open(input);
+
+        int i = 0;
+        for (final SAMRecord ignored : reader) {
+            ++i;
+        }
+        reader.close();
+
+        Assert.assertTrue(i > 0);
+        if (inputFile.endsWith(".sam") || inputFile.endsWith(".sam.gz")) Assert.assertEquals(recordFactory.samRecordsCreated, i);
+        else if (inputFile.endsWith(".bam")) Assert.assertEquals(recordFactory.bamRecordsCreated, i);
+    }
+
+    @Test(expectedExceptions=IllegalStateException.class)
+    public void samRecordFactoryNullHeaderBAMTest() {
+        final SAMRecordFactory recordFactory = new DefaultSAMRecordFactory();
+        recordFactory.createBAMRecord(
+                null, // null header
+                0,
+                0,
+                (short) 0,
+                (short) 0,
+                0,
+                0,
+                0,
+                0,
+                0,
+                0,
+                0,
+                null);
+    }
+
+
+    /**
+     * Unit tests for asserting all permutations of data and index sources read the same records and header.
+     */
+    final File localBam = new File("src/test/resources/htsjdk/samtools/BAMFileIndexTest/index_test.bam");
+    final File localBamIndex = new File("src/test/resources/htsjdk/samtools/BAMFileIndexTest/index_test.bam.bai");
+
+    final URL bamUrl, bamIndexUrl;
+
+    {
+        try {
+            bamUrl = new URL("http://www.broadinstitute.org/~picard/testdata/index_test.bam");
+            bamIndexUrl = new URL("http://www.broadinstitute.org/~picard/testdata/index_test.bam.bai");
+        } catch (final MalformedURLException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    @DataProvider
+    public Object[][] composeAllPermutationsOfSamInputResource() {
+        final List<SamInputResource> sources = new ArrayList<SamInputResource>();
+        for (final InputResource.Type dataType : InputResource.Type.values()) {
+            if (dataType.equals(InputResource.Type.SRA_ACCESSION))
+                continue;
+
+            sources.add(new SamInputResource(composeInputResourceForType(dataType, false)));
+            for (final InputResource.Type indexType : InputResource.Type.values()) {
+                if (indexType.equals(InputResource.Type.SRA_ACCESSION))
+                    continue;
+
+                sources.add(new SamInputResource(
+                        composeInputResourceForType(dataType, false),
+                        composeInputResourceForType(indexType, true)
+                ));
+            }
+        }
+        final Object[][] data = new Object[sources.size()][];
+        for (final SamInputResource source : sources) {
+            data[sources.indexOf(source)] = new Object[]{source};
+        }
+
+        return data;
+    }
+
+    private InputResource composeInputResourceForType(final InputResource.Type type, final boolean forIndex) {
+        final File f = forIndex ? localBamIndex : localBam;
+        final URL url = forIndex ? bamIndexUrl : bamUrl;
+        switch (type) {
+            case FILE:
+                return new FileInputResource(f);
+            case PATH:
+                return new PathInputResource(f.toPath());
+            case URL:
+                return new UrlInputResource(url);
+            case SEEKABLE_STREAM:
+                return new SeekableStreamInputResource(new SeekableHTTPStream(url));
+            case INPUT_STREAM:
+                try {
+                    return new InputStreamInputResource(new FileInputStream(f));
+                } catch (final FileNotFoundException e) {
+                    throw new RuntimeIOException(e);
+                }
+            default:
+                throw new IllegalStateException();
+        }
+    }
+
+    final Set<SAMFileHeader> observedHeaders = new HashSet<SAMFileHeader>();
+    final Set<List<SAMRecord>> observedRecordOrdering = new HashSet<List<SAMRecord>>();
+
+    @Test(dataProvider = "composeAllPermutationsOfSamInputResource")
+    public void exhaustInputResourcePermutation(final SamInputResource resource) throws IOException {
+        final SamReader reader = SamReaderFactory.makeDefault().open(resource);
+        LOG.info(String.format("Reading from %s ...", resource));
+        final List<SAMRecord> slurped = Iterables.slurp(reader);
+        final SAMFileHeader fileHeader = reader.getFileHeader();
+        reader.hasIndex();
+        reader.indexing().hasBrowseableIndex();
+        reader.close();
+        
+        /* Ensure all tests have read the same records in the same order or, if this is the first test, set it as the template. */
+        observedHeaders.add(fileHeader);
+        observedRecordOrdering.add(slurped);
+        Assert.assertEquals(observedHeaders.size(), 1, "read different headers than other testcases");
+        Assert.assertEquals(observedRecordOrdering.size(), 1, "read different records than other testcases");
+    }
+
+    @Test
+    public void openPath() throws IOException {
+        final Path path = localBam.toPath();
+        final List<SAMRecord> records;
+        final SAMFileHeader fileHeader;
+        try (final SamReader reader = SamReaderFactory.makeDefault().open(path)) {
+            LOG.info(String.format("Reading from %s ...", path));
+            records = Iterables.slurp(reader);
+            fileHeader = reader.getFileHeader();
+            reader.close();
+        }
+
+        try (final SamReader fileReader = SamReaderFactory.makeDefault().open(localBam)) {
+            final List<SAMRecord> expectedRecords = Iterables.slurp(fileReader);
+            final SAMFileHeader expectedFileHeader = fileReader.getFileHeader();
+            Assert.assertEquals(records, expectedRecords);
+            Assert.assertEquals(fileHeader, expectedFileHeader);
+        }
+    }
+
+
+    final Set<List<SAMRecord>> observedRecordOrdering1 = new HashSet<List<SAMRecord>>();
+    final Set<List<SAMRecord>> observedRecordOrdering3 = new HashSet<List<SAMRecord>>();
+    final Set<List<SAMRecord>> observedRecordOrdering20 = new HashSet<List<SAMRecord>>();
+
+    @Test(dataProvider = "composeAllPermutationsOfSamInputResource")
+    public void queryInputResourcePermutation(final SamInputResource resource) throws IOException {
+        final SamReader reader = SamReaderFactory.makeDefault().open(resource);
+        LOG.info(String.format("Query from %s ...", resource));
+        if (reader.hasIndex()) {
+            final StopWatch stopWatch = new StopWatch();
+            stopWatch.start();
+            final SAMRecordIterator q1 = reader.query("chr1", 500000, 100000000, true);
+            observedRecordOrdering1.add(Iterables.slurp(q1));
+            q1.close();
+            final SAMRecordIterator q20 = reader.query("chr20", 1, 1000000, true);
+            observedRecordOrdering20.add(Iterables.slurp(q20));
+            q20.close();
+            final SAMRecordIterator q3 = reader.query("chr3", 1, 10000000, true);
+            observedRecordOrdering3.add(Iterables.slurp(q3));
+            q3.close();
+            stopWatch.stop();
+            LOG.info(String.format("Finished queries in %sms", stopWatch.getElapsedTime()));
+
+            Assert.assertEquals(observedRecordOrdering1.size(), 1, "read different records for chromosome 1");
+            Assert.assertEquals(observedRecordOrdering20.size(), 1, "read different records for chromosome 20");
+            Assert.assertEquals(observedRecordOrdering3.size(), 1, "read different records for chromosome 3");
+        } else if (resource.indexMaybe() != null) {
+            LOG.warn("Resource has an index source, but is not indexed: " + resource);
+        } else {
+            LOG.info("Skipping query operation: no index.");
+        }
+        reader.close();
+    }
+    
+    @Test
+    public void customReaderFactoryTest() throws IOException {
+        try {
+          CustomReaderFactory.setInstance(new CustomReaderFactory(
+              "https://www.googleapis.com/genomics/v1beta/reads/," +
+              "htsjdk.samtools.SamReaderFactoryTest$TestReaderFactory"));
+          final SamReader reader = SamReaderFactory.makeDefault().open(
+              SamInputResource.of(
+              "https://www.googleapis.com/genomics/v1beta/reads/?uncompressed.sam"));
+          int i = 0;
+          for (@SuppressWarnings("unused") final SAMRecord ignored : reader) {
+              ++i;
+          }
+          reader.close();
+  
+          Assert.assertTrue(i > 0);
+        } finally {
+          CustomReaderFactory.resetToDefaultInstance();
+        }
+    }
+    
+    public static class TestReaderFactory implements CustomReaderFactory.ICustomReaderFactory {
+      @Override
+      public SamReader open(URL url) {
+        final File file = new File(TEST_DATA_DIR, url.getQuery());
+        LOG.info("Opening customr reader for " + file.toString());
+        return SamReaderFactory.makeDefault().open(file);
+      }
+    }
+    
+    @Test
+    public void inputResourceFromStringTest() throws IOException {
+      Assert.assertEquals(SamInputResource.of("http://test.url").data().type(),
+          InputResource.Type.URL);
+      Assert.assertEquals(SamInputResource.of("https://test.url").data().type(),
+          InputResource.Type.URL);
+      Assert.assertEquals(SamInputResource.of("ftp://test.url").data().type(),
+          InputResource.Type.URL);
+      Assert.assertEquals(SamInputResource.of("/a/b/c").data().type(),
+          InputResource.Type.FILE);
+    }
+
+    @Test
+    public void testCRAMReaderFromURL() throws IOException {
+        // get a CRAM reader with an index from a URL-backed resource
+        getCRAMReaderFromInputResource(
+                (cramURL, indexURL) -> { return SamInputResource.of(cramURL).index(indexURL);},
+                true,
+                3);
+    }
+
+    @Test
+    public void testCRAMReaderFromURLStream() throws IOException {
+        // get a CRAM reader with an index from a stream-backed resource created from a URL
+        getCRAMReaderFromInputResource(
+                (cramURL, indexURL) -> {
+                    try {
+                        ISeekableStreamFactory streamFactory = SeekableStreamFactory.getInstance();
+                        return SamInputResource
+                                .of(streamFactory.getStreamFor(cramURL))
+                                .index(streamFactory.getStreamFor(indexURL));
+                    }
+                    catch (IOException e) {
+                        throw new RuntimeIOException(e);
+                    }
+                },
+                true,
+                3);
+    }
+
+    @Test
+    public void testCRAMReaderFromURLNoIndexFile() throws IOException {
+        // get just a CRAM reader (no index) from an URL-backed resource
+        getCRAMReaderFromInputResource(
+                (cramURL, indexURL) -> { return SamInputResource.of(cramURL); },
+            false,
+            11);
+    }
+
+    @Test(expectedExceptions=RuntimeIOException.class)
+    public void testCRAMReaderFromURLBadIndexFile() throws IOException {
+        // deliberately specify a bad index file to ensure we get an IOException
+        getCRAMReaderFromInputResource(
+                (cramURL, indexURL) -> { return SamInputResource.of(cramURL).index(new File("nonexistent.bai")); },
+            true,
+            3);
+    }
+
+    private void getCRAMReaderFromInputResource(
+            final BiFunction<URL, URL, SamInputResource> getInputResource,
+            final boolean hasIndex,
+            final int expectedCount) throws IOException {
+        final String cramFilePath = new File(TEST_DATA_DIR, "cram_with_bai_index.cram").getAbsolutePath();
+        final String cramIndexPath = new File(TEST_DATA_DIR, "cram_with_bai_index.cram.bai").getAbsolutePath();
+        final URL cramURL = new URL("file://" + cramFilePath);
+        final URL indexURL = new URL("file://" + cramIndexPath);
+
+        final SamReaderFactory factory = SamReaderFactory.makeDefault()
+                .referenceSource(new ReferenceSource(new File(TEST_DATA_DIR, "hg19mini.fasta")))
+                .validationStringency(ValidationStringency.SILENT);
+        final SamReader reader = factory.open(getInputResource.apply(cramURL, indexURL));
+
+        int count = hasIndex ?
+            countRecordsInQueryInterval(reader, new QueryInterval(1, 10, 1000)) :
+            countRecords(reader);
+        Assert.assertEquals(count, expectedCount);
+    }
+
+    @Test
+    public void testSamReaderFromSeekableStream() throws IOException {
+        // even though a SAM isn't indexable, make sure we can open one
+        // using a seekable stream
+        final File samFile = new File(TEST_DATA_DIR, "unsorted.sam");
+        final SamReaderFactory factory = SamReaderFactory.makeDefault()
+                .validationStringency(ValidationStringency.SILENT);
+        final SamReader reader = factory.open(
+                SamInputResource.of(new SeekableFileStream(samFile)));
+        Assert.assertEquals(countRecords(reader), 10);
+    }
+
+
+    @Test
+    public void testSamReaderFromURL() throws IOException {
+        final String samFilePath = new File(TEST_DATA_DIR, "unsorted.sam").getAbsolutePath();
+        final URL samURL = new URL("file://" + samFilePath);
+        final SamReaderFactory factory = SamReaderFactory.makeDefault()
+                .validationStringency(ValidationStringency.SILENT);
+        final SamReader reader = factory.open(SamInputResource.of(samURL));
+        Assert.assertEquals(countRecords(reader), 10);
+    }
+
+    @Test(expectedExceptions=SAMFormatException.class)
+    public void testSamReaderFromMalformedSeekableStream() throws IOException {
+        // use a bogus input (a .bai file) to force SamReaderFactory to fall through to the
+        // fallback code that assumes SAM format when it can't determine the
+        // format of the input, and verify that this results in a SAMFormatException
+        final File samFile = new File(TEST_DATA_DIR, "cram_with_bai_index.cram.bai");
+        final SamReaderFactory factory = SamReaderFactory.makeDefault()
+                .validationStringency(ValidationStringency.SILENT);
+        final SamReader reader = factory.open(
+                SamInputResource.of(new SeekableFileStream(samFile)));
+        countRecords(reader);
+    }
+
+}
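
For reference, the CRAM-over-URL cases above reduce to the sketch below: build a SamReaderFactory with a reference source (required for CRAM) and open a SamInputResource that pairs the data with its index. Paths are hypothetical and SamInputResource.of(File) is assumed to be available alongside the String/URL/stream overloads used above; this is a sketch of the pattern, not part of the patch.

import htsjdk.samtools.*;
import htsjdk.samtools.cram.ref.ReferenceSource;

import java.io.File;
import java.io.IOException;

public class CramReaderSketch {
    public static void main(final String[] args) throws IOException {
        final SamReaderFactory factory = SamReaderFactory.makeDefault()
                .referenceSource(new ReferenceSource(new File("reference.fasta")))   // hypothetical reference
                .validationStringency(ValidationStringency.SILENT);

        final SamInputResource resource = SamInputResource
                .of(new File("sample.cram"))                 // hypothetical CRAM
                .index(new File("sample.cram.bai"));         // hypothetical BAI index

        try (final SamReader reader = factory.open(resource)) {
            final SAMRecordIterator it =
                    reader.queryOverlapping(new QueryInterval[]{new QueryInterval(0, 1, 1000)});
            while (it.hasNext()) {
                it.next();
            }
            it.close();
        }
    }
}
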
diff --git a/src/test/java/htsjdk/samtools/SamReaderSortTest.java b/src/test/java/htsjdk/samtools/SamReaderSortTest.java
new file mode 100755
index 0000000..cc496db
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/SamReaderSortTest.java
@@ -0,0 +1,102 @@
+package htsjdk.samtools;
+
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+
+/**
+ * Tests for the implementation of SAMRecordIterator in SAMFileReader
+ *
+ * @author ktibbett at broadinstitute.org
+ */
+public class SamReaderSortTest {
+
+    public static final String COORDINATE_SORTED_FILE = "src/test/resources/htsjdk/samtools/coordinate_sorted.sam";
+    public static final String QUERYNAME_SORTED_FILE = "src/test/resources/htsjdk/samtools/queryname_sorted.sam";
+    public static final String QUERYNAME_SORTED_NO_HEADER_SORT = "src/test/resources/htsjdk/samtools/unsorted.sam";
+
+    @Test(expectedExceptions = IllegalStateException.class)
+    public void testSortsDisagree() throws Exception {
+        SAMRecordIterator it = SamReaderFactory.makeDefault().open(new File(COORDINATE_SORTED_FILE)).iterator();
+        try {
+            it.assertSorted(SAMFileHeader.SortOrder.queryname);
+            while (it.hasNext()) {
+                it.next();
+            }
+            Assert.fail("Queryname assertion should have failed on coordinate sorted file but didn't");
+        } finally {
+            it.close();
+        }
+    }
+
+    @Test(dataProvider = "validSorts")
+    public void testSortAssertionValid(String file, SAMFileHeader.SortOrder order) {
+        SAMRecordIterator it = SamReaderFactory.makeDefault().open(new File(file)).iterator();
+        try {
+            it.assertSorted(order);
+            while (it.hasNext()) {
+                it.next();
+            }
+        } finally {
+            it.close();
+        }
+    }
+
+    @DataProvider(name = "validSorts")
+    public Object[][] getValidSorts() {
+        return new Object[][]{
+                {COORDINATE_SORTED_FILE, SAMFileHeader.SortOrder.coordinate},
+                {QUERYNAME_SORTED_FILE, SAMFileHeader.SortOrder.queryname},
+                {QUERYNAME_SORTED_NO_HEADER_SORT, SAMFileHeader.SortOrder.queryname},
+                {COORDINATE_SORTED_FILE, SAMFileHeader.SortOrder.unsorted}
+        };
+    }
+
+
+    @Test(dataProvider = "invalidSorts", expectedExceptions = IllegalStateException.class)
+    public void testSortAssertionFails(String file, SAMFileHeader.SortOrder order) throws Exception {
+        SAMRecordIterator it = SamReaderFactory.makeDefault().open(new File(file)).iterator();
+        try {
+            it.assertSorted(order);
+            while (it.hasNext()) {
+                it.next();
+            }
+            Assert.fail("Iterated successfully over " + file + " with invalid sort assertion: " + order.name());
+        } finally {
+            it.close();
+        }
+    }
+
+    @DataProvider(name = "invalidSorts")
+    public Object[][] getInvalidSorts() {
+        return new Object[][]{
+                {QUERYNAME_SORTED_NO_HEADER_SORT, SAMFileHeader.SortOrder.coordinate}
+        };
+    }
+}
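
The mechanism under test above, in brief: SAMRecordIterator.assertSorted makes the iterator validate record order as it streams, throwing IllegalStateException at the first out-of-order record. A minimal sketch with a hypothetical input path:

import htsjdk.samtools.SAMFileHeader;
import htsjdk.samtools.SAMRecordIterator;
import htsjdk.samtools.SamReaderFactory;

import java.io.File;

public class AssertSortedSketch {
    public static void main(final String[] args) {
        final SAMRecordIterator it = SamReaderFactory.makeDefault()
                .open(new File("coordinate_sorted.sam"))          // hypothetical input
                .iterator();
        try {
            it.assertSorted(SAMFileHeader.SortOrder.coordinate);  // checked lazily, on each next()
            while (it.hasNext()) {
                it.next();
            }
        } finally {
            it.close();
        }
    }
}
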
diff --git a/src/test/java/htsjdk/samtools/SamSpecIntTest.java b/src/test/java/htsjdk/samtools/SamSpecIntTest.java
new file mode 100644
index 0000000..8305065
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/SamSpecIntTest.java
@@ -0,0 +1,97 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2014 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+
+package htsjdk.samtools;
+
+import htsjdk.samtools.util.CloserUtil;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+public class SamSpecIntTest {
+    private static final File SAM_INPUT = new File("src/test/resources/htsjdk/samtools/inttest.sam");
+    private static final File BAM_INPUT = new File("src/test/resources/htsjdk/samtools/inttest.bam");
+
+    @Test
+    public void testSamIntegers() throws IOException {
+        final List<String> errorMessages = new ArrayList<String>();
+        final SamReader samReader = SamReaderFactory.makeDefault().open(SAM_INPUT);
+        final File bamOutput = File.createTempFile("test", ".bam");
+        final File samOutput = File.createTempFile("test", ".sam");
+        final SAMFileWriter samWriter = new SAMFileWriterFactory().makeWriter(samReader.getFileHeader(), true, samOutput, null);
+        final SAMFileWriter bamWriter = new SAMFileWriterFactory().makeWriter(samReader.getFileHeader(), true, bamOutput, null);
+
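+        // Copy every record to both SAM and BAM output; any exception while writing indicates mishandled integer tag values.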
+        final SAMRecordIterator iterator = samReader.iterator();
+        while (iterator.hasNext()) {
+            try {
+                final SAMRecord rec = iterator.next();
+                samWriter.addAlignment(rec);
+                bamWriter.addAlignment(rec);
+            } catch (final Throwable e) {
+                System.out.println(e.getMessage());
+                errorMessages.add(e.getMessage());
+            }
+        }
+
+        CloserUtil.close(samReader);
+        samWriter.close();
+        bamWriter.close();
+        Assert.assertEquals(errorMessages.size(), 0);
+        bamOutput.deleteOnExit();
+        samOutput.deleteOnExit();
+    }
+
+    @Test
+    public void testBamIntegers() throws IOException {
+        final List<String> errorMessages = new ArrayList<String>();
+        final SamReader bamReader = SamReaderFactory.makeDefault().open(BAM_INPUT);
+        final File bamOutput = File.createTempFile("test", ".bam");
+        final File samOutput = File.createTempFile("test", ".sam");
+        final SAMFileWriter samWriter = new SAMFileWriterFactory().makeWriter(bamReader.getFileHeader(), true, samOutput, null);
+        final SAMFileWriter bamWriter = new SAMFileWriterFactory().makeWriter(bamReader.getFileHeader(), true, bamOutput, null);
+        final SAMRecordIterator iterator = bamReader.iterator();
+        while (iterator.hasNext()) {
+            try {
+                final SAMRecord rec = iterator.next();
+                samWriter.addAlignment(rec);
+                bamWriter.addAlignment(rec);
+            } catch (final Throwable e) {
+                System.out.println(e.getMessage());
+                errorMessages.add(e.getMessage());
+            }
+        }
+
+        CloserUtil.close(bamReader);
+        samWriter.close();
+        bamWriter.close();
+        Assert.assertEquals(errorMessages.size(), 0);
+        bamOutput.deleteOnExit();
+        samOutput.deleteOnExit();
+    }
+
+}
diff --git a/src/test/java/htsjdk/samtools/SamStreamsTest.java b/src/test/java/htsjdk/samtools/SamStreamsTest.java
new file mode 100644
index 0000000..c92d6db
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/SamStreamsTest.java
@@ -0,0 +1,124 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2016 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+
+package htsjdk.samtools;
+
+import htsjdk.samtools.seekablestream.SeekableFileStream;
+import htsjdk.samtools.seekablestream.SeekableStream;
+import htsjdk.samtools.seekablestream.SeekableStreamFactory;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.*;
+import java.net.URL;
+
+public class SamStreamsTest {
+
+    private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools");
+
+    @Test(dataProvider = "makeData")
+    public void testDataFormat(final String inputFile, final boolean isGzippedSAMFile, final boolean isBAMFile, final boolean isCRAMFile) throws Exception {
+        final File input = new File(TEST_DATA_DIR, inputFile);
+        try(final InputStream fis = new BufferedInputStream(new FileInputStream(input))) { //must be buffered or the isGzippedSAMFile will blow up
+            Assert.assertEquals(SamStreams.isGzippedSAMFile(fis), isGzippedSAMFile, "isGzippedSAMFile:" + inputFile);
+            Assert.assertEquals(SamStreams.isBAMFile(fis), isBAMFile,   "isBAMFile:" + inputFile);
+            Assert.assertEquals(SamStreams.isCRAMFile(fis), isCRAMFile, "isCRAMFile:" + inputFile);
+        }
+    }
+
+    @DataProvider(name = "makeData")
+    public Object[][] makeData() {
+        final Object[][] scenarios = new Object[][]{
+                //isGzippedSAMFile isBAMFile isCRAMFile
+                {"block_compressed.sam.gz", true,  false, false},
+                {"uncompressed.sam",        false, false, false},
+                {"compressed.sam.gz",       true,  false, false},
+                {"compressed.bam",          true,  true,  false}, //this is slightly weird (responding true to isGzippedSAMFile)
+                {"cram_query_sorted.cram",  false, false, true},
+        };
+        return scenarios;
+    }
+
+    @DataProvider(name = "sourceLikeCram")
+    public Object[][] sourceLikeCramData() {
+        return new Object[][] {
+                {"cram_with_bai_index.cram", true, true },
+                {"compressed.bam", true, false },
+                {"unsorted.sam", true, false },
+                // fails due to https://github.com/samtools/htsjdk/issues/618
+                //{"ftp://ftp.broadinstitute.org/dummy.cram", false, true}
+                {"http://www.broadinstitute.org/dummy.cram", false, true},
+                {"https://www.broadinstitute.org/dummy.cram", false, true},
+                {"http://www.broadinstitute.org/dummy.cram?alt=media", false, true},
+                {"http://www.broadinstitute.org/test?file=my.cram", false, true},
+                {"http://www.broadinstitute.org/test?foo=bar,file=my.cram", false, true},
+                {"http://www.broadinstitute.org/test?file=my.bam", false, false}
+        };
+    }
+
+    @Test(dataProvider = "sourceLikeCram")
+    public void sourceLikeCram(
+            final String resourceName,
+            final boolean isFile,
+            final boolean expected) throws IOException
+    {
+        SeekableStream strm = isFile ?
+            new SeekableFileStream(new File(TEST_DATA_DIR, resourceName)) :
+            SeekableStreamFactory.getInstance().getStreamFor(new URL(resourceName));
+        Assert.assertEquals(SamStreams.sourceLikeCram(strm), expected);
+    }
+
+
+    @DataProvider(name = "sourceLikeBam")
+    public Object[][] sourceLikeBamData() {
+        return new Object[][] {
+                {"cram_with_bai_index.cram", true, false },
+                {"compressed.bam", true, true },
+                {"unsorted.sam", true, false },
+                // fails due to a combination of https://github.com/samtools/htsjdk/issues/619 and
+                // https://github.com/samtools/htsjdk/issues/618
+                //{"ftp://ftp.broadinstitute.org/dummy.cram", false, false},
+                {"ftp://ftp.broadinstitute.org/dummy.bam", false, true},
+                {"http://www.broadinstitute.org/dummy.bam", false, true},
+                {"https://www.broadinstitute.org/dummy.bam", false, true},
+                {"http://www.broadinstitute.org/dummy.bam?alt=media", false, true},
+                {"http://www.broadinstitute.org/test?file=my.bam", false, true},
+                {"http://www.broadinstitute.org/test?foo=bar,file=my.bam", false, true},
+                {"http://www.broadinstitute.org/test?file=my.cram", false, false}
+        };
+    }
+
+    @Test(dataProvider = "sourceLikeBam")
+    public void sourceLikeBam(
+            final String resourceName,
+            final boolean isFile,
+            final boolean expected) throws IOException
+    {
+        SeekableStream strm = isFile ?
+                new SeekableFileStream(new File(TEST_DATA_DIR, resourceName)) :
+                SeekableStreamFactory.getInstance().getStreamFor(new URL(resourceName));
+        Assert.assertEquals(SamStreams.sourceLikeBam(strm), expected);
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/htsjdk/samtools/SequenceNameTruncationAndValidationTest.java b/src/test/java/htsjdk/samtools/SequenceNameTruncationAndValidationTest.java
new file mode 100644
index 0000000..2c3a95c
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/SequenceNameTruncationAndValidationTest.java
@@ -0,0 +1,98 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools;
+
+import htsjdk.samtools.util.CloserUtil;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+
+/**
+ * Tests the new functionality that truncates sequence names at the first whitespace in order to deal
+ * with older BAMs that had spaces in sequence names.
+ *
+ * @author alecw at broadinstitute.org
+ */
+public class SequenceNameTruncationAndValidationTest {
+    private static File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools");
+
+    @Test(expectedExceptions = {SAMException.class}, dataProvider = "badSequenceNames")
+    public void testSequenceRecordThrowsWhenInvalid(final String sequenceName) {
+        new SAMSequenceRecord(sequenceName, 123);
+        Assert.fail("Should not reach here.");
+    }
+
+    @DataProvider(name = "badSequenceNames")
+    public Object[][] badSequenceNames() {
+        return new Object[][]{
+                {" "},
+                {"\t"},
+                {"\n"},
+                {"="},
+                {"Hi, Mom!"}
+        };
+    }
+
+    @Test(dataProvider = "goodSequenceNames")
+    public void testSequenceRecordPositiveTest(final String sequenceName) {
+        new SAMSequenceRecord(sequenceName, 123);
+    }
+
+    @DataProvider(name = "goodSequenceNames")
+    public Object[][] goodSequenceNames() {
+        return new Object[][]{
+                {"Hi, at Mom!"}
+        };
+    }
+
+    @Test(dataProvider = "samFilesWithSpaceInSequenceName")
+    public void testSamSequenceTruncation(final String filename) {
+        final SamReader reader = SamReaderFactory.makeDefault().open(new File(TEST_DATA_DIR, filename));
+        for (final SAMSequenceRecord sequence : reader.getFileHeader().getSequenceDictionary().getSequences()) {
+            Assert.assertFalse(sequence.getSequenceName().contains(" "), sequence.getSequenceName());
+        }
+        for (final SAMRecord rec : reader) {
+            Assert.assertFalse(rec.getReferenceName().contains(" "));
+        }
+        CloserUtil.close(reader);
+    }
+
+    @DataProvider(name = "samFilesWithSpaceInSequenceName")
+    public Object[][] samFilesWithSpaceInSequenceName() {
+        return new Object[][]{
+                {"sequenceWithSpace.sam"},
+                {"sequenceWithSpace.bam"}
+        };
+    }
+
+    @Test(expectedExceptions = {SAMFormatException.class})
+    public void testBadRname() {
+        final SamReader reader = SamReaderFactory.makeDefault().open(new File(TEST_DATA_DIR, "readWithBadRname.sam"));
+        for (final SAMRecord rec : reader) {
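+            // Iterating forces each record to be parsed; the bad RNAME should raise a SAMFormatException before the loop finishes.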
+        }
+        Assert.fail("Should not reach here.");
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/ValidateSamFileTest.java b/src/test/java/htsjdk/samtools/ValidateSamFileTest.java
new file mode 100644
index 0000000..33a34da
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/ValidateSamFileTest.java
@@ -0,0 +1,471 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+
+package htsjdk.samtools;
+
+import htsjdk.samtools.BamIndexValidator.IndexValidationStringency;
+import htsjdk.samtools.metrics.MetricBase;
+import htsjdk.samtools.metrics.MetricsFile;
+import htsjdk.samtools.reference.ReferenceSequence;
+import htsjdk.samtools.reference.ReferenceSequenceFile;
+import htsjdk.samtools.util.CloserUtil;
+import htsjdk.samtools.util.Histogram;
+import htsjdk.samtools.util.StringUtil;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.LineNumberReader;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.util.Arrays;
+import java.util.Iterator;
+
+/**
+ * Tests almost all error conditions detected by the SAM file validator. The
+ * conditions not tested are proactively prevented by the SAM generation code.
+ *
+ * @author Doug Voet
+ */
+public class ValidateSamFileTest {
+    private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools/ValidateSamFileTest");
+
+    @Test
+    public void testValidSamFile() throws Exception {
+        final SamReader samReader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT).open(new File(TEST_DATA_DIR, "valid.sam"));
+        final Histogram<String> results = executeValidation(samReader, null, IndexValidationStringency.EXHAUSTIVE);
+        Assert.assertTrue(results.isEmpty());
+    }
+
+    @Test
+    public void testSamFileVersion1pt5() throws Exception {
+        final SamReader samReader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT).open(new File(TEST_DATA_DIR, "test_samfile_version_1pt5.bam"));
+        final Histogram<String> results = executeValidation(samReader, null, IndexValidationStringency.EXHAUSTIVE);
+        Assert.assertTrue(results.isEmpty());
+    }
+
+    @Test
+    public void testSortOrder() throws IOException {
+        Histogram<String> results = executeValidation(SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT)
+                .open(new File(TEST_DATA_DIR, "invalid_coord_sort_order.sam")), null, IndexValidationStringency.EXHAUSTIVE);
+        Assert.assertEquals(results.get(SAMValidationError.Type.RECORD_OUT_OF_ORDER.getHistogramString()).getValue(), 1.0);
+        results = executeValidation(SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT)
+                .open(new File(TEST_DATA_DIR, "invalid_queryname_sort_order.sam")), null, IndexValidationStringency.EXHAUSTIVE);
+        Assert.assertEquals(results.get(SAMValidationError.Type.RECORD_OUT_OF_ORDER.getHistogramString()).getValue(), 5.0);
+    }
+
+    @Test
+    public void testVerbose() throws IOException {
+        final SAMRecordSetBuilder samBuilder = new SAMRecordSetBuilder();
+
+        for (int i = 0; i < 20; i++) {
+            samBuilder.addFrag(String.valueOf(i), 1, i, false);
+        }
+        for (final SAMRecord record : samBuilder) {
+            record.setProperPairFlag(true);
+        }
+
+        final StringWriter results = new StringWriter();
+        final SamFileValidator validator = new SamFileValidator(new PrintWriter(results), 8000);
+        validator.setVerbose(true, 10);
+        validator.validateSamFileVerbose(samBuilder.getSamReader(), null);
+
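+        // All 20 records violate the proper-pair check, but verbose output is capped at 10 errors above; the assertion below expects those 10 lines plus one more.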
+        final int lineCount = results.toString().split("\n").length;
+        Assert.assertEquals(lineCount, 11);
+    }
+
+    @Test
+    public void testUnpairedRecords() throws IOException {
+        final SAMRecordSetBuilder samBuilder = new SAMRecordSetBuilder();
+
+        for (int i = 0; i < 6; i++) {
+            samBuilder.addFrag(String.valueOf(i), i, i, false);
+        }
+        final Iterator<SAMRecord> records = samBuilder.iterator();
+        records.next().setProperPairFlag(true);
+        records.next().setMateUnmappedFlag(true);
+        records.next().setMateNegativeStrandFlag(true);
+        records.next().setFirstOfPairFlag(true);
+        records.next().setSecondOfPairFlag(true);
+        records.next().setMateReferenceIndex(1);
+
+        final Histogram<String> results = executeValidation(samBuilder.getSamReader(), null, IndexValidationStringency.EXHAUSTIVE);
+
+        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_FLAG_PROPER_PAIR.getHistogramString()).getValue(), 1.0);
+        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_FLAG_MATE_UNMAPPED.getHistogramString()).getValue(), 1.0);
+        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_FLAG_MATE_NEG_STRAND.getHistogramString()).getValue(), 1.0);
+        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_FLAG_FIRST_OF_PAIR.getHistogramString()).getValue(), 1.0);
+        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_FLAG_SECOND_OF_PAIR.getHistogramString()).getValue(), 1.0);
+        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_MATE_REF_INDEX.getHistogramString()).getValue(), 1.0);
+    }
+
+    @Test
+    public void testPairedRecords() throws IOException {
+        final SAMRecordSetBuilder samBuilder = new SAMRecordSetBuilder();
+
+        for (int i = 0; i < 5; i++) {
+            samBuilder.addPair(String.valueOf(i), i, i, i + 100);
+        }
+        final Iterator<SAMRecord> records = samBuilder.iterator();
+        records.next().setMateReferenceName("*");
+        records.next().setMateAlignmentStart(Integer.MAX_VALUE);
+        records.next().setMateAlignmentStart(records.next().getAlignmentStart() + 1);
+        records.next().setMateNegativeStrandFlag(!records.next().getReadNegativeStrandFlag());
+        records.next().setMateReferenceIndex(records.next().getReferenceIndex() + 1);
+        records.next().setMateUnmappedFlag(!records.next().getReadUnmappedFlag());
+
+
+        final Histogram<String> results = executeValidation(samBuilder.getSamReader(), null, IndexValidationStringency.EXHAUSTIVE);
+
+        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_ALIGNMENT_START.getHistogramString()).getValue(), 3.0);
+        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_FLAG_MATE_UNMAPPED.getHistogramString()).getValue(), 1.0);
+        Assert.assertEquals(results.get(SAMValidationError.Type.MISMATCH_FLAG_MATE_NEG_STRAND.getHistogramString()).getValue(), 1.0);
+        Assert.assertEquals(results.get(SAMValidationError.Type.MISMATCH_FLAG_MATE_UNMAPPED.getHistogramString()).getValue(), 1.0);
+        Assert.assertEquals(results.get(SAMValidationError.Type.MISMATCH_MATE_ALIGNMENT_START.getHistogramString()).getValue(), 2.0);
+        Assert.assertEquals(results.get(SAMValidationError.Type.MISMATCH_MATE_REF_INDEX.getHistogramString()).getValue(), 2.0);
+    }
+
+    @Test(dataProvider = "missingMateTestCases")
+    public void testMissingMate(final SAMFileHeader.SortOrder sortOrder) throws IOException {
+        final SAMRecordSetBuilder samBuilder = new SAMRecordSetBuilder(true, sortOrder);
+
+        samBuilder.addPair(String.valueOf(1), 1, 1, 101);
+        final Iterator<SAMRecord> records = samBuilder.iterator();
+        records.next();
+        records.remove();
+        final Histogram<String> results = executeValidation(samBuilder.getSamReader(), null, IndexValidationStringency.EXHAUSTIVE);
+
+        Assert.assertEquals(results.get(SAMValidationError.Type.MATE_NOT_FOUND.getHistogramString()).getValue(), 1.0);
+    }
+
+    @DataProvider(name = "missingMateTestCases")
+    public Object[][] missingMateTestCases() {
+        return new Object[][]{
+                {SAMFileHeader.SortOrder.coordinate},
+                {SAMFileHeader.SortOrder.queryname},
+                {SAMFileHeader.SortOrder.unsorted},
+        };
+    }
+
+    @Test
+    public void testUnmappedRecords() throws IOException {
+        final SAMRecordSetBuilder samBuilder = new SAMRecordSetBuilder();
+
+        for (int i = 0; i < 4; i++) {
+            samBuilder.addUnmappedFragment(String.valueOf(i));
+        }
+        final Iterator<SAMRecord> records = samBuilder.iterator();
+        records.next().setReadNegativeStrandFlag(true);
+        records.next().setNotPrimaryAlignmentFlag(true);
+        records.next().setMappingQuality(10);
+        records.next().setCigarString("36M");
+
+        final Histogram<String> results = executeValidation(samBuilder.getSamReader(), null, IndexValidationStringency.EXHAUSTIVE);
+
+        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_FLAG_NOT_PRIM_ALIGNMENT.getHistogramString()).getValue(), 1.0);
+        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_MAPPING_QUALITY.getHistogramString()).getValue(), 1.0);
+    }
+
+    @Test
+    public void testMappedRecords() throws IOException {
+        final SAMRecordSetBuilder samBuilder = new SAMRecordSetBuilder();
+
+        for (int i = 0; i < 2; i++) {
+            samBuilder.addFrag(String.valueOf(i), i, i, false);
+        }
+        final Iterator<SAMRecord> records = samBuilder.iterator();
+        records.next().setCigarString("25M3S25M");
+        records.next().setReferenceName("*");
+
+        final Histogram<String> results = executeValidation(samBuilder.getSamReader(), null, IndexValidationStringency.EXHAUSTIVE);
+
+        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_CIGAR.getHistogramString()).getValue(), 1.0);
+        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_FLAG_READ_UNMAPPED.getHistogramString()).getValue(), 1.0);
+        Assert.assertEquals(results.get(SAMValidationError.Type.MISSING_TAG_NM.getHistogramString()).getValue(), 1.0);
+    }
+
+    @Test
+    public void testNmFlagValidation() throws IOException {
+        final SAMRecordSetBuilder samBuilder = new SAMRecordSetBuilder();
+
+        for (int i = 0; i < 3; i++) {
+            samBuilder.addFrag(String.valueOf(i), i, i + 1, false);
+        }
+        final Iterator<SAMRecord> records = samBuilder.iterator();
+        records.next().setAttribute(ReservedTagConstants.NM, 4);
+
+        // PIC-215: Confirm correct NM value when there is an insertion and a deletion.
+        final SAMRecord recordWithInsert = records.next();
+        final byte[] sequence = recordWithInsert.getReadBases();
+        Arrays.fill(sequence, (byte) 'A');
+        recordWithInsert.setReadBases(sequence);
+        recordWithInsert.setCigarString("1D" + Integer.toString(sequence.length - 1) + "M1I");
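+        // One deleted base (1D) plus one inserted base (1I) against the all-A reference gives an edit distance of NM=2.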
+        recordWithInsert.setAttribute(ReservedTagConstants.NM, 2);
+
+        final Histogram<String> results = executeValidation(samBuilder.getSamReader(), new ReferenceSequenceFile() {
+            private int index = 0;
+
+            public SAMSequenceDictionary getSequenceDictionary() {
+                return null;
+            }
+
+            public ReferenceSequence nextSequence() {
+                final byte[] bases = new byte[10000];
+                Arrays.fill(bases, (byte) 'A');
+                return new ReferenceSequence("foo", index++, bases);
+            }
+
+            public void reset() {
+                this.index = 0;
+            }
+
+            public boolean isIndexed() { return false; }
+
+            public ReferenceSequence getSequence(final String contig) {
+                throw new UnsupportedOperationException();
+            }
+
+            public ReferenceSequence getSubsequenceAt(final String contig, final long start, final long stop) {
+                throw new UnsupportedOperationException();
+            }
+
+            @Override
+            public void close() throws IOException {
+                //no-op
+            }
+        }, IndexValidationStringency.EXHAUSTIVE);
+
+        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_TAG_NM.getHistogramString()).getValue(), 1.0);
+        Assert.assertEquals(results.get(SAMValidationError.Type.MISSING_TAG_NM.getHistogramString()).getValue(), 1.0);
+    }
+
+    @Test(dataProvider = "testMateCigarScenarios")
+    public void testMateCigarScenarios(final String scenario, final String inputFile, final SAMValidationError.Type expectedError)
+            throws Exception {
+        final SamReader reader = SamReaderFactory.makeDefault().open(new File(TEST_DATA_DIR, inputFile));
+        final Histogram<String> results = executeValidation(reader, null, IndexValidationStringency.EXHAUSTIVE);
+        Assert.assertNotNull(results.get(expectedError.getHistogramString()));
+        Assert.assertEquals(results.get(expectedError.getHistogramString()).getValue(), 1.0);
+    }
+
+
+    @DataProvider(name = "testMateCigarScenarios")
+    public Object[][] testMateCigarScenarios() {
+        return new Object[][]{
+                {"invalid mate cigar", "invalid_mate_cigar_string.sam", SAMValidationError.Type.MISMATCH_MATE_CIGAR_STRING},
+                {"inappropriate mate cigar", "inappropriate_mate_cigar_string.sam", SAMValidationError.Type.MATE_CIGAR_STRING_INVALID_PRESENCE}
+        };
+    }
+
+    @Test(dataProvider = "testTruncatedScenarios")
+    public void testTruncated(final String scenario, final String inputFile, final SAMValidationError.Type expectedError)
+            throws Exception {
+        final SamReader reader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT).open(new File(TEST_DATA_DIR, inputFile));
+        final Histogram<String> results = executeValidation(reader, null, IndexValidationStringency.EXHAUSTIVE);
+        Assert.assertNotNull(results.get(expectedError.getHistogramString()));
+        Assert.assertEquals(results.get(expectedError.getHistogramString()).getValue(), 1.0);
+    }
+
+    @DataProvider(name = "testTruncatedScenarios")
+    public Object[][] testTruncatedScenarios() {
+        return new Object[][]{
+                {"truncated bam", "truncated.bam", SAMValidationError.Type.TRUNCATED_FILE},
+                {"truncated quals", "truncated_quals.sam", SAMValidationError.Type.MISMATCH_READ_LENGTH_AND_QUALS_LENGTH},
+                // TODO: Because validation is turned off when parsing, this error is not detectable currently by validator.
+                //{"truncated tag", "truncated_tag.sam", SAMValidationError.Type.TRUNCATED_FILE},
+                // TODO: Currently, this is not considered an error.  Should it be?
+                //{"hanging tab", "hanging_tab.sam", SAMValidationError.Type.TRUNCATED_FILE},
+        };
+    }
+
+    @Test(expectedExceptions = SAMException.class, dataProvider = "testFatalParsingErrors")
+    public void testFatalParsingErrors(final String scenario, final String inputFile) throws Exception {
+        final SamReader reader = SamReaderFactory.makeDefault().open(new File(TEST_DATA_DIR, inputFile));
+        executeValidation(reader, null, IndexValidationStringency.EXHAUSTIVE);
+        Assert.fail("Exception should have been thrown.");
+    }
+
+    @DataProvider(name = "testFatalParsingErrors")
+    public Object[][] testFatalParsingErrorScenarios() {
+        return new Object[][]{
+                {"missing fields", "missing_fields.sam"},
+                {"zero length read", "zero_length_read.sam"}
+        };
+    }
+
+    @Test
+    public void testHeaderVersionValidation() throws Exception {
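+        // "Hi,Mom!" is not a valid @HD VN value, so validation should report INVALID_VERSION_NUMBER exactly once.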
+        final String header = "@HD	VN:Hi,Mom!	SO:queryname";
+        final InputStream strm = new ByteArrayInputStream(StringUtil.stringToBytes(header));
+        final SamReader samReader = SamReaderFactory.makeDefault().open(SamInputResource.of(strm));
+        final Histogram<String> results = executeValidation(samReader, null, IndexValidationStringency.EXHAUSTIVE);
+        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_VERSION_NUMBER.getHistogramString()).getValue(), 1.0);
+    }
+
+    @Test(enabled = false, description = "File is actually valid for Standard quality scores so this test fails with an NPE.")
+    public void testQualityFormatValidation() throws Exception {
+        final SamReader samReader = SamReaderFactory.makeDefault().open(new File("./src/test/resources/htsjdk/samtools/util/QualityEncodingDetectorTest/illumina-as-standard.bam"));
+        final Histogram<String> results = executeValidation(samReader, null, IndexValidationStringency.EXHAUSTIVE);
+        final Histogram.Bin<String> bin = results.get(SAMValidationError.Type.INVALID_QUALITY_FORMAT.getHistogramString());
+        final double value = bin.getValue();
+        Assert.assertEquals(value, 1.0);
+    }
+
+    @Test
+    public void testCigarOffEndOfReferenceValidation() throws Exception {
+        final SAMRecordSetBuilder samBuilder = new SAMRecordSetBuilder();
+        samBuilder.addFrag(String.valueOf(0), 0, 1, false);
+        final int contigLength = samBuilder.getHeader().getSequence(0).getSequenceLength();
+        // Should hang off the end.
+        samBuilder.addFrag(String.valueOf(1), 0, contigLength - 1, false);
+        final Histogram<String> results = executeValidation(samBuilder.getSamReader(), null, IndexValidationStringency.EXHAUSTIVE);
+        Assert.assertNotNull(results.get(SAMValidationError.Type.CIGAR_MAPS_OFF_REFERENCE.getHistogramString()));
+        Assert.assertEquals(results.get(SAMValidationError.Type.CIGAR_MAPS_OFF_REFERENCE.getHistogramString()).getValue(), 1.0);
+    }
+
+    @Test(expectedExceptions = SAMFormatException.class)
+    public void testConflictingTags() throws Exception {
+        final String header = "@HD	VN:1.0	SO:queryname	SO:coordinate";
+        final InputStream strm = new ByteArrayInputStream(StringUtil.stringToBytes(header));
+        final SamReader reader = SamReaderFactory.makeDefault().open(SamInputResource.of(strm));
+        Assert.fail("Exception should have been thrown.");
+    }
+
+    @Test
+    public void testRedundantTags() throws Exception {
+        final String header = "@HD	VN:1.0	SO:coordinate	SO:coordinate";
+        final InputStream strm = new ByteArrayInputStream(StringUtil.stringToBytes(header));
+        final SamReader samReader = SamReaderFactory.makeDefault().open(SamInputResource.of(strm));
+        Assert.assertEquals(SAMFileHeader.SortOrder.coordinate, samReader.getFileHeader().getSortOrder());
+        CloserUtil.close(samReader);
+    }
+
+    @Test
+    public void testHeaderValidation() throws Exception {
+        final SamReader samReader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT)
+                .open(new File(TEST_DATA_DIR, "buggyHeader.sam"));
+        final Histogram<String> results = executeValidation(samReader, null, IndexValidationStringency.EXHAUSTIVE);
+        Assert.assertEquals(results.get(SAMValidationError.Type.UNRECOGNIZED_HEADER_TYPE.getHistogramString()).getValue(), 3.0);
+        Assert.assertEquals(results.get(SAMValidationError.Type.HEADER_TAG_MULTIPLY_DEFINED.getHistogramString()).getValue(), 1.0);
+    }
+
+    @Test
+    public void testPlatformMissing() throws Exception {
+        final SamReader samReader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT)
+                .open((new File(TEST_DATA_DIR, "missing_platform_unit.sam")));
+        final Histogram<String> results = executeValidation(samReader, null, IndexValidationStringency.EXHAUSTIVE);
+        Assert.assertEquals(results.get(SAMValidationError.Type.MISSING_PLATFORM_VALUE.getHistogramString()).getValue(), 1.0);
+    }
+    
+    @Test
+    public void testPlatformInvalid() throws Exception {
+        final SamReader samReader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT)
+                .open((new File(TEST_DATA_DIR, "invalid_platform_unit.sam")));
+        final Histogram<String> results = executeValidation(samReader, null, IndexValidationStringency.EXHAUSTIVE);
+        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_PLATFORM_VALUE.getHistogramString()).getValue(), 1.0);
+    }
+
+    @Test
+    public void testDuplicateRGIDs() throws Exception {
+        final SamReader samReader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT)
+                .open((new File(TEST_DATA_DIR, "duplicate_rg.sam")));
+        final Histogram<String> results = executeValidation(samReader, null, IndexValidationStringency.EXHAUSTIVE);
+        Assert.assertEquals(results.get(SAMValidationError.Type.DUPLICATE_READ_GROUP_ID.getHistogramString()).getValue(), 1.0);
+    }
+
+    @Test
+    public void testIndexFileValidation() throws Exception {
+        final SamReader samReader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT)
+                .enable(SamReaderFactory.Option.CACHE_FILE_BASED_INDEXES).open((new File(TEST_DATA_DIR, "bad_index.bam")));
+
+        Histogram<String> results = executeValidation(samReader, null, IndexValidationStringency.EXHAUSTIVE);
+        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_INDEX_FILE_POINTER.getHistogramString()).getValue(), 1.0);
+
+        results = executeValidation(samReader, null, IndexValidationStringency.LESS_EXHAUSTIVE);
+        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_INDEX_FILE_POINTER.getHistogramString()).getValue(), 1.0);
+
+    }
+
+    private Histogram<String> executeValidation(final SamReader samReader, final ReferenceSequenceFile reference, final IndexValidationStringency stringency) throws IOException {
+        final File outFile = File.createTempFile("validation", ".txt");
+        outFile.deleteOnExit();
+        final PrintWriter out = new PrintWriter(outFile);
+        new SamFileValidator(out, 8000).setIndexValidationStringency(stringency).validateSamFileSummary(samReader, reference);
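+        // The summary output is either the literal line "No errors found" or a MetricsFile whose histogram is keyed by error-type name.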
+        final LineNumberReader reader = new LineNumberReader(new FileReader(outFile));
+        if (reader.readLine().equals("No errors found")) {
+            return new Histogram<String>();
+        }
+        final MetricsFile<MetricBase, String> outputFile = new MetricsFile<MetricBase, String>();
+        outputFile.read(new FileReader(outFile));
+        Assert.assertNotNull(outputFile.getHistogram());
+        return outputFile.getHistogram();
+    }
+    
+    private void testHeaderVersion(final String version, final boolean expectValid) throws Exception {
+        final File samFile = File.createTempFile("validateHeader.", ".sam");
+        samFile.deleteOnExit();
+        final PrintWriter pw = new PrintWriter(samFile);
+        pw.println("@HD\tVN:" + version);
+        pw.close();
+        final SamReader reader = SamReaderFactory.makeDefault().open(samFile);
+        final Histogram<String> results = executeValidation(reader, null, IndexValidationStringency.EXHAUSTIVE);
+        if (expectValid) Assert.assertNull(results.get(SAMValidationError.Type.INVALID_VERSION_NUMBER.getHistogramString()));
+        else {
+            Assert.assertNotNull(results.get(SAMValidationError.Type.INVALID_VERSION_NUMBER.getHistogramString()));
+            Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_VERSION_NUMBER.getHistogramString()).getValue(), 1.0);
+        }
+    }
+
+    @Test
+    public void testHeaderVersions() throws Exception {
+        // Test the acceptable versions
+        for (final String version : SAMFileHeader.ACCEPTABLE_VERSIONS) {
+            testHeaderVersion(version, true);
+        }
+
+        // Test an unacceptable version
+        testHeaderVersion("1.6", false);
+    }
+
+    @Test(enabled = false)
+    public void duplicateReads() throws Exception {
+        final SamReader samReader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT).open(new File(TEST_DATA_DIR, "duplicated_reads.sam"));
+        final Histogram<String> results = executeValidation(samReader, null, IndexValidationStringency.EXHAUSTIVE);
+        Assert.assertFalse(results.isEmpty());
+        Assert.assertEquals(results.get(SAMValidationError.Type.MATES_ARE_SAME_END.getHistogramString()).getValue(), 2.0);
+    }
+
+    @Test
+    public void duplicateReadsOutOfOrder() throws Exception {
+        final SamReader samReader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT).open(new File(TEST_DATA_DIR, "duplicated_reads_out_of_order.sam"));
+        final Histogram<String> results = executeValidation(samReader, null, IndexValidationStringency.EXHAUSTIVE);
+        Assert.assertFalse(results.isEmpty());
+        Assert.assertEquals(results.get(SAMValidationError.Type.MATES_ARE_SAME_END.getHistogramString()).getValue(), 2.0);
+    }
+}
diff --git a/src/tests/java/htsjdk/samtools/apps/.gitignore b/src/test/java/htsjdk/samtools/apps/.gitignore
similarity index 100%
rename from src/tests/java/htsjdk/samtools/apps/.gitignore
rename to src/test/java/htsjdk/samtools/apps/.gitignore
diff --git a/src/tests/java/htsjdk/samtools/cram/CRAIEntryTest.java b/src/test/java/htsjdk/samtools/cram/CRAIEntryTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/cram/CRAIEntryTest.java
rename to src/test/java/htsjdk/samtools/cram/CRAIEntryTest.java
diff --git a/src/test/java/htsjdk/samtools/cram/CRAIIndexTest.java b/src/test/java/htsjdk/samtools/cram/CRAIIndexTest.java
new file mode 100644
index 0000000..7ebdb75
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/cram/CRAIIndexTest.java
@@ -0,0 +1,220 @@
+package htsjdk.samtools.cram;
+
+import htsjdk.samtools.BAMFileSpan;
+import htsjdk.samtools.CRAMCRAIIndexer;
+import htsjdk.samtools.DiskBasedBAMFileIndex;
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.SAMSequenceRecord;
+import htsjdk.samtools.seekablestream.SeekableBufferedStream;
+import htsjdk.samtools.seekablestream.SeekableFileStream;
+import htsjdk.samtools.seekablestream.SeekableStream;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.BufferedInputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.function.BiFunction;
+import java.util.zip.GZIPOutputStream;
+
+/**
+ * Created by vadim on 25/08/2015.
+ */
+public class CRAIIndexTest {
+
+    @Test
+    public void testFind() throws IOException, CloneNotSupportedException {
+        final List<CRAIEntry> index = new ArrayList<CRAIEntry>();
+
+        final int sequenceId = 1;
+        CRAIEntry e = new CRAIEntry();
+        e.sequenceId = sequenceId;
+        e.alignmentStart = 1;
+        e.alignmentSpan = 1;
+        e.containerStartOffset = 1;
+        e.sliceOffset = 1;
+        e.sliceSize = 0;
+        index.add(e);
+
+        e = e.clone();
+        e.alignmentStart = 2;
+        e.containerStartOffset = 2;
+        index.add(e);
+
+        e = e.clone();
+        e.alignmentStart = 3;
+        e.containerStartOffset = 3;
+        index.add(e);
+
+        Assert.assertFalse(allFoundEntriesIntersectQueryInFind(index, sequenceId, 1, 0));
+
+        Assert.assertTrue(allFoundEntriesIntersectQueryInFind(index, sequenceId, 1, 1));
+        Assert.assertTrue(allFoundEntriesIntersectQueryInFind(index, sequenceId, 1, 2));
+        Assert.assertTrue(allFoundEntriesIntersectQueryInFind(index, sequenceId, 2, 1));
+        Assert.assertTrue(allFoundEntriesIntersectQueryInFind(index, sequenceId, 1, 3));
+
+        final int nonExistentSequenceId = 2;
+        Assert.assertFalse(allFoundEntriesIntersectQueryInFind(index, nonExistentSequenceId, 2, 1));
+        // a query starting beyond all entries:
+        Assert.assertFalse(allFoundEntriesIntersectQueryInFind(index, sequenceId, 4, 1));
+    }
+
+    private boolean allFoundEntriesIntersectQueryInFind(final List<CRAIEntry> index, final int sequenceId, final int start, final int span) {
+        int foundCount = 0;
+        for (final CRAIEntry found : CRAIIndex.find(index, sequenceId, start, span)) {
+            foundCount++;
+            Assert.assertEquals(found.sequenceId, sequenceId);
+            boolean intersects = false;
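+            // Brute-force overlap check: the entry intersects the query if some position falls in both [alignmentStart, alignmentStart + alignmentSpan] and [start, start + span].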
+            for (int pos = Math.min(found.alignmentStart, start); pos <= Math.max(found.alignmentStart + found.alignmentSpan, start + span); pos++) {
+                if (pos >= found.alignmentStart && pos >= start &&
+                        pos <= found.alignmentStart + found.alignmentSpan && pos <= start + span) {
+                    intersects = true;
+                    break;
+                }
+            }
+            if (!intersects) {
+                return false;
+            }
+        }
+        return foundCount > 0;
+    }
+
+    @Test(expectedExceptions = NullPointerException.class)
+    public void testCraiRequiresDictionary() throws IOException {
+        try (final ByteArrayOutputStream baos = new ByteArrayOutputStream();
+             final GZIPOutputStream gos = new GZIPOutputStream(baos);
+             final BufferedInputStream bis = new BufferedInputStream(new ByteArrayInputStream(baos.toByteArray()))) {
+            CRAIIndex.openCraiFileAsBaiStream(bis, null);
+        }
+    }
+
+    @Test
+    public void testCraiInMemory() throws IOException {
+        doCRAITest(this::getBaiStreamFromMemory);
+    }
+
+    @Test
+    public void testCraiFromFile() throws IOException {
+        doCRAITest(this::getBaiStreamFromFile);
+    }
+
+    private void doCRAITest(BiFunction<SAMSequenceDictionary, List<CRAIEntry>, SeekableStream> getBaiStreamForIndex) throws IOException {
+        final ArrayList<CRAIEntry> index = new ArrayList<CRAIEntry>();
+        final CRAIEntry entry = new CRAIEntry();
+        entry.sequenceId = 0;
+        entry.alignmentStart = 1;
+        entry.alignmentSpan = 2;
+        entry.sliceOffset = 3;
+        entry.sliceSize = 4;
+        entry.containerStartOffset = 5;
+        index.add(entry);
+
+        final SAMSequenceDictionary dictionary = new SAMSequenceDictionary();
+        dictionary.addSequence(new SAMSequenceRecord("1", 100));
+
+        final SeekableStream baiStream = getBaiStreamForIndex.apply(dictionary, index);
+
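+        // Presenting the .crai as a BAI-style stream lets the standard BAM index machinery (DiskBasedBAMFileIndex) resolve spans for the query.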
+        final DiskBasedBAMFileIndex bamIndex = new DiskBasedBAMFileIndex(baiStream, dictionary);
+        final BAMFileSpan span = bamIndex.getSpanOverlapping(entry.sequenceId, entry.alignmentStart, entry.alignmentStart);
+        Assert.assertNotNull(span);
+        final long[] coordinateArray = span.toCoordinateArray();
+        Assert.assertEquals(coordinateArray.length, 2);
+        Assert.assertEquals(coordinateArray[0] >> 16, entry.containerStartOffset);
+        Assert.assertEquals(coordinateArray[1] & 0xFFFF, 1);
+    }
+
+    public SeekableStream getBaiStreamFromMemory(SAMSequenceDictionary dictionary, final List<CRAIEntry> index) {
+        try {
+            ByteArrayOutputStream baos = new ByteArrayOutputStream();
+            SAMFileHeader samHeader = new SAMFileHeader();
+            samHeader.setSortOrder(SAMFileHeader.SortOrder.coordinate);
+            CRAMCRAIIndexer indexer = new CRAMCRAIIndexer(baos, samHeader);
+            for (CRAIEntry entry: index) {
+                indexer.addEntry(entry);
+            }
+            indexer.finish();
+            final SeekableStream baiStream = CRAIIndex.openCraiFileAsBaiStream(new ByteArrayInputStream(baos.toByteArray()), dictionary);
+            Assert.assertNotNull(baiStream);
+            return baiStream;
+        }
+        catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    private SeekableStream getBaiStreamFromFile(SAMSequenceDictionary dictionary, final List<CRAIEntry> index) {
+        try {
+            final File file = File.createTempFile("test", ".crai");
+            file.deleteOnExit();
+            final FileOutputStream fos = new FileOutputStream(file);
+            SAMFileHeader samHeader = new SAMFileHeader();
+            samHeader.setSortOrder(SAMFileHeader.SortOrder.coordinate);
+            CRAMCRAIIndexer indexer = new CRAMCRAIIndexer(fos, samHeader);
+            for (CRAIEntry entry: index) {
+                indexer.addEntry(entry);
+            }
+            indexer.finish();
+            final SeekableStream baiStream = CRAIIndex.openCraiFileAsBaiStream(new SeekableBufferedStream(new SeekableFileStream(file)), dictionary);
+            Assert.assertNotNull(baiStream);
+            return baiStream;
+        }
+        catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    @Test
+    public void testGetLeftmost() throws CloneNotSupportedException {
+        final List<CRAIEntry> index = new ArrayList<CRAIEntry>();
+        Assert.assertNull(CRAIIndex.getLeftmost(index));
+
+        final CRAIEntry e1 = new CRAIEntry();
+        e1.sequenceId = 1;
+        e1.alignmentStart = 2;
+        e1.alignmentSpan = 3;
+        e1.containerStartOffset = 4;
+        e1.sliceOffset = 5;
+        e1.sliceSize = 6;
+        index.add(e1);
+        // trivial case of single entry in index:
+        Assert.assertEquals(e1, CRAIIndex.getLeftmost(index));
+
+        final CRAIEntry e2 = e1.clone();
+        e2.alignmentStart = e1.alignmentStart + 1;
+        index.add(e2);
+        Assert.assertEquals(e1, CRAIIndex.getLeftmost(index));
+    }
+
+    @Test
+    public void testFindLastAlignedEntry() {
+        final List<CRAIEntry> index = new ArrayList<CRAIEntry>();
+        Assert.assertEquals(-1, CRAIIndex.findLastAlignedEntry(index));
+
+        // Scan all allowed combinations of 10 mapped/unmapped entries and assert the found last aligned entry:
+        final int indexSize = 10;
+        for (int lastAligned = 0; lastAligned < indexSize; lastAligned++) {
+            index.clear();
+            for (int i = 0; i < indexSize; i++) {
+                final CRAIEntry e = new CRAIEntry();
+
+                e.sequenceId = (i <= lastAligned ? 0 : -1);
+                e.alignmentStart = i;
+                index.add(e);
+            }
+            // check that expectations are correct before calling the findLastAlignedEntry method:
+            Assert.assertTrue(index.get(lastAligned).sequenceId != -1);
+            if (lastAligned < index.size() - 1) {
+                Assert.assertTrue(index.get(lastAligned + 1).sequenceId == -1);
+            }
+            // assert that the found value matches the expectation:
+            Assert.assertEquals(CRAIIndex.findLastAlignedEntry(index), lastAligned);
+        }
+    }
+
+}
diff --git a/src/tests/java/htsjdk/samtools/cram/LosslessRoundTripTest.java b/src/test/java/htsjdk/samtools/cram/LosslessRoundTripTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/cram/LosslessRoundTripTest.java
rename to src/test/java/htsjdk/samtools/cram/LosslessRoundTripTest.java
diff --git a/src/test/java/htsjdk/samtools/cram/VersionTest.java b/src/test/java/htsjdk/samtools/cram/VersionTest.java
new file mode 100644
index 0000000..0602eb3
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/cram/VersionTest.java
@@ -0,0 +1,106 @@
+package htsjdk.samtools.cram;
+
+import htsjdk.samtools.CRAMFileWriter;
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.cram.build.CramIO;
+import htsjdk.samtools.cram.common.CramVersions;
+import htsjdk.samtools.cram.common.Version;
+import htsjdk.samtools.cram.io.CramInt;
+import htsjdk.samtools.cram.io.InputStreamUtils;
+import htsjdk.samtools.cram.ref.ReferenceSource;
+import htsjdk.samtools.cram.structure.Block;
+import htsjdk.samtools.cram.structure.Container;
+import htsjdk.samtools.cram.structure.ContainerIO;
+import htsjdk.samtools.cram.structure.CramHeader;
+import htsjdk.samtools.seekablestream.SeekableMemoryStream;
+import htsjdk.samtools.seekablestream.SeekableStream;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.util.zip.CRC32;
+
+/**
+ * Created by vadim on 18/02/2016.
+ */
+public class VersionTest {
+    /**
+     * The purpose of this test is to ensure that a CRAM written by {@link CRAMFileWriter} meets the CRAM3 spec expectations:
+     * 1. version 3.+, verified both by byte comparison and via the CramIO API
+     * 2. an EOF container is present
+     * 3. the trailing 4 bytes of a container are a valid CRC32 of the preceding container bytes
+     * 4. the trailing 4 bytes of a block are a valid CRC32 of the preceding block bytes
+     * @throws IOException
+     */
+    @Test
+    public void test_V3() throws IOException {
+        ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        ReferenceSource source = new ReferenceSource((File) null);
+        SAMFileHeader samFileHeader = new SAMFileHeader();
+        Version version = CramVersions.CRAM_v3;
+        CRAMFileWriter w = new CRAMFileWriter(baos, source, samFileHeader, null);
+        SAMRecord record = new SAMRecord(samFileHeader);
+        record.setReadName("name");
+        record.setAlignmentStart(SAMRecord.NO_ALIGNMENT_START);
+        record.setReferenceIndex(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
+        record.setReadUnmappedFlag(true);
+        record.setReadBases("AAA".getBytes());
+        record.setBaseQualities("!!!".getBytes());
+        w.addAlignment(record);
+        w.close();
+
+        byte[] cramBytes = baos.toByteArray();
+
+        SeekableStream cramSeekableStream = new SeekableMemoryStream(cramBytes, null);
+        CramHeader cramHeader = CramIO.readCramHeader(cramSeekableStream);
+        Assert.assertEquals(version, cramHeader.getVersion());
+
+        // read whole container:
+        long containerStart = cramSeekableStream.position();
+        Container container = ContainerIO.readContainer(version, cramSeekableStream);
+        Assert.assertNotNull(container);
+
+        // ensure EOF follows:
+        Container eof = ContainerIO.readContainer(version, cramSeekableStream);
+        Assert.assertNotNull(eof);
+        Assert.assertTrue(eof.isEOF());
+
+        // TODO: ensure there is nothing left in the stream.
+
+        // position stream at the start of the 1st container:
+        cramSeekableStream.seek(containerStart);
+        // read only container header:
+        ContainerIO.readContainerHeader(version.major, cramSeekableStream);
+
+        // read the following 4 bytes of CRC32:
+        int crcByteSize = 4;
+        cramSeekableStream.seek(cramSeekableStream.position() - crcByteSize);
+        byte[] crcBytes = InputStreamUtils.readFully(cramSeekableStream, crcByteSize);
+        long firstBlockStart = cramSeekableStream.position();
+
+        // rewind to 1st container start:
+        cramSeekableStream.seek(containerStart);
+        // read container header bytes:
+        byte[] containerHeaderBytes = InputStreamUtils.readFully(cramSeekableStream, (int) (firstBlockStart - containerStart) - crcByteSize);
+
+        // test that checksum matches:
+        CRC32 digester = new CRC32();
+        digester.update(containerHeaderBytes);
+        Assert.assertEquals(container.checksum, (int) digester.getValue());
+        Assert.assertEquals(CramInt.int32(crcBytes), container.checksum);
+
+        // test block's crc:
+        cramSeekableStream.seek(firstBlockStart);
+        Block.readFromInputStream(version.major, cramSeekableStream);
+        long blockByteSize = cramSeekableStream.position() - firstBlockStart - crcByteSize;
+        cramSeekableStream.seek(firstBlockStart);
+        final byte[] blockBytes = InputStreamUtils.readFully(cramSeekableStream, (int) blockByteSize);
+        crcBytes = InputStreamUtils.readFully(cramSeekableStream, crcByteSize);
+        digester = new CRC32();
+        digester.update(blockBytes);
+        Assert.assertEquals(CramInt.int32(crcBytes), (int) digester.getValue());
+    }
+}
diff --git a/src/tests/java/htsjdk/samtools/cram/build/CompressionHeaderFactoryTest.java b/src/test/java/htsjdk/samtools/cram/build/CompressionHeaderFactoryTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/cram/build/CompressionHeaderFactoryTest.java
rename to src/test/java/htsjdk/samtools/cram/build/CompressionHeaderFactoryTest.java
diff --git a/src/tests/java/htsjdk/samtools/cram/build/ContainerFactoryTest.java b/src/test/java/htsjdk/samtools/cram/build/ContainerFactoryTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/cram/build/ContainerFactoryTest.java
rename to src/test/java/htsjdk/samtools/cram/build/ContainerFactoryTest.java
diff --git a/src/tests/java/htsjdk/samtools/cram/build/ContainerParserTest.java b/src/test/java/htsjdk/samtools/cram/build/ContainerParserTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/cram/build/ContainerParserTest.java
rename to src/test/java/htsjdk/samtools/cram/build/ContainerParserTest.java
diff --git a/src/tests/java/htsjdk/samtools/cram/build/CramIOTest.java b/src/test/java/htsjdk/samtools/cram/build/CramIOTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/cram/build/CramIOTest.java
rename to src/test/java/htsjdk/samtools/cram/build/CramIOTest.java
diff --git a/src/tests/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanTest.java b/src/test/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanTest.java
rename to src/test/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanTest.java
diff --git a/src/tests/java/htsjdk/samtools/cram/encoding/rans/RansTest.java b/src/test/java/htsjdk/samtools/cram/encoding/rans/RansTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/cram/encoding/rans/RansTest.java
rename to src/test/java/htsjdk/samtools/cram/encoding/rans/RansTest.java
diff --git a/src/tests/java/htsjdk/samtools/cram/io/ITF8Test.java b/src/test/java/htsjdk/samtools/cram/io/ITF8Test.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/cram/io/ITF8Test.java
rename to src/test/java/htsjdk/samtools/cram/io/ITF8Test.java
diff --git a/src/tests/java/htsjdk/samtools/cram/io/LTF8Test.java b/src/test/java/htsjdk/samtools/cram/io/LTF8Test.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/cram/io/LTF8Test.java
rename to src/test/java/htsjdk/samtools/cram/io/LTF8Test.java
diff --git a/src/tests/java/htsjdk/samtools/cram/lossy/QualityScorePreservationTest.java b/src/test/java/htsjdk/samtools/cram/lossy/QualityScorePreservationTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/cram/lossy/QualityScorePreservationTest.java
rename to src/test/java/htsjdk/samtools/cram/lossy/QualityScorePreservationTest.java
diff --git a/src/tests/java/htsjdk/samtools/cram/ref/EnaRefServiceTest.java b/src/test/java/htsjdk/samtools/cram/ref/EnaRefServiceTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/cram/ref/EnaRefServiceTest.java
rename to src/test/java/htsjdk/samtools/cram/ref/EnaRefServiceTest.java
diff --git a/src/tests/java/htsjdk/samtools/cram/structure/CramCompressionRecordTest.java b/src/test/java/htsjdk/samtools/cram/structure/CramCompressionRecordTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/cram/structure/CramCompressionRecordTest.java
rename to src/test/java/htsjdk/samtools/cram/structure/CramCompressionRecordTest.java
diff --git a/src/tests/java/htsjdk/samtools/cram/structure/ReadTagTest.java b/src/test/java/htsjdk/samtools/cram/structure/ReadTagTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/cram/structure/ReadTagTest.java
rename to src/test/java/htsjdk/samtools/cram/structure/ReadTagTest.java
diff --git a/src/tests/java/htsjdk/samtools/cram/structure/SliceTests.java b/src/test/java/htsjdk/samtools/cram/structure/SliceTests.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/cram/structure/SliceTests.java
rename to src/test/java/htsjdk/samtools/cram/structure/SliceTests.java
diff --git a/src/tests/java/htsjdk/samtools/cram/structure/SubstitutionMatrixTest.java b/src/test/java/htsjdk/samtools/cram/structure/SubstitutionMatrixTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/cram/structure/SubstitutionMatrixTest.java
rename to src/test/java/htsjdk/samtools/cram/structure/SubstitutionMatrixTest.java
diff --git a/src/tests/java/htsjdk/samtools/fastq/.gitignore b/src/test/java/htsjdk/samtools/fastq/.gitignore
similarity index 100%
rename from src/tests/java/htsjdk/samtools/fastq/.gitignore
rename to src/test/java/htsjdk/samtools/fastq/.gitignore
diff --git a/src/test/java/htsjdk/samtools/fastq/FastqRecordTest.java b/src/test/java/htsjdk/samtools/fastq/FastqRecordTest.java
new file mode 100644
index 0000000..f6f238e
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/fastq/FastqRecordTest.java
@@ -0,0 +1,204 @@
+package htsjdk.samtools.fastq;
+
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+public final class FastqRecordTest {
+
+    @Test
+    public void testBasic() {
+        final String seqHeaderPrefix = "FAKE0003 Original version has Solexa scores from 62 to -5 inclusive (in that order)";
+        final String seqLine = "ACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGT";
+        final String qualHeaderPrefix = "";
+        final String qualLine = ";<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~";
+        final FastqRecord fastqRecord = new FastqRecord(seqHeaderPrefix, seqLine, qualHeaderPrefix, qualLine);
+
+        Assert.assertNull(fastqRecord.getBaseQualityHeader());
+
+        Assert.assertEquals(fastqRecord.getReadHeader(), seqHeaderPrefix);
+        Assert.assertEquals(fastqRecord.getBaseQualityString(), qualLine);
+        Assert.assertEquals(fastqRecord.getReadString(), seqLine);
+        Assert.assertNotNull(fastqRecord.toString()); // just check that toString() returns non-null
+        Assert.assertNotEquals(fastqRecord, null);
+        Assert.assertFalse(fastqRecord.equals(null));
+        Assert.assertNotEquals(null, fastqRecord);
+        Assert.assertEquals(fastqRecord, fastqRecord);
+        Assert.assertNotEquals(fastqRecord, "fred");
+        Assert.assertNotEquals("fred", fastqRecord);
+        Assert.assertEquals(fastqRecord.length(), seqLine.length());
+        Assert.assertEquals(fastqRecord.getBaseQualityString().length(), fastqRecord.getReadString().length());
+        Assert.assertEquals(fastqRecord.getReadString().length(), fastqRecord.length());
+    }
+
+    @Test
+    public void testBasicEmptyHeaderPrefix() {
+        final String seqHeaderPrefix = "";
+        final String seqLine = "ACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGT";
+        final String qualHeaderPrefix = "";
+        final String qualLine = ";<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~";
+        final FastqRecord fastqRecord = new FastqRecord(seqHeaderPrefix, seqLine, qualHeaderPrefix, qualLine);
+        Assert.assertNull(fastqRecord.getReadHeader());
+        Assert.assertNull(fastqRecord.getBaseQualityHeader());
+    }
+
+    @Test
+    public void testCopy() {
+        final String seqHeaderPrefix = "FAKE0003 Original version has Solexa scores from 62 to -5 inclusive (in that order)";
+        final String seqLine = "ACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGTACGT";
+        final String qualHeaderPrefix = "";
+        final String qualLine = ";<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~";
+        final FastqRecord fastqRecord = new FastqRecord(seqHeaderPrefix, seqLine, qualHeaderPrefix, qualLine);
+        final FastqRecord fastqRecordCopy = new FastqRecord(fastqRecord);
+
+        Assert.assertEquals(fastqRecord, fastqRecordCopy);
+        Assert.assertNotSame(fastqRecord, fastqRecordCopy);
+        Assert.assertSame(fastqRecord.getReadString(), fastqRecordCopy.getReadString());
+        Assert.assertSame(fastqRecord.getBaseQualityString(), fastqRecordCopy.getBaseQualityString());
+        Assert.assertSame(fastqRecord.getBaseQualityHeader(), fastqRecordCopy.getBaseQualityHeader());
+    }
+
+    @Test
+    public void testNullSeq() {
+        final String seqHeaderPrefix = "header";
+        final String seqLine = null;
+        final String qualHeaderPrefix = "";
+        final String qualLine = ";<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~";
+        new FastqRecord(seqHeaderPrefix, seqLine, qualHeaderPrefix, qualLine);
+        //Note: this does not blow up now, but it will once we enforce a non-null seqLine
+    }
+
+    @Test
+    public void testEqualsWithNullHeader() {
+        final String seqLine = "GATTACA";
+        final String qualHeaderPrefix = "";
+        final String qualLine = "ABCDEFG";
+        final FastqRecord fastqRecord1 = new FastqRecord("", seqLine, qualHeaderPrefix, qualLine);
+        final FastqRecord fastqRecord2 = new FastqRecord("header", seqLine, qualHeaderPrefix, qualLine);
+        Assert.assertNotEquals(fastqRecord1, fastqRecord2);
+        Assert.assertNotEquals(fastqRecord2, fastqRecord1);
+
+        Assert.assertNotEquals(fastqRecord1.hashCode(), fastqRecord2.hashCode());
+        Assert.assertNotEquals(fastqRecord2.hashCode(), fastqRecord1.hashCode());
+        Assert.assertEquals(fastqRecord1.hashCode(), fastqRecord1.hashCode());
+        Assert.assertEquals(fastqRecord2.hashCode(), fastqRecord2.hashCode());
+    }
+
+    @Test
+    public void testEqualsWithNullSeqLine() {
+        final String seqLine = "GATTACA";
+        final String qualHeaderPrefix = "";
+        final String qualLine = "ABCDEFG";
+        final FastqRecord fastqRecord1 = new FastqRecord("", null, qualHeaderPrefix, qualLine);
+        final FastqRecord fastqRecord2 = new FastqRecord("header", seqLine, qualHeaderPrefix, qualLine);
+        Assert.assertNotEquals(fastqRecord1, fastqRecord2);
+        Assert.assertNotEquals(fastqRecord2, fastqRecord1);
+    }
+
+    @Test
+    public void testEqualsWithNullQualLine() {
+        final String seqLine = "GATTACA";
+        final String qualHeaderPrefix = "";
+        final String qualLine = "ABCDEFG";
+        final FastqRecord fastqRecord1 = new FastqRecord("", seqLine, qualHeaderPrefix, null);
+        final FastqRecord fastqRecord2 = new FastqRecord("header", seqLine, qualHeaderPrefix, qualLine);
+        Assert.assertNotEquals(fastqRecord1, fastqRecord2);
+        Assert.assertNotEquals(fastqRecord2, fastqRecord1);
+    }
+
+    @Test
+    public void testEqualsWithNullBaseQualityHeader() {
+        final String seqHeaderPrefix = "header";
+        final String seqLine = "GATTACA";
+        final String qualLine = "ABCDEFG";
+        final FastqRecord fastqRecord1 = new FastqRecord(seqHeaderPrefix, seqLine, null, qualLine);
+        final FastqRecord fastqRecord2 = new FastqRecord(seqHeaderPrefix, seqLine, "qualHeaderPrefix", qualLine);
+        Assert.assertNotEquals(fastqRecord1, fastqRecord2);
+        Assert.assertNotEquals(fastqRecord2, fastqRecord1);
+
+        Assert.assertNotEquals(fastqRecord1.hashCode(), fastqRecord2.hashCode());
+        Assert.assertNotEquals(fastqRecord2.hashCode(), fastqRecord1.hashCode());
+        Assert.assertEquals(fastqRecord1.hashCode(), fastqRecord1.hashCode());
+        Assert.assertEquals(fastqRecord2.hashCode(), fastqRecord2.hashCode());
+    }
+
+    @Test
+    public void testNullQual() {
+        final String seqHeaderPrefix = "header";
+        final String seqLine = "GATTACA";
+        new FastqRecord(seqHeaderPrefix, seqLine, "qualHeaderPrefix", null);
+        //Note: this does not blow up now, but it will once we enforce non-null quals
+    }
+
+    @Test
+    public void testNullString() {
+        final String seqHeaderPrefix = "header";
+        final String qualLine = "GATTACA";
+        new FastqRecord(seqHeaderPrefix, null, "qualHeaderPrefix", qualLine);
+        //Note: this does not blow up now, but it will once we enforce a non-null seqLine
+    }
+
+    @Test
+    public void testEmptyQual() {
+        final String seqHeaderPrefix = "header";
+        final String seqLine = "GATTACA";
+        new FastqRecord(seqHeaderPrefix, seqLine, "qualHeaderPrefix", "");
+        //Note: this does not blow up now, but it will once we enforce non-empty quals
+    }
+
+    @Test
+    public void testEmptyString() {
+        final String seqHeaderPrefix = "header";
+        final String qualLine = "GATTACA";
+        new FastqRecord(seqHeaderPrefix, "", "qualHeaderPrefix", qualLine);
+        //Note: this does not blow up now, but it will once we enforce a non-empty seqLine
+    }
+
+    @Test
+    public void testNotEqualQuals() {
+        final String seqLine1 = "GATTACA";
+        final String qualLine1 = "ABCDEFG";
+
+        final String seqLine2 = seqLine1;
+        final String qualLine2 = seqLine2.replace('A', 'X');
+
+        final FastqRecord fastqRecord1 = new FastqRecord("header", seqLine1, "qualHeaderPrefix", qualLine1);
+        final FastqRecord fastqRecord2 = new FastqRecord("header", seqLine2, "qualHeaderPrefix", qualLine2);
+        Assert.assertNotEquals(fastqRecord1, fastqRecord2);
+        Assert.assertNotEquals(fastqRecord2, fastqRecord1);
+
+        Assert.assertEquals(fastqRecord1.getReadString(), fastqRecord2.getReadString());
+        Assert.assertNotEquals(fastqRecord1.getBaseQualityString(), fastqRecord2.getBaseQualityString());
+
+        Assert.assertNotEquals(fastqRecord1.hashCode(), fastqRecord2.hashCode());
+        Assert.assertNotEquals(fastqRecord2.hashCode(), fastqRecord1.hashCode());
+    }
+
+    @Test
+    public void testNotEqualStrings() {
+        final String seqLine1 = "GATTACA";
+        final String qualLine1 = "ABCDEFG";
+
+        final String seqLine2 = seqLine1.replace('A', 'X');
+        final String qualLine2 = qualLine1;
+
+        final FastqRecord fastqRecord1 = new FastqRecord("header", seqLine1, "qualHeaderPrefix", qualLine1);
+        final FastqRecord fastqRecord2 = new FastqRecord("header", seqLine2, "qualHeaderPrefix", qualLine2);
+        Assert.assertNotEquals(fastqRecord1, fastqRecord2);
+        Assert.assertNotEquals(fastqRecord2, fastqRecord1);
+
+        Assert.assertNotEquals(fastqRecord1.getReadString(), fastqRecord2.getReadString());
+        Assert.assertEquals(fastqRecord1.getBaseQualityString(), fastqRecord2.getBaseQualityString());
+
+        Assert.assertNotEquals(fastqRecord1.hashCode(), fastqRecord2.hashCode());
+        Assert.assertNotEquals(fastqRecord2.hashCode(), fastqRecord1.hashCode());
+    }
+
+    @Test
+    public void testNotEqualLengths() {
+        final String seqLine1 = "GATTACA";
+        final String qualLine1 = seqLine1 + "X";
+
+        new FastqRecord("header", seqLine1, "qualHeaderPrefix", qualLine1);
+        //Note: this does not blow up now, but it will once we enforce that seqLine and qualLine have the same length
+    }
+}
\ No newline at end of file
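
The tests above pin down the FastqRecord contract: an empty header prefix is surfaced as null, length() follows the read string, and the copy constructor yields an equal record. A minimal usage sketch of that contract, using only the calls exercised above (the class name is hypothetical; the assert statements require -ea):

    import htsjdk.samtools.fastq.FastqRecord;

    // Hypothetical sketch restating the FastqRecord behaviour checked above.
    final class FastqRecordUsageSketch {
        public static void main(final String[] args) {
            final FastqRecord rec = new FastqRecord("read1", "GATTACA", "", "ABCDEFG");
            assert rec.getBaseQualityHeader() == null;  // empty quality-header prefix becomes null
            assert rec.length() == "GATTACA".length();  // length() tracks the read string
            assert rec.equals(new FastqRecord(rec));    // copy constructor produces an equal record
        }
    }
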
diff --git a/src/test/java/htsjdk/samtools/fastq/FastqWriterTest.java b/src/test/java/htsjdk/samtools/fastq/FastqWriterTest.java
new file mode 100644
index 0000000..eba5c5b
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/fastq/FastqWriterTest.java
@@ -0,0 +1,73 @@
+/*
+ * The MIT License
+ *
+ * Pierre Lindenbaum PhD
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools.fastq;
+
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import htsjdk.samtools.util.TestUtil;
+
+import java.io.File;
+import java.util.ArrayList;
+
+/**
+ * Tests FASTQ read/write round-trips and FastqRecord serialization.
+ */
+public class FastqWriterTest {
+    private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools/util/QualityEncodingDetectorTest");
+
+    @DataProvider(name = "fastqsource")
+    public Object[][] createTestData() {
+        return new Object[][]{
+                {"solexa_full_range_as_solexa.fastq"},
+                {"5k-30BB2AAXX.3.aligned.sam.fastq"}
+        };
+    }
+
+    @Test(dataProvider = "fastqsource")
+    public void testReadReadWriteFastq(final String basename) throws Exception {
+        final File tmpFile = File.createTempFile("test.", ".fastq");
+        tmpFile.deleteOnExit();
+        final FastqReader fastqReader = new FastqReader(new File(TEST_DATA_DIR,basename));
+        final FastqWriterFactory writerFactory = new FastqWriterFactory();
+        final FastqWriter fastqWriter = writerFactory.newWriter(tmpFile);
+        for(final FastqRecord rec: fastqReader) fastqWriter.write(rec);
+        fastqWriter.close();
+        fastqReader.close();
+    }
+    
+    @Test(dataProvider = "fastqsource")
+    public void testFastqSerialize(final String basename) throws Exception {
+        //write 
+        final ArrayList<FastqRecord> records = new ArrayList<>();
+        final FastqReader fastqReader = new FastqReader(new File(TEST_DATA_DIR,basename));
+        for(final FastqRecord rec: fastqReader) {
+            records.add(rec);
+            if(records.size()>100) break;
+        }
+        fastqReader.close();
+        Assert.assertEquals(TestUtil.serializeAndDeserialize(records),records);
+    }
+}
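
The round-trip test above reads an existing FASTQ file and writes it back; the same writer API also serves for records built in memory. A minimal sketch using only the factory and writer calls exercised above (the output path is hypothetical):

    import htsjdk.samtools.fastq.FastqRecord;
    import htsjdk.samtools.fastq.FastqWriter;
    import htsjdk.samtools.fastq.FastqWriterFactory;

    import java.io.File;

    // Hypothetical sketch: writing an in-memory record with the same
    // FastqWriterFactory/FastqWriter calls used by FastqWriterTest above.
    final class FastqWriteSketch {
        public static void main(final String[] args) throws Exception {
            final File out = File.createTempFile("example.", ".fastq"); // hypothetical output location
            out.deleteOnExit();
            final FastqWriter writer = new FastqWriterFactory().newWriter(out);
            writer.write(new FastqRecord("read1", "GATTACA", "", "ABCDEFG"));
            writer.close();
        }
    }
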
diff --git a/src/tests/java/htsjdk/samtools/filter/FailsVendorReadQualityFilterTest.java b/src/test/java/htsjdk/samtools/filter/FailsVendorReadQualityFilterTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/filter/FailsVendorReadQualityFilterTest.java
rename to src/test/java/htsjdk/samtools/filter/FailsVendorReadQualityFilterTest.java
diff --git a/src/tests/java/htsjdk/samtools/filter/InsertSizeFilterTest.java b/src/test/java/htsjdk/samtools/filter/InsertSizeFilterTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/filter/InsertSizeFilterTest.java
rename to src/test/java/htsjdk/samtools/filter/InsertSizeFilterTest.java
diff --git a/src/test/java/htsjdk/samtools/filter/JavascriptSamRecordFilterTest.java b/src/test/java/htsjdk/samtools/filter/JavascriptSamRecordFilterTest.java
new file mode 100644
index 0000000..7835576
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/filter/JavascriptSamRecordFilterTest.java
@@ -0,0 +1,74 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2015 Pierre Lindenbaum @yokofakun Institut du Thorax - Nantes - France
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools.filter;
+
+import htsjdk.samtools.SAMRecordIterator;
+import htsjdk.samtools.SamReader;
+import htsjdk.samtools.SamReaderFactory;
+import htsjdk.samtools.util.CloserUtil;
+
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * @author Pierre Lindenbaum PhD Institut du Thorax - INSERM - Nantes - France
+ */
+
+public class JavascriptSamRecordFilterTest {
+    final File testDir = new File("./src/test/resources/htsjdk/samtools");
+
+    @DataProvider
+    public Object[][] jsData() {
+        return new Object[][] { { "unsorted.sam", "samFilter01.js", 8 }, { "unsorted.sam", "samFilter02.js", 10 }, };
+    }
+
+    @Test(dataProvider = "jsData")
+    public void testJavascriptFilters(final String samFile, final String javascriptFile, final int expectCount) {
+        final SamReaderFactory srf = SamReaderFactory.makeDefault();
+        final SamReader samReader = srf.open(new File(testDir, samFile));
+        final JavascriptSamRecordFilter filter;
+        try {
+            filter = new JavascriptSamRecordFilter(new File(testDir, javascriptFile),
+                    samReader.getFileHeader());    
+        } catch (IOException err) {
+            Assert.fail("Cannot read script", err);
+            return;
+        }
+        final SAMRecordIterator iter = samReader.iterator();
+        int count = 0;
+        while (iter.hasNext()) {
+            if (filter.filterOut(iter.next())) {
+                continue;
+            }
+            ++count;
+        }
+        iter.close();
+        CloserUtil.close(samReader);
+        Assert.assertEquals(count, expectCount, "Expected number of reads " + expectCount + " but got " + count);
+    }
+}
diff --git a/src/tests/java/htsjdk/samtools/filter/MappingQualityFilterTest.java b/src/test/java/htsjdk/samtools/filter/MappingQualityFilterTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/filter/MappingQualityFilterTest.java
rename to src/test/java/htsjdk/samtools/filter/MappingQualityFilterTest.java
diff --git a/src/tests/java/htsjdk/samtools/filter/OverclippedReadFilterTest.java b/src/test/java/htsjdk/samtools/filter/OverclippedReadFilterTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/filter/OverclippedReadFilterTest.java
rename to src/test/java/htsjdk/samtools/filter/OverclippedReadFilterTest.java
diff --git a/src/tests/java/htsjdk/samtools/filter/SolexaNoiseFilterTest.java b/src/test/java/htsjdk/samtools/filter/SolexaNoiseFilterTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/filter/SolexaNoiseFilterTest.java
rename to src/test/java/htsjdk/samtools/filter/SolexaNoiseFilterTest.java
diff --git a/src/tests/java/htsjdk/samtools/filter/TagFilterTest.java b/src/test/java/htsjdk/samtools/filter/TagFilterTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/filter/TagFilterTest.java
rename to src/test/java/htsjdk/samtools/filter/TagFilterTest.java
diff --git a/src/test/java/htsjdk/samtools/liftover/LiftOverTest.java b/src/test/java/htsjdk/samtools/liftover/LiftOverTest.java
new file mode 100644
index 0000000..8e9f92e
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/liftover/LiftOverTest.java
@@ -0,0 +1,468 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools.liftover;
+
+import htsjdk.samtools.util.Interval;
+import htsjdk.samtools.util.OverlapDetector;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.PrintWriter;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+
+/**
+ * @author alecw at broadinstitute.org
+ */
+public class LiftOverTest {
+    private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools/liftover");
+    private static final File CHAIN_FILE = new File(TEST_DATA_DIR, "hg18ToHg19.over.chain");
+
+    private LiftOver liftOver;
+    Map<String, Set<String>> contigMap;
+
+    @BeforeClass
+    public void initLiftOver() {
+        liftOver = new LiftOver(CHAIN_FILE);
+        contigMap = liftOver.getContigMap();
+    }
+
+    @Test(dataProvider = "testIntervals")
+    public void testBasic(final Interval in, final Interval expected) {
+        final Interval out = liftOver.liftOver(in);
+        Assert.assertEquals(out, expected);
+
+    }
+
+    @DataProvider(name = "testIntervals")
+    public Object[][] makeTestIntervals() {
+        return new Object[][] {
+                {new Interval("chr3", 50911035, 50911051), null},
+                {new Interval("chr1", 16776377, 16776452),    new Interval("chr1", 16903790, 16903865)},
+                {new Interval("chr2", 30575990, 30576065),    new Interval("chr2", 30722486, 30722561)},
+                {new Interval("chr3", 12157217, 12157292),    new Interval("chr3", 12182217, 12182292)},
+                {new Interval("chr4", 12503121, 12503196),    new Interval("chr4", 12894023, 12894098)},
+                {new Interval("chr5", 13970930, 13971005),    new Interval("chr5", 13917930, 13918005)},
+                {new Interval("chr6", 13838774, 13838849),    new Interval("chr6", 13730795, 13730870)},
+                {new Interval("chr7", 23978336, 23978411),    new Interval("chr7", 24011811, 24011886)},
+                {new Interval("chr8", 13337368, 13337443),    new Interval("chr8", 13292997, 13293072)},
+                {new Interval("chr9", 35059282, 35059357),    new Interval("chr9", 35069282, 35069357)},
+                {new Interval("chr10", 7893794, 7893869),     new Interval("chr10", 7853788, 7853863)},
+                {new Interval("chr11", 17365784, 17365859),   new Interval("chr11", 17409208, 17409283)},
+                {new Interval("chr12", 4530193, 4530268),     new Interval("chr12", 4659932, 4660007)},
+                {new Interval("chr13", 29398707, 29398782),   new Interval("chr13", 30500707, 30500782)},
+                {new Interval("chr14", 22955252, 22955327),   new Interval("chr14", 23885412, 23885487)},
+                {new Interval("chr15", 27477379, 27477454),   new Interval("chr15", 29690087, 29690162)},
+                {new Interval("chr16", 13016380, 13016455),   new Interval("chr16", 13108879, 13108954)},
+                {new Interval("chr17", 28318218, 28318293),   new Interval("chr17", 31294105, 31294180)},
+                {new Interval("chr18", 42778225, 42778300),   new Interval("chr18", 44524227, 44524302)},
+                {new Interval("chr19", 8340119, 8340194),     new Interval("chr19", 8434119, 8434194)},
+                {new Interval("chr20", 39749226, 39749301),   new Interval("chr20", 40315812, 40315887)},
+                {new Interval("chr21", 20945136, 20945211),   new Interval("chr21", 22023265, 22023340)},
+                {new Interval("chr22", 32307031, 32307106),   new Interval("chr22", 33977031, 33977106)},
+                {new Interval("chrX", 34252958, 34253033) ,   new Interval("chrX", 34343037, 34343112)},
+                // Sampling from /seq/references/HybSelOligos/whole_exome_refseq_coding/whole_exome_refseq_coding.targets.interval_list
+                {new Interval("chr1", 58952, 59873),	new Interval("chr1", 69089, 70010)},
+                {new Interval("chr1", 7733844, 7734041),	new Interval("chr1", 7811257, 7811454)},
+                {new Interval("chr1", 16261179, 16261276),	new Interval("chr1", 16388592, 16388689)},
+                {new Interval("chr1", 23634929, 23635110),	new Interval("chr1", 23762342, 23762523)},
+                {new Interval("chr1", 31910910, 31911030),	new Interval("chr1", 32138323, 32138443)},
+                {new Interval("chr1", 39686851, 39687024),	new Interval("chr1", 39914264, 39914437)},
+                {new Interval("chr1", 46434068, 46434185),	new Interval("chr1", 46661481, 46661598)},
+                {new Interval("chr1", 60102890, 60102928),	new Interval("chr1", 60330302, 60330340)},
+                {new Interval("chr1", 84734151, 84734336),	new Interval("chr1", 84961563, 84961748)},
+                {new Interval("chr1", 100529545, 100529650),	new Interval("chr1", 100756957, 100757062)},
+                {new Interval("chr1", 114771320, 114771441),	new Interval("chr1", 114969797, 114969918)},
+                {new Interval("chr1", 148564831, 148564965),	new Interval("chr1", 150298207, 150298341)},
+                {new Interval("chr1", 153293008, 153293090),	new Interval("chr1", 155026384, 155026466)},
+                {new Interval("chr1", 158167550, 158167677),	new Interval("chr1", 159900926, 159901053)},
+                {new Interval("chr1", 169444555, 169444718),	new Interval("chr1", 171177931, 171178094)},
+                {new Interval("chr1", 183535970, 183536100),	new Interval("chr1", 185269347, 185269477)},
+                {new Interval("chr1", 201411300, 201411508),	new Interval("chr1", 203144677, 203144885)},
+                {new Interval("chr1", 212862043, 212862249),	new Interval("chr1", 214795420, 214795626)},
+                {new Interval("chr1", 228992499, 228992560),	new Interval("chr1", 230925876, 230925937)},
+                {new Interval("chr1", 246268191, 246269133),	new Interval("chr1", 248201568, 248202510)},
+                {new Interval("chr2", 25027765, 25027929),	new Interval("chr2", 25174261, 25174425)},
+                {new Interval("chr2", 32572109, 32572240),	new Interval("chr2", 32718605, 32718736)},
+                {new Interval("chr2", 53988959, 53989061),	new Interval("chr2", 54135455, 54135557)},
+                {new Interval("chr2", 71749748, 71749847),	new Interval("chr2", 71896240, 71896339)},
+                {new Interval("chr2", 96059879, 96060011),	new Interval("chr2", 96696152, 96696284)},
+                {new Interval("chr2", 109923614, 109923763),	new Interval("chr2", 110566325, 110566474)},
+                {new Interval("chr2", 130655571, 130655646),	new Interval("chr2", 130939101, 130939176)},
+                {new Interval("chr2", 159228028, 159228205),	new Interval("chr2", 159519782, 159519959)},
+                {new Interval("chr2", 172639236, 172639282),	new Interval("chr2", 172930990, 172931036)},
+                {new Interval("chr2", 189558634, 189558751),	new Interval("chr2", 189850389, 189850506)},
+                {new Interval("chr2", 203547300, 203547466),	new Interval("chr2", 203839055, 203839221)},
+                {new Interval("chr2", 219578985, 219579191),	new Interval("chr2", 219870741, 219870947)},
+                {new Interval("chr2", 232982284, 232982404),	new Interval("chr2", 233274040, 233274160)},
+                {new Interval("chr3", 3114819, 3114976),	new Interval("chr3", 3139819, 3139976)},
+                {new Interval("chr3", 16333337, 16333745),	new Interval("chr3", 16358333, 16358741)},
+                {new Interval("chr3", 40183652, 40183736),	new Interval("chr3", 40208648, 40208732)},
+                {new Interval("chr3", 48601077, 48601227),	new Interval("chr3", 48626073, 48626223)},
+                {new Interval("chr3", 52287255, 52287419),	new Interval("chr3", 52312215, 52312379)},
+                {new Interval("chr3", 63979313, 63979425),	new Interval("chr3", 64004273, 64004385)},
+                {new Interval("chr3", 110234255, 110234364),	new Interval("chr3", 108751565, 108751674)},
+                {new Interval("chr3", 126088466, 126088539),	new Interval("chr3", 124605776, 124605849)},
+                {new Interval("chr3", 137600279, 137600363),	new Interval("chr3", 136117589, 136117673)},
+                {new Interval("chr3", 159845116, 159845200),	new Interval("chr3", 158362422, 158362506)},
+                {new Interval("chr3", 185387877, 185387927),	new Interval("chr3", 183905183, 183905233)},
+                {new Interval("chr3", 199065658, 199065715),	new Interval("chr3", 197581261, 197581318)},
+                {new Interval("chr4", 10152742, 10152765),	new Interval("chr4", 10543644, 10543667)},
+                {new Interval("chr4", 47243396, 47243638),	new Interval("chr4", 47548639, 47548881)},
+                {new Interval("chr4", 72632227, 72632303),	new Interval("chr4", 72413363, 72413439)},
+                {new Interval("chr4", 88942682, 88942736),	new Interval("chr4", 88723658, 88723712)},
+                {new Interval("chr4", 114381088, 114381190),	new Interval("chr4", 114161639, 114161741)},
+                {new Interval("chr4", 151338602, 151338707),	new Interval("chr4", 151119152, 151119257)},
+                {new Interval("chr4", 184429225, 184429390),	new Interval("chr4", 184192231, 184192396)},
+                {new Interval("chr5", 14804176, 14804350),	new Interval("chr5", 14751176, 14751350)},
+                {new Interval("chr5", 43687596, 43687745),	new Interval("chr5", 43651839, 43651988)},
+                {new Interval("chr5", 71651730, 71651806),	new Interval("chr5", 71615974, 71616050)},
+                {new Interval("chr5", 95017504, 95017771),	new Interval("chr5", 94991748, 94992015)},
+                {new Interval("chr5", 128984208, 128984352),	new Interval("chr5", 128956309, 128956453)},
+                {new Interval("chr5", 140033038, 140033159),	new Interval("chr5", 140052854, 140052975)},
+                {new Interval("chr5", 153045976, 153046084),	new Interval("chr5", 153065783, 153065891)},
+                {new Interval("chr5", 176255669, 176255768),	new Interval("chr5", 176323063, 176323162)},
+                {new Interval("chr6", 10810586, 10810710),	new Interval("chr6", 10702600, 10702724)},
+                {new Interval("chr6", 30666289, 30666459),	new Interval("chr6", 30558310, 30558480)},
+                {new Interval("chr6", 33082591, 33082598),	new Interval("chr6", 32974613, 32974620)},
+                {new Interval("chr6", 39940185, 39940263),	new Interval("chr6", 39832207, 39832285)},
+                {new Interval("chr6", 50789726, 50789768),	new Interval("chr6", 50681767, 50681809)},
+                {new Interval("chr6", 79721666, 79721720),	new Interval("chr6", 79664947, 79665001)},
+                {new Interval("chr6", 108336822, 108336934),	new Interval("chr6", 108230129, 108230241)},
+                {new Interval("chr6", 131240935, 131241085),	new Interval("chr6", 131199242, 131199392)},
+                {new Interval("chr6", 151799272, 151799384),	new Interval("chr6", 151757579, 151757691)},
+                {new Interval("chr6", 169897302, 169897445),	new Interval("chr6", 170155377, 170155520)},
+                {new Interval("chr7", 17341792, 17341937),	new Interval("chr7", 17375267, 17375412)},
+                {new Interval("chr7", 38875269, 38875380),	new Interval("chr7", 38908744, 38908855)},
+                {new Interval("chr7", 72563000, 72563120),	new Interval("chr7", 72925064, 72925184)},
+                {new Interval("chr7", 89839403, 89839480),	new Interval("chr7", 90001467, 90001544)},
+                {new Interval("chr7", 100063781, 100063867),	new Interval("chr7", 100225845, 100225931)},
+                {new Interval("chr7", 111889559, 111889671),	new Interval("chr7", 112102323, 112102435)},
+                {new Interval("chr7", 133900771, 133900840),	new Interval("chr7", 134250231, 134250300)},
+                {new Interval("chr7", 149124615, 149124769),	new Interval("chr7", 149493682, 149493836)},
+                {new Interval("chr8", 9647462, 9647548),	new Interval("chr8", 9610052, 9610138)},
+                {new Interval("chr8", 27203588, 27203614),	new Interval("chr8", 27147671, 27147697)},
+                {new Interval("chr8", 43171970, 43172044),	new Interval("chr8", 43052813, 43052887)},
+                {new Interval("chr8", 76088775, 76088894),	new Interval("chr8", 75926220, 75926339)},
+                {new Interval("chr8", 103641854, 103642290),	new Interval("chr8", 103572678, 103573114)},
+                {new Interval("chr8", 133913660, 133913828),	new Interval("chr8", 133844478, 133844646)},
+                {new Interval("chr8", 145697031, 145697164),	new Interval("chr8", 145726223, 145726356)},
+                {new Interval("chr9", 26985517, 26985849),	new Interval("chr9", 26995517, 26995849)},
+                {new Interval("chr9", 68496721, 68496793),	new Interval("chr9", 69206901, 69206973)},
+                {new Interval("chr9", 94051959, 94052046),	new Interval("chr9", 95012138, 95012225)},
+                {new Interval("chr9", 110750285, 110750337),	new Interval("chr9", 111710464, 111710516)},
+                {new Interval("chr9", 124416836, 124417782),	new Interval("chr9", 125377015, 125377961)},
+                {new Interval("chr9", 130939690, 130939794),	new Interval("chr9", 131899869, 131899973)},
+                {new Interval("chr9", 138395593, 138395667),	new Interval("chr9", 139275772, 139275846)},
+                {new Interval("chr10", 6048112, 6048310),	new Interval("chr10", 6008106, 6008304)},
+                {new Interval("chr10", 26599573, 26599693),	new Interval("chr10", 26559567, 26559687)},
+                {new Interval("chr10", 51507890, 51507920),	new Interval("chr10", 51837884, 51837914)},
+                {new Interval("chr10", 74343070, 74343234),	new Interval("chr10", 74673064, 74673228)},
+                {new Interval("chr10", 93604764, 93604865),	new Interval("chr10", 93614784, 93614885)},
+                {new Interval("chr10", 101985412, 101985513),	new Interval("chr10", 101995422, 101995523)},
+                {new Interval("chr10", 115325644, 115325755),	new Interval("chr10", 115335654, 115335765)},
+                {new Interval("chr10", 129062310, 129062470),	new Interval("chr10", 129172320, 129172480)},
+                {new Interval("chr11", 1904274, 1904289),	new Interval("chr11", 1947698, 1947713)},
+                {new Interval("chr11", 11928485, 11928607),	new Interval("chr11", 11971909, 11972031)},
+                {new Interval("chr11", 33326642, 33326942),	new Interval("chr11", 33370066, 33370366)},
+                {new Interval("chr11", 55554469, 55555445),	new Interval("chr11", 55797893, 55798869)},
+                {new Interval("chr11", 62505888, 62506060),	new Interval("chr11", 62749312, 62749484)},
+                {new Interval("chr11", 65488560, 65488619),	new Interval("chr11", 65731984, 65732043)},
+                {new Interval("chr11", 71618353, 71618446),	new Interval("chr11", 71940705, 71940798)},
+                {new Interval("chr11", 89174516, 89174750),	new Interval("chr11", 89534868, 89535102)},
+                {new Interval("chr11", 111349955, 111350190),	new Interval("chr11", 111844745, 111844980)},
+                {new Interval("chr11", 120195672, 120195841),	new Interval("chr11", 120690462, 120690631)},
+                {new Interval("chr12", 1089617, 1089776),	new Interval("chr12", 1219356, 1219515)},
+                {new Interval("chr12", 8894021, 8894139),	new Interval("chr12", 9002754, 9002872)},
+                {new Interval("chr12", 26455518, 26455614),	new Interval("chr12", 26564251, 26564347)},
+                {new Interval("chr12", 46663731, 46663788),	new Interval("chr12", 48377464, 48377521)},
+                {new Interval("chr12", 51502394, 51502432),	new Interval("chr12", 53216127, 53216165)},
+                {new Interval("chr12", 55603883, 55604103),	new Interval("chr12", 57317616, 57317836)},
+                {new Interval("chr12", 69218200, 69218280),	new Interval("chr12", 70931933, 70932013)},
+                {new Interval("chr12", 97543837, 97544677),	new Interval("chr12", 99019706, 99020546)},
+                {new Interval("chr12", 108438951, 108439074),	new Interval("chr12", 109954568, 109954691)},
+                {new Interval("chr12", 119021215, 119021343),	new Interval("chr12", 120536832, 120536960)},
+                {new Interval("chr12", 127849755, 127849917),	new Interval("chr12", 129283802, 129283964)},
+                {new Interval("chr13", 28900978, 28901035),	new Interval("chr13", 30002978, 30003035)},
+                {new Interval("chr13", 48646570, 48646698),	new Interval("chr13", 49748569, 49748697)},
+                {new Interval("chr13", 98989699, 98989814),	new Interval("chr13", 100191698, 100191813)},
+                {new Interval("chr14", 20929460, 20929643),	new Interval("chr14", 21859620, 21859803)},
+                {new Interval("chr14", 33338689, 33340068),	new Interval("chr14", 34268938, 34270317)},
+                {new Interval("chr14", 55217155, 55217163),	new Interval("chr14", 56147402, 56147410)},
+                {new Interval("chr14", 71260115, 71260358),	new Interval("chr14", 72190362, 72190605)},
+                {new Interval("chr14", 89806293, 89806451),	new Interval("chr14", 90736540, 90736698)},
+                {new Interval("chr14", 102548185, 102548280),	new Interval("chr14", 103478432, 103478527)},
+                {new Interval("chr15", 31917122, 31918453),	new Interval("chr15", 34129830, 34131161)},
+                {new Interval("chr15", 40481129, 40481302),	new Interval("chr15", 42693837, 42694010)},
+                {new Interval("chr15", 48649374, 48649484),	new Interval("chr15", 50862082, 50862192)},
+                {new Interval("chr15", 61768839, 61768953),	new Interval("chr15", 63981786, 63981900)},
+                {new Interval("chr15", 72115399, 72115456),	new Interval("chr15", 74328346, 74328403)},
+                {new Interval("chr15", 83031858, 83032011),	new Interval("chr15", 85230854, 85231007)},
+                {new Interval("chr16", 79709, 79902),	new Interval("chr16", 139709, 139902)},
+                {new Interval("chr16", 2285590, 2285744),	new Interval("chr16", 2345589, 2345743)},
+                {new Interval("chr16", 14872977, 14873044),	new Interval("chr16", 14965476, 14965543)},
+                {new Interval("chr16", 23611004, 23611155),	new Interval("chr16", 23703503, 23703654)},
+                {new Interval("chr16", 31004784, 31005007),	new Interval("chr16", 31097283, 31097506)},
+                {new Interval("chr16", 55745701, 55745922),	new Interval("chr16", 57188200, 57188421)},
+                {new Interval("chr16", 66647766, 66647830),	new Interval("chr16", 68090265, 68090329)},
+                {new Interval("chr16", 79224415, 79224636),	new Interval("chr16", 80666914, 80667135)},
+                {new Interval("chr17", 1320663, 1320735),	new Interval("chr17", 1373913, 1373985)},
+                {new Interval("chr17", 5304981, 5305155),	new Interval("chr17", 5364257, 5364431)},
+                {new Interval("chr17", 8588568, 8588654),	new Interval("chr17", 8647843, 8647929)},
+                {new Interval("chr17", 18192362, 18192481),	new Interval("chr17", 18251637, 18251756)},
+                {new Interval("chr17", 26514328, 26514522),	new Interval("chr17", 29490202, 29490396)},
+                {new Interval("chr17", 35069238, 35069334),	new Interval("chr17", 37815712, 37815808)},
+                {new Interval("chr17", 38377148, 38377241),	new Interval("chr17", 41123622, 41123715)},
+                {new Interval("chr17", 44472316, 44472454),	new Interval("chr17", 47117317, 47117455)},
+                {new Interval("chr17", 55482984, 55483122),	new Interval("chr17", 58128202, 58128340)},
+                {new Interval("chr17", 64595087, 64595211),	new Interval("chr17", 67083492, 67083616)},
+                {new Interval("chr17", 72814816, 72814876),	new Interval("chr17", 75303221, 75303281)},
+                {new Interval("chr17", 78167687, 78167812),	new Interval("chr17", 80574398, 80574523)},
+                {new Interval("chr18", 19653801, 19653961),	new Interval("chr18", 21399803, 21399963)},
+                {new Interval("chr18", 46766985, 46767455),	new Interval("chr18", 48512987, 48513457)},
+                {new Interval("chr19", 822924, 823120),	new Interval("chr19", 871924, 872120)},
+                {new Interval("chr19", 4200223, 4200327),	new Interval("chr19", 4249223, 4249327)},
+                {new Interval("chr19", 8094666, 8094894),	new Interval("chr19", 8188666, 8188894)},
+                {new Interval("chr19", 11657040, 11657607),	new Interval("chr19", 11796040, 11796607)},
+                {new Interval("chr19", 16298665, 16298844),	new Interval("chr19", 16437665, 16437844)},
+                {new Interval("chr19", 19650533, 19650597),	new Interval("chr19", 19789533, 19789597)},
+                {new Interval("chr19", 42008351, 42008363),	new Interval("chr19", 37316511, 37316523)},
+                {new Interval("chr19", 46446486, 46446567),	new Interval("chr19", 41754646, 41754727)},
+                {new Interval("chr19", 51212087, 51212169),	new Interval("chr19", 46520247, 46520329)},
+                {new Interval("chr19", 55052042, 55052201),	new Interval("chr19", 50360230, 50360389)},
+                {new Interval("chr19", 60200495, 60200669),	new Interval("chr19", 55508683, 55508857)},
+                {new Interval("chr20", 3244380, 3244434),	new Interval("chr20", 3296380, 3296434)},
+                {new Interval("chr20", 25145282, 25145374),	new Interval("chr20", 25197282, 25197374)},
+                {new Interval("chr20", 35182714, 35182855),	new Interval("chr20", 35749300, 35749441)},
+                {new Interval("chr20", 46797751, 46797826),	new Interval("chr20", 47364344, 47364419)},
+                {new Interval("chr20", 61546454, 61546633),	new Interval("chr20", 62076010, 62076189)},
+                {new Interval("chr21", 36666540, 36666701),	new Interval("chr21", 37744670, 37744831)},
+                {new Interval("chr21", 46450176, 46450285),	new Interval("chr21", 47625748, 47625857)},
+                {new Interval("chr22", 22890366, 22890533),	new Interval("chr22", 24560366, 24560533)},
+                {new Interval("chr22", 32487356, 32487465),	new Interval("chr22", 34157356, 34157465)},
+                {new Interval("chr22", 40469028, 40469146),	new Interval("chr22", 42139082, 42139200)},
+                {new Interval("chr22", 49365651, 49365713),	new Interval("chr22", 51018785, 51018847)},
+                {new Interval("chrX", 24135748, 24135895),	new Interval("chrX", 24225827, 24225974)},
+                {new Interval("chrX", 48708293, 48708459),	new Interval("chrX", 48823349, 48823515)},
+                {new Interval("chrX", 69406673, 69406721),	new Interval("chrX", 69489948, 69489996)},
+                {new Interval("chrX", 101459444, 101459531),	new Interval("chrX", 101572788, 101572875)},
+                {new Interval("chrX", 128442357, 128442474),	new Interval("chrX", 128614676, 128614793)},
+                {new Interval("chrX", 152701873, 152701902),	new Interval("chrX", 153048679, 153048708)},
+                {new Interval("chrY", 2715028, 2715646),	new Interval("chrY", 2655028, 2655646)},
+                {new Interval("chrY", 26179988, 26180064),	new Interval("chrY", 27770600, 27770676)},
+                // Some intervals that are flipped in the new genome
+                {new Interval("chr1", 2479704, 2479833, false, "target_549"),        new Interval("chr1", 2494585, 2494714, true, "target_549")},
+                {new Interval("chr1", 2480081, 2480116, false, "target_550"),        new Interval("chr1", 2494302, 2494337, true, "target_550")},
+                {new Interval("chr1", 2481162, 2481308, false, "target_551"),        new Interval("chr1", 2493110, 2493256, true, "target_551")},
+                {new Interval("chr1", 2482263, 2482357, false, "target_552"),        new Interval("chr1", 2492061, 2492155, true, "target_552")},
+                {new Interval("chr1", 2482999, 2483158, false, "target_553"),        new Interval("chr1", 2491260, 2491419, true, "target_553")},
+                {new Interval("chr1", 2484509, 2484638, false, "target_554"),        new Interval("chr1", 2489780, 2489909, true, "target_554")},
+                {new Interval("chr1", 2485143, 2485255, false, "target_555"),        new Interval("chr1", 2489163, 2489275, true, "target_555")},
+                {new Interval("chr1", 2486244, 2486316, false, "target_556"),        new Interval("chr1", 2488102, 2488174, true, "target_556")},
+                {new Interval("chr2", 110735471, 110735558, false, "target_101982"), new Interval("chr2", 110585640, 110585727, true, "target_101982")},
+                {new Interval("chr2", 110735648, 110735831, false, "target_101983"), new Interval("chr2", 110585367, 110585550, true, "target_101983")},
+                {new Interval("chr2", 110736772, 110736922, false, "target_101984"), new Interval("chr2", 110584276, 110584426, true, "target_101984")},
+                {new Interval("chr2", 110737181, 110737322, false, "target_101985"), new Interval("chr2", 110583876, 110584017, true, "target_101985")},
+                {new Interval("chr2", 110737585, 110737747, false, "target_101986"), new Interval("chr2", 110583451, 110583613, true, "target_101986")},
+                {new Interval("chr2", 110738666, 110738793, false, "target_101987"), new Interval("chr2", 110582405, 110582532, true, "target_101987")},
+                {new Interval("chr2", 110738957, 110739136, false, "target_101988"), new Interval("chr2", 110582062, 110582241, true, "target_101988")},
+                {new Interval("chr2", 110739216, 110739401, false, "target_101989"), new Interval("chr2", 110581797, 110581982, true, "target_101989")},
+                {new Interval("chr2", 110741555, 110741768, false, "target_101990"), new Interval("chr2", 110579480, 110579693, true, "target_101990")},
+                {new Interval("chr2", 110743887, 110743978, false, "target_101991"), new Interval("chr2", 110577271, 110577362, true, "target_101991")},
+                {new Interval("chr2", 110750021, 110750220, false, "target_101992"), new Interval("chr2", 110571035, 110571234, true, "target_101992")},
+                {new Interval("chr2", 110754786, 110754935, false, "target_101993"), new Interval("chr2", 110566325, 110566474, true, "target_101993")},
+                {new Interval("chr2", 110755277, 110755511, false, "target_101994"), new Interval("chr2", 110565749, 110565983, true, "target_101994")},
+                {new Interval("chr2", 110759547, 110759703, false, "target_101995"), new Interval("chr2", 110561554, 110561710, true, "target_101995")},
+                {new Interval("chr2", 110760135, 110760250, false, "target_101996"), new Interval("chr2", 110561007, 110561122, true, "target_101996")},
+                {new Interval("chr2", 110761828, 110761899, false, "target_101997"), new Interval("chr2", 110559358, 110559429, true, "target_101997")},
+                {new Interval("chr2", 110769521, 110769596, false, "target_101998"), new Interval("chr2", 110552041, 110552116, true, "target_101998")},
+                {new Interval("chr2", 111012182, 111012298, false, "target_101999"), new Interval("chr2", 108484181, 108484297, true, "target_101999")},
+                {new Interval("chr13", 113547048, 113547139, false, "target_51005"), new Interval("chr13", 114566804, 114566895, true, "target_51005")},
+                {new Interval("chr13", 113547227, 113547397, false, "target_51006"), new Interval("chr13", 114566546, 114566716, true, "target_51006")},
+                {new Interval("chr13", 113562918, 113562946, false, "target_51007"), new Interval("chr13", 114550997, 114551025, true, "target_51007")},
+                {new Interval("chr13", 113564379, 113564445, false, "target_51008"), new Interval("chr13", 114549498, 114549564, true, "target_51008")},
+                {new Interval("chr13", 113571118, 113571244, false, "target_51009"), new Interval("chr13", 114542699, 114542825, true, "target_51009")},
+                {new Interval("chr13", 113572777, 113572903, false, "target_51010"), new Interval("chr13", 114541040, 114541166, true, "target_51010")},
+                {new Interval("chr13", 113575333, 113575459, false, "target_51011"), new Interval("chr13", 114538484, 114538610, true, "target_51011")},
+                {new Interval("chr13", 113576296, 113576421, false, "target_51012"), new Interval("chr13", 114537522, 114537647, true, "target_51012")},
+                {new Interval("chr13", 113578216, 113578338, false, "target_51013"), new Interval("chr13", 114535605, 114535727, true, "target_51013")},
+                {new Interval("chr13", 113578480, 113578673, false, "target_51014"), new Interval("chr13", 114535270, 114535463, true, "target_51014")},
+                {new Interval("chr13", 113582257, 113582425, false, "target_51015"), new Interval("chr13", 114531518, 114531686, true, "target_51015")},
+                {new Interval("chr13", 113583804, 113583976, false, "target_51016"), new Interval("chr13", 114529967, 114530139, true, "target_51016")},
+                {new Interval("chr13", 113587418, 113587597, false, "target_51017"), new Interval("chr13", 114526346, 114526525, true, "target_51017")},
+                {new Interval("chr13", 113588782, 113589014, false, "target_51018"), new Interval("chr13", 114524929, 114525161, true, "target_51018")},
+                {new Interval("chr13", 113589950, 113590108, false, "target_51019"), new Interval("chr13", 114523835, 114523993, true, "target_51019")},
+                {new Interval("chr13", 113599065, 113599236, false, "target_51020"), new Interval("chr13", 114514707, 114514878, true, "target_51020")},
+                {new Interval("chr13", 113605940, 113606087, false, "target_51021"), new Interval("chr13", 114507856, 114508003, true, "target_51021")},
+                {new Interval("chr13", 113609156, 113609319, false, "target_51022"), new Interval("chr13", 114504624, 114504787, true, "target_51022")},
+                {new Interval("chr13", 113610056, 113610145, false, "target_51023"), new Interval("chr13", 114503798, 114503887, true, "target_51023")},
+                {new Interval("chr13", 113611549, 113611633, false, "target_51024"), new Interval("chr13", 114502310, 114502394, true, "target_51024")},
+                {new Interval("chr13", 113615731, 113615824, false, "target_51025"), new Interval("chr13", 114498119, 114498212, true, "target_51025")},
+                {new Interval("chr13", 113641808, 113641874, false, "target_51026"), new Interval("chr13", 114472069, 114472135, true, "target_51026")},
+                {new Interval("chr13", 113644711, 113644857, false, "target_51027"), new Interval("chr13", 114469086, 114469232, true, "target_51027")},
+                {new Interval("chr13", 113651799, 113651848, false, "target_51028"), new Interval("chr13", 114462241, 114462290, true, "target_51028")},
+                {new Interval("chr17", 33541604, 33542176, false, "target_76102"),   new Interval("chr17", 36294030, 36294602, true, "target_76102")},
+                {new Interval("chr17", 33543154, 33543310, false, "target_76103"),   new Interval("chr17", 36292896, 36293052, true, "target_76103")},
+                {new Interval("chr17", 33543677, 33543780, false, "target_76104"),   new Interval("chr17", 36292426, 36292529, true, "target_76104")},
+                {new Interval("chr17", 33544240, 33544309, false, "target_76105"),   new Interval("chr17", 36291897, 36291966, true, "target_76105")},
+                {new Interval("chr17", 33544690, 33544788, false, "target_76106"),   new Interval("chr17", 36291418, 36291516, true, "target_76106")},
+                {new Interval("chr17", 33545498, 33545622, false, "target_76107"),   new Interval("chr17", 36290584, 36290708, true, "target_76107")},
+                {new Interval("chr17", 33547465, 33547578, false, "target_76109"),   new Interval("chr17", 36288629, 36288742, true, "target_76109")},
+                {new Interval("chr17", 33547904, 33548015, false, "target_76110"),   new Interval("chr17", 36288192, 36288303, true, "target_76110")},
+                {new Interval("chr17", 33548455, 33548539, false, "target_76111"),   new Interval("chr17", 36287668, 36287752, true, "target_76111")},
+                {new Interval("chr17", 33549018, 33549061, false, "target_76112"),   new Interval("chr17", 36287146, 36287189, true, "target_76112")},
+                {new Interval("chr17", 33550341, 33550430, false, "target_76113"),   new Interval("chr17", 36285777, 36285866, true, "target_76113")},
+                {new Interval("chr17", 33550589, 33550664, false, "target_76114"),   new Interval("chr17", 36285543, 36285618, true, "target_76114")},
+                {new Interval("chrX", 148575967, 148576994, false, "target_184692"), new Interval("chrX", 148797411, 148798438, true, "target_184692")},
+                {new Interval("chrX", 148577066, 148577143, false, "target_184693"), new Interval("chrX", 148797262, 148797339, true, "target_184693")},
+                {new Interval("chrX", 148578167, 148578266, false, "target_184694"), new Interval("chrX", 148796139, 148796238, true, "target_184694")},
+                {new Interval("chrX", 148579488, 148579587, false, "target_184695"), new Interval("chrX", 148794818, 148794917, true, "target_184695")},
+                {new Interval("chrX", 148603758, 148603770, false, "target_184696"), new Interval("chrX", 148770634, 148770646, true, "target_184696")},
+                // Some that don't map in hg19
+                {new Interval("chr2", 111013693, 111013832), null},
+                {new Interval("chr3", 14174511, 14175398), null},
+                {new Interval("chr3", 50911035, 50911051), null},
+                {new Interval("chr6", 32071709, 32071869), null},
+                {new Interval("chr6", 32072183, 32072358), null},
+                {new Interval("chr6", 32104446, 32104606), null},
+                {new Interval("chr6", 32104920, 32105095), null},
+                {new Interval("chr7", 101995561, 101995739), null},
+                {new Interval("chr7", 142178782, 142178825), null},
+                {new Interval("chr7", 142179850, 142180013), null},
+                {new Interval("chr7", 142181067, 142181324), null},
+                {new Interval("chr7", 142181720, 142181860), null},
+                {new Interval("chr7", 142182157, 142182313), null},
+                {new Interval("chr15", 19335778, 19336302), null},
+                {new Interval("chr17", 33364376, 33364428), null},
+                {new Interval("chr17", 33546162, 33546214), null},
+                {new Interval("chr17", 33706667, 33706736), null},
+                {new Interval("chr17", 59772721, 59772781), null},
+                {new Interval("chr17", 59779355, 59779421), null},
+                {new Interval("chr17", 59781483, 59781540), null},
+                {new Interval("chr17", 59783488, 59783565), null},
+                {new Interval("chr17", 59784584, 59784615), null},
+                {new Interval("chr17", 59786025, 59786136), null},
+                {new Interval("chr17", 59787203, 59787494), null},
+                {new Interval("chr17", 59791235, 59791514), null},
+                {new Interval("chr17", 59794247, 59794502), null},
+                {new Interval("chr17", 59801884, 59802193), null},
+                {new Interval("chr17", 59804685, 59804982), null},
+                {new Interval("chr17", 59817352, 59817382), null},
+                {new Interval("chr17", 59817465, 59817532), null},
+                {new Interval("chr17", 59875754, 59875812), null},
+                {new Interval("chr17", 59875899, 59875944), null},
+                {new Interval("chr17", 59879183, 59879456), null},
+                {new Interval("chr17", 59883988, 59884276), null},
+                {new Interval("chr17", 59887398, 59887512), null},
+                {new Interval("chrX", 48774611, 48775058), null},
+
+        };
+    }
+
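+    // Note: this is a diagnostic smoke test. It prints the PartialLiftover records for intervals that
+    // are known not to lift over and makes no assertions; a failure would only surface as an exception.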
+    @Test(dataProvider = "failingIntervals")
+    public void testDiagnosticLiftover(final Interval fromInterval) {
+        final List<LiftOver.PartialLiftover> partials = liftOver.diagnosticLiftover(fromInterval);
+        System.out.println("Diagnosing " + fromInterval + " (len " + fromInterval.length() + ")");
+        for (final LiftOver.PartialLiftover partial : partials) {
+            System.out.println(partial);
+        }
+    }
+
+    @DataProvider(name = "failingIntervals")
+    public Object[][] makeFailingIntervals() {
+        return new Object[][] {
+                {new Interval("chr3", 50911035, 50911051)},
+                {new Interval("chr2", 111013693, 111013832)},
+                {new Interval("chr3", 14174511, 14175398)},
+                {new Interval("chr3", 50911035, 50911051)},
+                {new Interval("chr6", 32071709, 32071869)},
+                {new Interval("chr6", 32072183, 32072358)},
+                {new Interval("chr6", 32104446, 32104606)},
+                {new Interval("chr6", 32104920, 32105095)},
+                {new Interval("chr7", 101995561, 101995739)},
+                {new Interval("chr7", 142178782, 142178825)},
+                {new Interval("chr7", 142179850, 142180013)},
+                {new Interval("chr7", 142181067, 142181324)},
+                {new Interval("chr7", 142181720, 142181860)},
+                {new Interval("chr7", 142182157, 142182313)},
+                {new Interval("chr15", 19335778, 19336302)},
+                {new Interval("chr17", 33364376, 33364428)},
+                {new Interval("chr17", 33546162, 33546214)},
+                {new Interval("chr17", 33706667, 33706736)},
+                {new Interval("chr17", 59772721, 59772781)},
+                {new Interval("chr17", 59779355, 59779421)},
+                {new Interval("chr17", 59781483, 59781540)},
+                {new Interval("chr17", 59783488, 59783565)},
+                {new Interval("chr17", 59784584, 59784615)},
+                {new Interval("chr17", 59786025, 59786136)},
+                {new Interval("chr17", 59787203, 59787494)},
+                {new Interval("chr17", 59791235, 59791514)},
+                {new Interval("chr17", 59794247, 59794502)},
+                {new Interval("chr17", 59801884, 59802193)},
+                {new Interval("chr17", 59804685, 59804982)},
+                {new Interval("chr17", 59817352, 59817382)},
+                {new Interval("chr17", 59817465, 59817532)},
+                {new Interval("chr17", 59875754, 59875812)},
+                {new Interval("chr17", 59875899, 59875944)},
+                {new Interval("chr17", 59879183, 59879456)},
+                {new Interval("chr17", 59883988, 59884276)},
+                {new Interval("chr17", 59887398, 59887512)},
+                {new Interval("chrX", 48774611, 48775058)},
+
+        };
+    }
+
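+    // Round-trip test for chain serialization: every chain loaded from CHAIN_FILE is written back out,
+    // the temporary file is re-parsed, and the resulting id -> Chain maps are compared for equality.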
+    @Test
+    public void testWriteChain() throws Exception {
+        final OverlapDetector<Chain> chains = Chain.loadChains(CHAIN_FILE);
+        File outFile = File.createTempFile("test.", ".chain");
+        outFile.deleteOnExit();
+        PrintWriter pw = new PrintWriter(outFile);
+        final Map<Integer, Chain> originalChainMap = new TreeMap<Integer, Chain>();
+        for (final Chain chain : chains.getAll()) {
+            chain.write(pw);
+            originalChainMap.put(chain.id, chain);
+        }
+        pw.close();
+
+        final OverlapDetector<Chain> newChains = Chain.loadChains(outFile);
+        final Map<Integer, Chain> newChainMap = new TreeMap<Integer, Chain>();
+        for (final Chain chain : newChains.getAll()) {
+            newChainMap.put(chain.id, chain);
+        }
+        Assert.assertEquals(newChainMap, originalChainMap);
+    }
+
+    @Test(dataProvider = "testIntervals")
+    public void testGetContigMap(final Interval in, final Interval expected) {
+        if (expected != null) {
+            Assert.assertTrue(contigMap.get(in.getContig()).contains(expected.getContig()));
+        }
+    }
+}
diff --git a/src/tests/java/htsjdk/samtools/metrics/MetricBaseTest.java b/src/test/java/htsjdk/samtools/metrics/MetricBaseTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/metrics/MetricBaseTest.java
rename to src/test/java/htsjdk/samtools/metrics/MetricBaseTest.java
diff --git a/src/test/java/htsjdk/samtools/metrics/MetricsFileTest.java b/src/test/java/htsjdk/samtools/metrics/MetricsFileTest.java
new file mode 100644
index 0000000..228d87d
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/metrics/MetricsFileTest.java
@@ -0,0 +1,213 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+
+package htsjdk.samtools.metrics;
+
+import htsjdk.samtools.SAMException;
+import htsjdk.samtools.util.FormatUtil;
+import htsjdk.samtools.util.Histogram;
+import htsjdk.samtools.util.TestUtil;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.Date;
+
+/**
+ * Tests for the various classes in the metrics package.  Constructs a MetricsFile,
+ * populates it with various items, and then ensures that it can be written to disk
+ * and read back without altering any values.
+ *
+ * @author Tim Fennell
+ */
+public class MetricsFileTest {
+    public enum TestEnum {One, Two, Three}
+
+    public static class TestMetric extends MetricBase implements Cloneable, Serializable {
+        private static final long serialVersionUID = 1L;
+
+        public String    STRING_PROP;
+        public Date      DATE_PROP;
+        public Short     SHORT_PROP;
+        public Integer   INTEGER_PROP;
+        public Long      LONG_PROP;
+        public Float     FLOAT_PROP;
+        public Double    DOUBLE_PROP;
+        public TestEnum  ENUM_PROP;
+        public Boolean   BOOLEAN_PROP;
+        public Character CHARACTER_PROP;
+        public short     SHORT_PRIMITIVE;
+        public int       INT_PRIMITIVE;
+        public long      LONG_PRIMITIVE;
+        public float     FLOAT_PRIMITIVE;
+        public double    DOUBLE_PRIMITIVE;
+        public boolean   BOOLEAN_PRIMITIVE;
+        public char      CHAR_PRIMITIVE;
+
+        @Override
+        public TestMetric clone()  {
+            try { return (TestMetric) super.clone(); }
+            catch (CloneNotSupportedException cnse) { throw new SAMException("That's Unpossible!"); }
+        }
+    }
+
+    public static class FloatingPointMetric extends MetricBase{
+        public double DOUBLE_PRIMITIVE;
+        public Double DOUBLE_PROP;
+        public float  FLOAT_PRIMITIVE;
+        public Float FLOAT_PROP;
+    }
+
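+    // Uses values around 1e-19 to check that formatting and re-parsing of very small floating-point
+    // fields preserves equality when a metrics file is written out and read back.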
+    @Test
+    public void testFloatingPointEquality() throws IOException {
+        MetricsFile<FloatingPointMetric,Integer> file = new MetricsFile<FloatingPointMetric,Integer>();
+
+        FloatingPointMetric metric = new FloatingPointMetric();
+        metric.DOUBLE_PRIMITIVE = .0000000000000000001d;
+        metric.DOUBLE_PROP = .0000000000000000001d;
+        metric.FLOAT_PRIMITIVE = .0000000000000000001f;
+        metric.FLOAT_PROP = .0000000000000000001f;
+        file.addMetric(metric);
+
+        MetricsFile<FloatingPointMetric,Integer> file2 = writeThenReadBack(file);
+        Assert.assertEquals(file, file2);
+    }
+
+    @Test
+    public void testWriteMetricsFile() throws IOException, ClassNotFoundException {
+        MetricsFile<TestMetric,Integer> file = new MetricsFile<TestMetric,Integer>();
+        TestMetric metric = new TestMetric();
+        metric.STRING_PROP       = "Hello World";
+        metric.DATE_PROP         = new FormatUtil().parseDate("2008-12-31");
+        metric.SHORT_PROP        = 123;
+        metric.INTEGER_PROP      = null;
+        metric.LONG_PROP         = Long.MAX_VALUE;
+        metric.FLOAT_PROP        = 456.789f;
+        metric.DOUBLE_PROP       = 0.713487;
+        metric.ENUM_PROP         = TestEnum.Two;
+        metric.BOOLEAN_PROP      = false;
+        metric.CHARACTER_PROP    = 'A';
+        metric.SHORT_PRIMITIVE   = 123;
+        metric.INT_PRIMITIVE     = 919834781;
+        metric.LONG_PRIMITIVE    = Long.MAX_VALUE - Integer.MAX_VALUE;
+        metric.FLOAT_PRIMITIVE   = 0.55694f;
+        metric.DOUBLE_PRIMITIVE  = 0.229233;
+        metric.BOOLEAN_PRIMITIVE = true;
+        metric.CHAR_PRIMITIVE    = 'B';
+        file.addMetric(metric);
+
+        MetricsFile<TestMetric,Integer> file2 = writeThenReadBack(file);
+        Assert.assertEquals(file, file2);
+
+        // Now add some headers and run the test again
+        StringHeader stringHeader = new StringHeader();
+        stringHeader.setValue("Hello, I'm a String Header!");
+        file.addHeader(stringHeader);
+
+        VersionHeader version = new VersionHeader();
+        version.setVersionedItem("MetricsFileTest");
+        version.setVersionString("1.0");
+        file.addHeader(version);
+
+        version = new VersionHeader();
+        version.setVersionedItem("Nada");
+        version.setVersionString("0.0alpha1");
+        file.addHeader(version);
+
+        file2 = writeThenReadBack(file);
+        Assert.assertEquals(file, file2);
+
+        // Now add a Histogram and make sure it still works
+        Histogram<Integer> histo = new Histogram<Integer>();
+        histo.setBinLabel("small_number");
+        histo.setValueLabel("big_number");
+        histo.increment(1, 101);
+        histo.increment(2, 202);
+        histo.increment(3, 4000);
+        histo.increment(5, 123981);
+        histo.increment(1000, 10981982);
+        file.setHistogram(histo);
+
+        file2 = writeThenReadBack(file);
+        Assert.assertEquals(file, file2);
+
+        // And lastly add some more metrics rows to the file
+        TestMetric metric2 = metric.clone();
+        metric2.ENUM_PROP = TestEnum.One;
+        metric2.FLOAT_PROP = 0.998f;
+        metric2.STRING_PROP = "Wheeeee!";
+        file.addMetric(metric2);
+
+        metric2 = metric.clone();
+        metric2.ENUM_PROP = TestEnum.Three;
+        metric2.DOUBLE_PRIMITIVE = 1.299d;
+        file.addMetric(metric2);
+
+        file2 = writeThenReadBack(file);
+        Assert.assertEquals(file, file2);
+
+        //Test that we can serialize and deserialize this whole thing
+        MetricsFile<TestMetric, Integer> file3 = TestUtil.serializeAndDeserialize(file);
+
+        Assert.assertEquals(file, file3);
+    }
+
+    @Test
+    public void areMetricsFilesEqualTest(){
+        final File TEST_DIR = new File("src/test/resources/htsjdk/samtools/metrics/");
+        final File file1 = new File(TEST_DIR,"metricsOne.metrics");
+        final File file2 = new File(TEST_DIR,"metricsOneCopy.metrics");
+        final File fileModifiedHist = new File(TEST_DIR,"metricsOneModifiedHistogram.metrics");
+        final File fileModifiedMet = new File(TEST_DIR,"metricsOneModifiedMetrics.metrics");
+
+        Assert.assertTrue(MetricsFile.areMetricsEqual(file1, file2));
+        Assert.assertTrue(MetricsFile.areMetricsEqual(file1, fileModifiedHist));
+
+        Assert.assertFalse(MetricsFile.areMetricsAndHistogramsEqual(file1, fileModifiedHist));
+        Assert.assertFalse(MetricsFile.areMetricsEqual(file1, fileModifiedMet));
+        Assert.assertFalse(MetricsFile.areMetricsAndHistogramsEqual(file1, fileModifiedMet));
+    }
+
+    /** Helper method to persist metrics to file and read them back again. */
+    private <METRIC extends MetricBase> MetricsFile<METRIC, Integer> writeThenReadBack(MetricsFile<METRIC,Integer> in) throws IOException {
+        File f = File.createTempFile("test", ".metrics");
+        f.deleteOnExit();
+        FileWriter out = new FileWriter(f);
+        in.write(out);
+
+        MetricsFile<METRIC,Integer> retval = new MetricsFile<METRIC,Integer>();
+        retval.read(new FileReader(f));
+        return retval;
+    }
+}
diff --git a/src/tests/java/htsjdk/samtools/metrics/StringHeaderTest.java b/src/test/java/htsjdk/samtools/metrics/StringHeaderTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/metrics/StringHeaderTest.java
rename to src/test/java/htsjdk/samtools/metrics/StringHeaderTest.java
diff --git a/src/tests/java/htsjdk/samtools/metrics/VersionHeaderTest.java b/src/test/java/htsjdk/samtools/metrics/VersionHeaderTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/metrics/VersionHeaderTest.java
rename to src/test/java/htsjdk/samtools/metrics/VersionHeaderTest.java
diff --git a/src/tests/java/htsjdk/samtools/reference/FakeReferenceSequenceFile.java b/src/test/java/htsjdk/samtools/reference/FakeReferenceSequenceFile.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/reference/FakeReferenceSequenceFile.java
rename to src/test/java/htsjdk/samtools/reference/FakeReferenceSequenceFile.java
diff --git a/src/test/java/htsjdk/samtools/reference/FastaSequenceFileTest.java b/src/test/java/htsjdk/samtools/reference/FastaSequenceFileTest.java
new file mode 100644
index 0000000..87927f6
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/reference/FastaSequenceFileTest.java
@@ -0,0 +1,85 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2010 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools.reference;
+
+import htsjdk.samtools.util.StringUtil;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.PrintWriter;
+
+/**
+ * @author alecw at broadinstitute.org
+ */
+public class FastaSequenceFileTest {
+    @Test
+    public void testTrailingWhitespace() throws Exception {
+        final File fasta = File.createTempFile("test", ".fasta");
+        fasta.deleteOnExit();
+        final PrintWriter writer = new PrintWriter(fasta);
+        final String chr1 = "chr1";
+        writer.println(">" + chr1);
+        final String sequence = "ACGTACGT";
+        writer.println(sequence);
+        writer.println(sequence + " \t");
+        writer.close();
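+        // The fasta is written with a trailing " \t" on its last sequence line; the assertions below
+        // check that the reader ignores that trailing whitespace and returns only the bases.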
+        final FastaSequenceFile fastaReader = new FastaSequenceFile(fasta, true);
+        final ReferenceSequence referenceSequence = fastaReader.nextSequence();
+        Assert.assertEquals(referenceSequence.getName(), chr1);
+        Assert.assertEquals(StringUtil.bytesToString(referenceSequence.getBases()), sequence + sequence);
+    }
+
+    @Test
+    public void testIntermediateWhitespace() throws Exception {
+        final File fasta = File.createTempFile("test", ".fasta");
+        fasta.deleteOnExit();
+        final PrintWriter writer = new PrintWriter(fasta);
+        final String chr1 = "chr1";
+        writer.println(">" + chr1 + " extra stuff after sequence name");
+        final String sequence = "ACGTACGT";
+        writer.println(sequence + "  ");
+        writer.println(sequence + " \t");
+        writer.println(sequence);
+        writer.close();
+        final FastaSequenceFile fastaReader = new FastaSequenceFile(fasta, true);
+        final ReferenceSequence referenceSequence = fastaReader.nextSequence();
+        Assert.assertEquals(referenceSequence.getName(), chr1);
+        Assert.assertEquals(StringUtil.bytesToString(referenceSequence.getBases()), sequence + sequence + sequence);
+    }
+
+    // There was a bug when reading a fasta with trailing whitespace, only when a sequence dictionary exists.
+    @Test
+    public void testTrailingWhitespaceWithPreexistingSequenceDictionary() throws Exception {
+        final File fasta = new File("src/test/resources/htsjdk/samtools/reference/reference_with_trailing_whitespace.fasta");
+        final FastaSequenceFile fastaReader = new FastaSequenceFile(fasta, true);
+        ReferenceSequence referenceSequence = fastaReader.nextSequence();
+        Assert.assertEquals(referenceSequence.getName(), "chr1");
+        Assert.assertEquals(StringUtil.bytesToString(referenceSequence.getBases()), "ACGTACGT");
+        referenceSequence = fastaReader.nextSequence();
+        Assert.assertEquals(referenceSequence.getName(), "chr2");
+        Assert.assertEquals(StringUtil.bytesToString(referenceSequence.getBases()), "TCGATCGA");
+
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/reference/FastaSequenceIndexTest.java b/src/test/java/htsjdk/samtools/reference/FastaSequenceIndexTest.java
new file mode 100644
index 0000000..bfef121
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/reference/FastaSequenceIndexTest.java
@@ -0,0 +1,256 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+
+package htsjdk.samtools.reference;
+
+import htsjdk.samtools.SAMException;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.Iterator;
+
+/**
+ * Test the fasta sequence index reader.
+ */
+public class FastaSequenceIndexTest {
+    private static File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools/reference");
+
+    @DataProvider(name="homosapiens")
+    public Object[][] provideHomoSapiens() throws FileNotFoundException {
+        final File sequenceIndexFile = new File(TEST_DATA_DIR,"Homo_sapiens_assembly18.fasta.fai");
+        return new Object[][] { new Object[]
+            { new FastaSequenceIndex(sequenceIndexFile) },
+            { new FastaSequenceIndex(sequenceIndexFile.toPath()) } };
+    }
+
+    @DataProvider(name="specialcharacters")
+    public Object[][] provideSpecialCharacters() throws FileNotFoundException {
+        final File sequenceIndexFile = new File(TEST_DATA_DIR,"testing.fai");
+        return new Object[][] { new Object[]
+            { new FastaSequenceIndex(sequenceIndexFile) },
+            { new FastaSequenceIndex(sequenceIndexFile.toPath()) } };
+    }
+
+    @Test(dataProvider="homosapiens")
+    public void testInitialContig(FastaSequenceIndex sequenceIndex) {
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chrM"),"Contig chrM is not present");
+        FastaSequenceIndexEntry entry = sequenceIndex.getIndexEntry("chrM");
+        Assert.assertEquals(entry.getContig(),"chrM","Contig chrM name is incorrect");
+        Assert.assertEquals(entry.getLocation(),6L,"Contig chrM location is incorrect");
+        Assert.assertEquals(entry.getSize(),16571L,"Contig chrM size is incorrect");
+        Assert.assertEquals(entry.getBasesPerLine(),50,"Contig chrM bases per line is incorrect");
+        Assert.assertEquals(entry.getBytesPerLine(),51,"Contig chrM bytes per line is incorrect");
+
+    }
+
+    @Test(dataProvider="homosapiens")
+    public void testMiddleContig(FastaSequenceIndex sequenceIndex) {
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr8"),"Contig chr8 is not present");
+        FastaSequenceIndexEntry entry = sequenceIndex.getIndexEntry("chr8");
+        Assert.assertEquals(entry.getContig(),"chr8","Contig chr8 name is incorrect");
+        Assert.assertEquals(entry.getLocation(),1419403101L,"Contig chr8 location is incorrect");
+        Assert.assertEquals(entry.getSize(),146274826L,"Contig chr8 size is incorrect");
+        Assert.assertEquals(entry.getBasesPerLine(),50,"Contig chr8 bases per line is incorrect");
+        Assert.assertEquals(entry.getBytesPerLine(),51,"Contig chr8 bytes per line is incorrect");
+    }
+
+    @Test(dataProvider="homosapiens")
+    public void testLastContig(FastaSequenceIndex sequenceIndex) {
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chrX_random"),"Contig chrX_random is not present");
+        FastaSequenceIndexEntry entry = sequenceIndex.getIndexEntry("chrX_random");
+        Assert.assertEquals(entry.getContig(),"chrX_random","Contig chrX_random name is incorrect");
+        Assert.assertEquals(entry.getLocation(),3156698441L,"Contig chrX_random location is incorrect");
+        Assert.assertEquals(entry.getSize(),1719168L,"Contig chrX_random size is incorrect");
+        Assert.assertEquals(entry.getBasesPerLine(),50,"Contig chrX_random bases per line is incorrect");
+        Assert.assertEquals(entry.getBytesPerLine(),51,"Contig chrX_random bytes per line is incorrect");
+    }
+
+    @Test(dataProvider="homosapiens")
+    public void testAllContigsPresent(FastaSequenceIndex sequenceIndex) {
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chrM"),"Contig chrM is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr1"),"Contig chr1 is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr2"),"Contig chr2 is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr3"),"Contig chr3 is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr4"),"Contig chr4 is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr5"),"Contig chr5 is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr6"),"Contig chr6 is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr7"),"Contig chr7 is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr8"),"Contig chr8 is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr9"),"Contig chr9 is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr10"),"Contig chr10 is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr11"),"Contig chr11 is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr12"),"Contig chr12 is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr13"),"Contig chr13 is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr14"),"Contig chr14 is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr15"),"Contig chr15 is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr16"),"Contig chr16 is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr17"),"Contig chr17 is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr18"),"Contig chr18 is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr19"),"Contig chr19 is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr20"),"Contig chr20 is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr21"),"Contig chr21 is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr22"),"Contig chr22 is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chrX"),"Contig chrX is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chrY"),"Contig chrY is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr1_random"),"Contig chr1_random is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr2_random"),"Contig chr2_random is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr3_random"),"Contig chr3_random is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr4_random"),"Contig chr4_random is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr5_random"),"Contig chr5_random is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr6_random"),"Contig chr6_random is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr7_random"),"Contig chr7_random is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr8_random"),"Contig chr8_random is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr9_random"),"Contig chr9_random is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr10_random"),"Contig chr10_random is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr11_random"),"Contig chr11_random is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr13_random"),"Contig chr13_random is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr15_random"),"Contig chr15_random is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr16_random"),"Contig chr16_random is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr17_random"),"Contig chr17_random is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr18_random"),"Contig chr18_random is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr19_random"),"Contig chr19_random is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr21_random"),"Contig chr21_random is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr22_random"),"Contig chr22_random is not present");
+        Assert.assertTrue(sequenceIndex.hasIndexEntry("chrX_random"),"Contig chrX_random is not present");
+    }
+
+    @Test(dataProvider="homosapiens")
+    public void testHasInvalidEntry(FastaSequenceIndex sequenceIndex) {
+        Assert.assertFalse(sequenceIndex.hasIndexEntry("invalid"),"Found an invalid entry");
+    }
+
+    @Test(dataProvider="homosapiens",expectedExceptions=SAMException.class)
+    public void testGetInvalidEntry(FastaSequenceIndex sequenceIndex) {
+        sequenceIndex.getIndexEntry("invalid");
+    }
+
+    @Test(dataProvider="homosapiens")
+    public void testIteration(FastaSequenceIndex sequenceIndex) {
+        Iterator<FastaSequenceIndexEntry> sequenceIndexEntries = sequenceIndex.iterator();
+
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chrM","Contig chrM is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr1","Contig chr1 is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr2","Contig chr2 is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr3","Contig chr3 is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr4","Contig chr4 is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr5","Contig chr5 is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr6","Contig chr6 is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr7","Contig chr7 is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr8","Contig chr8 is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr9","Contig chr9 is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr10","Contig chr10 is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr11","Contig chr11 is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr12","Contig chr12 is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr13","Contig chr13 is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr14","Contig chr14 is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr15","Contig chr15 is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr16","Contig chr16 is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr17","Contig chr17 is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr18","Contig chr18 is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr19","Contig chr19 is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr20","Contig chr20 is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr21","Contig chr21 is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr22","Contig chr22 is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chrX","Contig chrX is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chrY","Contig chrY is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr1_random","Contig chr1_random is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr2_random","Contig chr2_random is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr3_random","Contig chr3_random is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr4_random","Contig chr4_random is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr5_random","Contig chr5_random is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr6_random","Contig chr6_random is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr7_random","Contig chr7_random is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr8_random","Contig chr8_random is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr9_random","Contig chr9_random is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr10_random","Contig chr10_random is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr11_random","Contig chr11_random is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr13_random","Contig chr13_random is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr15_random","Contig chr15_random is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr16_random","Contig chr16_random is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr17_random","Contig chr17_random is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr18_random","Contig chr18_random is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr19_random","Contig chr19_random is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr21_random","Contig chr21_random is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr22_random","Contig chr22_random is not present");
+        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chrX_random","Contig chrX_random is not present");
+        Assert.assertFalse(sequenceIndexEntries.hasNext(),"Iterator still has more entries");
+    }
+
+    @Test(dataProvider="specialcharacters")
+    public void testSpecialCharacters(FastaSequenceIndex specialCharactersIndex) {
+        /* file contents:
+        chrM	16571	6	50	51
+        chr1;boat	247249719	16915	50	51
+        chr2:money	242951149	252211635	50	51
+        chr3::;	199501827	500021813	50	51
+        ;;;;;;;;	123	234	456	789
+        file:gi|17981852|ref|NC_001807.4|    16571   2911876801      70      71
+        */
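+        // Each .fai line is tab-separated: contig name, sequence length (getSize), byte offset of the
+        // first base (getLocation), bases per line (getBasesPerLine), and bytes per line including the
+        // newline (getBytesPerLine). The entries are checked below in file order.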
+        Iterator<FastaSequenceIndexEntry> sequenceIndexEntries = specialCharactersIndex.iterator();
+        FastaSequenceIndexEntry ent = sequenceIndexEntries.next();
+        Assert.assertEquals(ent.getContig(),"chrM","Contig chrM is not present");
+        Assert.assertEquals(ent.getSize(),16571,"Contig chrM size is not correct");
+        Assert.assertEquals(ent.getLocation(),6,"Contig chrM location is not correct");
+        Assert.assertEquals(ent.getBasesPerLine(),50,"Contig chrM bases per line is not correct");
+        Assert.assertEquals(ent.getBytesPerLine(),51,"Contig chrM bytes per line is not correct");
+
+        ent = sequenceIndexEntries.next();
+        Assert.assertEquals(ent.getContig(),"chr1;boat","Contig chr1;boat is not present");
+        Assert.assertEquals(ent.getSize(),247249719,"Contig chr1;boat size is not correct");
+        Assert.assertEquals(ent.getLocation(),16915,"Contig chr1;boat location is not correct");
+        Assert.assertEquals(ent.getBasesPerLine(),50,"Contig chr1;boat bases per line is not correct");
+        Assert.assertEquals(ent.getBytesPerLine(),51,"Contig chr1;boat bytes per line is not correct");
+
+        ent = sequenceIndexEntries.next();
+        Assert.assertEquals(ent.getContig(),"chr2:money","Contig chr2:money is not present");
+        Assert.assertEquals(ent.getSize(),242951149,"Contig chr2:money size is not correct");
+        Assert.assertEquals(ent.getLocation(),252211635,"Contig chr2:money location is not correct");
+        Assert.assertEquals(ent.getBasesPerLine(),50,"Contig chr2:money bases per line is not correct");
+        Assert.assertEquals(ent.getBytesPerLine(),51,"Contig chr2:money bytes per line is not correct");
+
+        ent = sequenceIndexEntries.next();
+        Assert.assertEquals(ent.getContig(),"chr3::;","Contig chr3::; is not present");
+        Assert.assertEquals(ent.getSize(),199501827,"Contig chr3::; size is not correct");
+        Assert.assertEquals(ent.getLocation(),500021813,"Contig chr3::; location is not correct");
+        Assert.assertEquals(ent.getBasesPerLine(),50,"Contig chr3::; bases per line is not correct");
+        Assert.assertEquals(ent.getBytesPerLine(),51,"Contig chr3::; bytes per line is not correct");
+
+        ent = sequenceIndexEntries.next();
+        Assert.assertEquals(ent.getContig(),";;;;;;;;","Contig ;;;;;;;; is not present");
+        Assert.assertEquals(ent.getSize(),123,"Contig ;;;;;;;; size is not correct");
+        Assert.assertEquals(ent.getLocation(),234,"Contig ;;;;;;;; location is not correct");
+        Assert.assertEquals(ent.getBasesPerLine(),456,"Contig ;;;;;;;; bases per line is not correct");
+        Assert.assertEquals(ent.getBytesPerLine(),789,"Contig ;;;;;;;; bytes per line is not correct");
+
+        ent = sequenceIndexEntries.next();
+        Assert.assertEquals(ent.getContig(),"file:gi|17981852|ref|NC_001807.4|","Contig file:gi|17981852|ref|NC_001807.4| is not present");
+        Assert.assertEquals(ent.getSize(),16571,"Contig file:gi|17981852|ref|NC_001807.4| size is not correct");
+        Assert.assertEquals(ent.getLocation(),2911876801L,"Contig file:gi|17981852|ref|NC_001807.4| location is not correct");
+        Assert.assertEquals(ent.getBasesPerLine(),70,"Contig file:gi|17981852|ref|NC_001807.4| bases per line is not correct");
+        Assert.assertEquals(ent.getBytesPerLine(),71,"Contig file:gi|17981852|ref|NC_001807.4| bytes per line is not correct");
+    }
+}
diff --git a/src/tests/java/htsjdk/samtools/reference/InMemoryReferenceSequenceFile.java b/src/test/java/htsjdk/samtools/reference/InMemoryReferenceSequenceFile.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/reference/InMemoryReferenceSequenceFile.java
rename to src/test/java/htsjdk/samtools/reference/InMemoryReferenceSequenceFile.java
diff --git a/src/test/java/htsjdk/samtools/reference/IndexedFastaSequenceFileTest.java b/src/test/java/htsjdk/samtools/reference/IndexedFastaSequenceFileTest.java
new file mode 100644
index 0000000..086b3be
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/reference/IndexedFastaSequenceFileTest.java
@@ -0,0 +1,308 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+
+package htsjdk.samtools.reference;
+
+import htsjdk.samtools.SAMException;
+import htsjdk.samtools.util.CloserUtil;
+import htsjdk.samtools.util.StringUtil;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+
+/**
+ * Test the indexed fasta sequence file reader.
+ */
+public class IndexedFastaSequenceFileTest{
+    private static File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools/reference");
+    private static File SEQUENCE_FILE = new File(TEST_DATA_DIR,"Homo_sapiens_assembly18.trimmed.fasta");
+    private static File SEQUENCE_FILE_NODICT = new File(TEST_DATA_DIR,"Homo_sapiens_assembly18.trimmed.nodict.fasta");
+
+    private final String firstBasesOfChrM = "GATCACAGGTCTATCACCCT";
+    private final String extendedBasesOfChrM = "GATCACAGGTCTATCACCCTATTAACCACTCACGGGAGCTCTCCATGCAT" +
+                                               "TTGGTATTTTCGTCTGGGGGGTGTGCACGCGATAGCATTGCGAGACGCTG" +
+                                               "GAGCCGGAGCACCCTATGTCGCAGTATCTGTCTTTGATTCCTGCCTCATT";
+    private final String lastBasesOfChr20 = "ttgtctgatgctcatattgt";
+    private final int CHR20_LENGTH = 1000000;
+
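+    // The same trimmed hg18 reference is opened four ways below: from a File and from a Path, each
+    // with and without an accompanying sequence dictionary, so every test runs against all variants.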
+    @DataProvider(name="homosapiens")
+    public Object[][] provideSequenceFile() throws FileNotFoundException {
+        return new Object[][] { new Object[]
+                { new IndexedFastaSequenceFile(SEQUENCE_FILE) },
+                { new IndexedFastaSequenceFile(SEQUENCE_FILE_NODICT) },
+                { new IndexedFastaSequenceFile(SEQUENCE_FILE.toPath()) },
+                { new IndexedFastaSequenceFile(SEQUENCE_FILE_NODICT.toPath()) }};
+    }
+
+    @DataProvider(name="comparative")
+    public Object[][] provideOriginalAndNewReaders() throws FileNotFoundException {
+        return new Object[][] {
+                new Object[] { ReferenceSequenceFileFactory.getReferenceSequenceFile(SEQUENCE_FILE),
+                                               new IndexedFastaSequenceFile(SEQUENCE_FILE) },
+                new Object[] { ReferenceSequenceFileFactory.getReferenceSequenceFile(SEQUENCE_FILE, true),
+                                               new IndexedFastaSequenceFile(SEQUENCE_FILE) },
+                new Object[] { ReferenceSequenceFileFactory.getReferenceSequenceFile(SEQUENCE_FILE.toPath()),
+                                               new IndexedFastaSequenceFile(SEQUENCE_FILE.toPath()) },
+                new Object[] { ReferenceSequenceFileFactory.getReferenceSequenceFile(SEQUENCE_FILE.toPath(), true),
+                                               new IndexedFastaSequenceFile(SEQUENCE_FILE.toPath()) },};
+    }
+
+    @Test(dataProvider="homosapiens")
+    public void testOpenFile(IndexedFastaSequenceFile sequenceFile) {
+        long startTime = System.currentTimeMillis();
+        Assert.assertNotNull(sequenceFile);
+        long endTime = System.currentTimeMillis();
+        CloserUtil.close(sequenceFile);
+
+        System.err.printf("testOpenFile runtime: %dms%n", (endTime - startTime)) ;
+    }
+
+    @Test(dataProvider="homosapiens")
+    public void testFirstSequence(IndexedFastaSequenceFile sequenceFile) {
+        long startTime = System.currentTimeMillis();
+        ReferenceSequence sequence = sequenceFile.getSubsequenceAt("chrM",1,firstBasesOfChrM.length());
+        long endTime = System.currentTimeMillis();
+
+        Assert.assertEquals(sequence.getName(),"chrM","Sequence contig is not correct");
+        Assert.assertEquals(sequence.getContigIndex(),0,"Sequence contig index is not correct");
+        Assert.assertEquals(StringUtil.bytesToString(sequence.getBases()),firstBasesOfChrM,"First n bases of chrM are incorrect");
+
+        CloserUtil.close(sequenceFile);
+
+        System.err.printf("testFirstSequence runtime: %dms%n", (endTime - startTime)) ;
+    }
+
+    @Test(dataProvider="homosapiens")
+    public void testFirstSequenceExtended(IndexedFastaSequenceFile sequenceFile) {
+        long startTime = System.currentTimeMillis();
+        ReferenceSequence sequence = sequenceFile.getSubsequenceAt("chrM",1,extendedBasesOfChrM.length());
+        long endTime = System.currentTimeMillis();
+
+        Assert.assertEquals(sequence.getName(),"chrM","Sequence contig is not correct");
+        Assert.assertEquals(sequence.getContigIndex(),0,"Sequence contig index is not correct");
+        Assert.assertEquals(StringUtil.bytesToString(sequence.getBases()),extendedBasesOfChrM,"First n bases of chrM are incorrect");
+
+        CloserUtil.close(sequenceFile);
+
+        System.err.printf("testFirstSequenceExtended runtime: %dms%n", (endTime - startTime)) ;
+    }
+
+    @Test(dataProvider="homosapiens")
+    public void testReadStartingInCenterOfFirstLine(IndexedFastaSequenceFile sequenceFile) {
+        final int bytesToChopOff = 5;
+        String truncated = extendedBasesOfChrM.substring(bytesToChopOff);
+
+        long startTime = System.currentTimeMillis();
+        ReferenceSequence sequence = sequenceFile.getSubsequenceAt("chrM",
+                                                                   bytesToChopOff + 1,
+                                                                   bytesToChopOff + truncated.length());
+        long endTime = System.currentTimeMillis();
+
+        Assert.assertEquals(sequence.getName(),"chrM","Sequence contig is not correct");
+        Assert.assertEquals(sequence.getContigIndex(),0,"Sequence contig index is not correct");
+        Assert.assertEquals(StringUtil.bytesToString(sequence.getBases()),truncated,"First n bases of chrM are incorrect");
+
+        CloserUtil.close(sequenceFile);
+
+        System.err.printf("testReadStartingInCenterOfFirstLine runtime: %dms%n", (endTime - startTime)) ;
+    }
+
+    @Test(dataProvider="homosapiens")
+    public void testReadStartingInCenterOfMiddleLine(IndexedFastaSequenceFile sequenceFile) {
+        final int bytesToChopOff = 120;
+        String truncated = extendedBasesOfChrM.substring(bytesToChopOff);
+
+        long startTime = System.currentTimeMillis();
+        ReferenceSequence sequence = sequenceFile.getSubsequenceAt("chrM",
+                                                                   bytesToChopOff + 1,
+                                                                   bytesToChopOff + truncated.length());
+        long endTime = System.currentTimeMillis();
+
+        Assert.assertEquals(sequence.getName(),"chrM","Sequence contig is not correct");
+        Assert.assertEquals(sequence.getContigIndex(),0,"Sequence contig index is not correct");
+        Assert.assertEquals(StringUtil.bytesToString(sequence.getBases()),truncated,"First n bases of chrM are incorrect");
+
+        CloserUtil.close(sequenceFile);
+
+        System.err.printf("testReadStartingInCenterOfMiddleLine runtime: %dms%n", (endTime - startTime)) ;
+    }
+
+    @Test(dataProvider="comparative")
+    public void testFirstCompleteContigRead(ReferenceSequenceFile originalSequenceFile, IndexedFastaSequenceFile sequenceFile) {
+        ReferenceSequence expectedSequence = originalSequenceFile.nextSequence();
+
+        long startTime = System.currentTimeMillis();
+        ReferenceSequence sequence = sequenceFile.getSequence("chrM");
+        long endTime = System.currentTimeMillis();
+
+        Assert.assertEquals(sequence.getName(),"chrM","Sequence contig is not correct");
+        Assert.assertEquals(sequence.getContigIndex(),0,"Sequence contig index is not correct");
+        Assert.assertEquals(StringUtil.bytesToString(sequence.getBases()),StringUtil.bytesToString(expectedSequence.getBases()),"chrM is incorrect");
+
+        CloserUtil.close(originalSequenceFile);
+        CloserUtil.close(sequenceFile);
+
+        System.err.printf("testFirstCompleteContigRead runtime: %dms%n", (endTime - startTime)) ;
+    }
+
+    @Test(dataProvider="homosapiens",expectedExceptions=SAMException.class)
+    public void testReadThroughEndOfContig(IndexedFastaSequenceFile sequenceFile) {
+        long startTime = System.currentTimeMillis();
+        try {
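+            // chrM is 16,571 bases long (see the index tests above), so a query for 16500-16600
+            // runs past the end of the contig and is expected to throw a SAMException.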
+            sequenceFile.getSubsequenceAt("chrM",16500,16600);
+        }
+        finally {
+            long endTime = System.currentTimeMillis();
+
+            CloserUtil.close(sequenceFile);
+
+            System.err.printf("testReadThroughEndOfContig runtime: %dms%n", (endTime - startTime)) ;
+        }
+    }
+
+    @Test(dataProvider="homosapiens",expectedExceptions=SAMException.class)
+    public void testReadPastEndOfContig(IndexedFastaSequenceFile sequenceFile) {
+         long startTime = System.currentTimeMillis();
+         try {
+             sequenceFile.getSubsequenceAt("chrM",16800,16900);
+         }
+         finally {
+             long endTime = System.currentTimeMillis();
+
+             CloserUtil.close(sequenceFile);
+
+             System.err.printf("testReadPastEndOfContig runtime: %dms%n", (endTime - startTime)) ;
+         }
+     }
+
+    @Test(dataProvider="comparative")
+    public void testLastCompleteContigRead(ReferenceSequenceFile originalSequenceFile, IndexedFastaSequenceFile sequenceFile) {
+        ReferenceSequence expectedSequence = originalSequenceFile.nextSequence();
+        while( !expectedSequence.getName().equals("chr20") )
+            expectedSequence = originalSequenceFile.nextSequence();
+
+        long startTime = System.currentTimeMillis();
+        ReferenceSequence sequence = sequenceFile.getSequence("chr20");
+        long endTime = System.currentTimeMillis();
+
+        Assert.assertEquals(sequence.getName(),"chr20","Sequence contig is not correct");
+        Assert.assertEquals(sequence.getContigIndex(),1,"Sequence contig index is not correct");
+        Assert.assertEquals(StringUtil.bytesToString(sequence.getBases()),StringUtil.bytesToString(expectedSequence.getBases()),"chr20 is incorrect");
+
+        CloserUtil.close(originalSequenceFile);
+        CloserUtil.close(sequenceFile);
+
+        System.err.printf("testLastCompleteContigRead runtime: %dms%n", (endTime - startTime)) ;
+    }
+
+
+    @Test(dataProvider="homosapiens")
+    public void testLastOfChr20(IndexedFastaSequenceFile sequenceFile) {
+        long startTime = System.currentTimeMillis();
+        ReferenceSequence sequence = sequenceFile.getSubsequenceAt("chr20",
+                                                                   CHR20_LENGTH - lastBasesOfChr20.length()+1,
+                                                                   CHR20_LENGTH);
+        long endTime = System.currentTimeMillis();
+
+        Assert.assertEquals(sequence.getName(),"chr20","Sequence contig is not correct");
+        Assert.assertEquals(sequence.getContigIndex(),1,"Sequence contig index is not correct");
+        Assert.assertEquals(StringUtil.bytesToString(sequence.getBases()),lastBasesOfChr20,"Last n bases of chr20 are incorrect");
+
+        CloserUtil.close(sequenceFile);
+
+        System.err.printf("testFirstOfChr1 runtime: %dms%n", (endTime - startTime)) ;
+    }
+
+    @Test(dataProvider="comparative")
+    public void testFirstElementOfIterator(ReferenceSequenceFile originalSequenceFile,IndexedFastaSequenceFile sequenceFile) {
+        ReferenceSequence expectedSequence = originalSequenceFile.nextSequence();
+
+        long startTime = System.currentTimeMillis();
+        ReferenceSequence sequence = sequenceFile.nextSequence();
+        long endTime = System.currentTimeMillis();
+
+        Assert.assertEquals(sequence.getName(), "chrM","Sequence contig is not correct");
+        Assert.assertEquals(sequence.getContigIndex(), 0,"Sequence contig index is not correct");
+        Assert.assertEquals(StringUtil.bytesToString(sequence.getBases()),StringUtil.bytesToString(expectedSequence.getBases()),"chrM is incorrect");
+
+        CloserUtil.close(originalSequenceFile);
+        CloserUtil.close(sequenceFile);
+
+        System.err.printf("testFirstElementOfIterator runtime: %dms%n", (endTime - startTime)) ;
+    }
+
+    @Test(dataProvider="comparative")
+    public void testNextElementOfIterator(ReferenceSequenceFile originalSequenceFile, IndexedFastaSequenceFile sequenceFile) {
+        // Skip past the first one and load the second one.
+        originalSequenceFile.nextSequence();
+        ReferenceSequence expectedSequence = originalSequenceFile.nextSequence();
+
+        long startTime = System.currentTimeMillis();
+        sequenceFile.nextSequence();
+        ReferenceSequence sequence = sequenceFile.nextSequence();
+        long endTime = System.currentTimeMillis();
+
+        Assert.assertEquals(sequence.getName(),"chr20","Sequence contig is not correct");
+        Assert.assertEquals(sequence.getContigIndex(),1,"Sequence contig index is not correct");
+        Assert.assertEquals(sequence.length(),expectedSequence.length(),"Sequence size is not correct");
+        Assert.assertEquals(StringUtil.bytesToString(sequence.getBases()),StringUtil.bytesToString(expectedSequence.getBases()),"chr20 is incorrect");
+
+        CloserUtil.close(originalSequenceFile);
+        CloserUtil.close(sequenceFile);
+
+        System.err.printf("testNextElementOfIterator runtime: %dms%n", (endTime - startTime)) ;
+    }
+
+    @Test(dataProvider="comparative")
+    public void testReset(ReferenceSequenceFile originalSequenceFile, IndexedFastaSequenceFile sequenceFile) {
+        // Load the first sequence as the expected value; the indexed reader is advanced twice and then reset.
+        ReferenceSequence expectedSequence = originalSequenceFile.nextSequence();
+
+        long startTime = System.currentTimeMillis();
+        sequenceFile.nextSequence();
+        sequenceFile.nextSequence();
+        sequenceFile.reset();
+        ReferenceSequence sequence = sequenceFile.nextSequence();
+        long endTime = System.currentTimeMillis();
+
+        Assert.assertEquals(sequence.getName(),"chrM","Sequence contig is not correct");
+        Assert.assertEquals(sequence.getContigIndex(),0,"Sequence contig index is not correct");
+        Assert.assertEquals(sequence.length(),expectedSequence.length(), "Sequence size is not correct");
+        Assert.assertEquals(StringUtil.bytesToString(sequence.getBases()),StringUtil.bytesToString(expectedSequence.getBases()),"chrM is incorrect");
+
+        CloserUtil.close(originalSequenceFile);
+        CloserUtil.close(sequenceFile);
+
+        System.err.printf("testReset runtime: %dms%n", (endTime - startTime)) ;
+    }
+
+    @Test(expectedExceptions = FileNotFoundException.class)
+    public void testMissingFile() throws Exception {
+        new IndexedFastaSequenceFile(new File(TEST_DATA_DIR, "non-existent.fasta"));
+        Assert.fail("FileNotFoundException should have been thrown");
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/reference/ReferenceSequenceFileFactoryTests.java b/src/test/java/htsjdk/samtools/reference/ReferenceSequenceFileFactoryTests.java
new file mode 100644
index 0000000..6eeae7b
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/reference/ReferenceSequenceFileFactoryTests.java
@@ -0,0 +1,39 @@
+package htsjdk.samtools.reference;
+
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.File;
+
+/**
+ * Simple tests for the reference sequence file factory
+ */
+public class ReferenceSequenceFileFactoryTests {
+    public static final File hg18 = new File("src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta");
+
+    @Test public void testPositivePath() {
+        final ReferenceSequenceFile f = ReferenceSequenceFileFactory.getReferenceSequenceFile(hg18);
+        Assert.assertTrue(f instanceof AbstractFastaSequenceFile);
+    }
+
+    @Test public void testGetIndexedReader() {
+        final ReferenceSequenceFile f = ReferenceSequenceFileFactory.getReferenceSequenceFile(hg18, true, true);
+        Assert.assertTrue(f instanceof IndexedFastaSequenceFile, "Got non-indexed reader when expecting indexed reader.");
+    }
+
+    @Test public void testGetNonIndexedReader1() {
+        final ReferenceSequenceFile f = ReferenceSequenceFileFactory.getReferenceSequenceFile(hg18, false, true);
+        Assert.assertTrue(f instanceof FastaSequenceFile, "Got indexed reader when truncating at whitespace! FAI must truncate.");
+    }
+
+    @Test public void testGetNonIndexedReader2() {
+        final ReferenceSequenceFile f = ReferenceSequenceFileFactory.getReferenceSequenceFile(hg18, true, false);
+        Assert.assertTrue(f instanceof FastaSequenceFile, "Got indexed reader when requesting non-indexed reader.");
+    }
+
+    @Test public void testDefaultToIndexed() {
+        final ReferenceSequenceFile f = ReferenceSequenceFileFactory.getReferenceSequenceFile(hg18, true);
+        Assert.assertTrue(f instanceof IndexedFastaSequenceFile, "Got non-indexed reader by default.");
+    }
+
+}
diff --git a/src/test/java/htsjdk/samtools/reference/ReferenceSequenceFileWalkerTest.java b/src/test/java/htsjdk/samtools/reference/ReferenceSequenceFileWalkerTest.java
new file mode 100644
index 0000000..fa746d6
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/reference/ReferenceSequenceFileWalkerTest.java
@@ -0,0 +1,73 @@
+package htsjdk.samtools.reference;
+
+import htsjdk.samtools.SAMException;
+import htsjdk.samtools.util.CloserUtil;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+
+/**
+ * Created by farjoun on 2/14/14.
+ */
+public class ReferenceSequenceFileWalkerTest {
+
+
+    @DataProvider(name = "TestReference")
+    public Object[][] TestReference() {
+        return new Object[][]{
+                new Object[]{"src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta", 0, 1},
+                new Object[]{"src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta", 1, 1},
+                new Object[]{"src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta", 0, 0},
+        };
+    }
+
+
+    @Test(dataProvider = "TestReference")
+    public void testGet(final String fileName, final int index1, final int index2) throws SAMException {
+        final File refFile = new File(fileName);
+        final ReferenceSequenceFileWalker refWalker = new ReferenceSequenceFileWalker(refFile);
+
+        ReferenceSequence sequence = refWalker.get(index1);
+        Assert.assertEquals(sequence.getContigIndex(), index1);
+
+        sequence = refWalker.get(index2);
+        Assert.assertEquals(sequence.getContigIndex(), index2);
+        CloserUtil.close(refWalker);
+    }
+
+
+    @DataProvider(name = "TestFailReference")
+    public Object[][] TestFailReference() {
+        return new Object[][]{
+                new Object[]{"src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.noindex.fasta", 1,3},  //fail because out of bounds
+                new Object[]{"src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.noindex.fasta", 2,3},  //fail because out of bounds
+                new Object[]{"src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.noindex.fasta", 1,0},  //fail because not allowed to look back
+                new Object[]{"src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.noindex.fasta", -1,0},  //fail because out of bounds
+                new Object[]{"src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta", -1, 0},  //fail because out of bounds
+                new Object[]{"src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta", 1, -1},    //fail because out of bounds
+                new Object[]{"src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta", 2,3},  //fail because out of bounds
+                new Object[]{"src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta", 1,3},  //fail because out of bounds
+                new Object[]{"src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta", 1, 0} // fasta is indexed, but not allowed to look back
+        };
+    }
+
+
+    @Test(expectedExceptions = {SAMException.class}, dataProvider = "TestFailReference")
+    public void testFailGet(final String fileName, final int index1, final int index2) throws SAMException {
+        final File refFile = new File(fileName);
+        final ReferenceSequenceFileWalker refWalker = new ReferenceSequenceFileWalker(refFile);
+
+        try {
+            refWalker.get(index1);
+
+            refWalker.get(index2);
+        }
+        finally {
+            CloserUtil.close(refWalker);
+        }
+    }
+
+
+}
diff --git a/src/tests/java/htsjdk/samtools/reference/ReferenceSequenceTests.java b/src/test/java/htsjdk/samtools/reference/ReferenceSequenceTests.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/reference/ReferenceSequenceTests.java
rename to src/test/java/htsjdk/samtools/reference/ReferenceSequenceTests.java
diff --git a/src/test/java/htsjdk/samtools/seekablestream/SeekableBufferedStreamTest.java b/src/test/java/htsjdk/samtools/seekablestream/SeekableBufferedStreamTest.java
new file mode 100644
index 0000000..9720218
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/seekablestream/SeekableBufferedStreamTest.java
@@ -0,0 +1,162 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+
+package htsjdk.samtools.seekablestream;
+
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.URL;
+
+import static org.testng.Assert.assertEquals;
+
+public class SeekableBufferedStreamTest {
+
+//    private final File BAM_INDEX_FILE = new File("testdata/htsjdk/samtools/BAMFileIndexTest/index_test.bam.bai");
+    private final File BAM_FILE = new File("src/test/resources/htsjdk/samtools/BAMFileIndexTest/index_test.bam");
+    private final String BAM_URL_STRING = "http://broadinstitute.github.io/picard/testdata/index_test.bam";
+    private static File TestFile = new File("src/test/resources/htsjdk/samtools/seekablestream/megabyteZeros.dat");
+
+    /**
+     * Test reading across a buffer boundary (buffer size is 512000). The test first reads a range of
+     * bytes using an unbuffered file stream, then compares this to results from a buffered HTTP stream.
+     *
+     * @throws IOException
+     */
+    @Test
+    public void testRandomRead() throws IOException {
+
+        int startPosition = 500000;
+        int length = 50000;
+
+        byte[] buffer1 = new byte[length];
+        SeekableStream unBufferedStream = new SeekableFileStream(BAM_FILE);
+        unBufferedStream.seek(startPosition);
+        int bytesRead = unBufferedStream.read(buffer1, 0, length);
+        assertEquals(length, bytesRead);
+
+        byte[] buffer2 = new byte[length];
+        SeekableStream bufferedStream = new SeekableBufferedStream(new SeekableHTTPStream(new URL(BAM_URL_STRING)));
+        bufferedStream.seek(startPosition);
+        bytesRead = bufferedStream.read(buffer2, 0, length);
+        assertEquals(length, bytesRead);
+
+        assertEquals(buffer1, buffer2);
+    }
+
+    /**
+     * Test an attempt to read past the end of the file.  The test file is 594,149 bytes in length.  The test
+     * attempts to read a 1000 byte block starting at position 594000.  A correct result would return 149 bytes.
+     *
+     * @throws IOException
+     */
+    @Test
+    public void testEOF() throws IOException {
+
+        int remainder = 149;
+        long fileLength = BAM_FILE.length();
+        long startPosition = fileLength - remainder;
+        int length = 1000;
+
+
+        byte[] buffer = new byte[length];
+        SeekableStream bufferedStream = new SeekableBufferedStream(new SeekableHTTPStream(new URL(BAM_URL_STRING)));
+        bufferedStream.seek(startPosition);
+        int bytesRead = bufferedStream.read(buffer, 0, length);
+        assertEquals(remainder, bytesRead);
+
+        // Subsequent reads should return -1
+        bytesRead = bufferedStream.read(buffer, 0, length);
+        assertEquals(-1, bytesRead);
+    }
+
+    @Test
+    public void testSkip() throws IOException {
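+        // in1 advances past a block with skip() while in2 jumps to the same absolute position with seek();
+        // the bytes read afterwards must be identical for every buffer size tested.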
+        final int[] BUFFER_SIZES = new int[]{8, 96, 1024, 8*1024, 16*1024, 96*1024, 48*1024};
+
+        for (final int bufferSize : BUFFER_SIZES) {
+            final SeekableBufferedStream in1 = new SeekableBufferedStream(new SeekableFileStream(BAM_FILE), bufferSize);
+            final SeekableBufferedStream in2 = new SeekableBufferedStream(new SeekableFileStream(BAM_FILE), bufferSize);
+
+            final int SIZE = 10000;
+            final byte[] bytes1 = new byte[SIZE];
+            final byte[] bytes2 = new byte[SIZE];
+
+            reallyRead(bytes1, in1);
+            reallyRead(bytes1, in1);
+            in1.skip(bytes1.length);
+            reallyRead(bytes1, in1);
+
+            reallyRead(bytes2, in2);
+            reallyRead(bytes2, in2);
+            in2.seek(bytes2.length * 3);
+            reallyRead(bytes2, in2);
+
+            in1.close();
+            in2.close();
+
+            Assert.assertEquals(bytes1, bytes2, "Error at buffer size " + bufferSize);
+        }
+    }
+
+    private int reallyRead(final byte[] bytes, final SeekableBufferedStream in) throws IOException {
+        int read = 0, total = 0;
+        do {
+            read = in.read(bytes, total, bytes.length - total);
+            total += Math.max(read, 0); // read() returns -1 at end of stream; don't let that shrink the total
+        } while (total != bytes.length && read > 0);
+
+        return total;
+    }
+
+
+    @Test
+    public void testDivisibleReads() throws IOException {
+
+        testReadsLength(1);
+        testReadsLength(2);
+        testReadsLength(4);
+        testReadsLength(5);
+        testReadsLength(10);
+        testReadsLength(20);
+        testReadsLength(50);
+        testReadsLength(100);
+
+    }
+
+    private void testReadsLength(final int length) throws IOException {
+
+        final int BUFFERED_STREAM_BUFFER_SIZE = 100;
+        final byte[] buffer = new byte[BUFFERED_STREAM_BUFFER_SIZE * 10];
+        final SeekableFileStream fileStream = new SeekableFileStream(TestFile);
+        final SeekableBufferedStream bufferedStream = new SeekableBufferedStream(fileStream, BUFFERED_STREAM_BUFFER_SIZE);
+
+        for (int i = 0; i < 10 * BUFFERED_STREAM_BUFFER_SIZE / length; ++i) {
+            assertEquals(bufferedStream.read(buffer, 0, length), length);
+        }
+    }
+
+}
diff --git a/src/tests/java/htsjdk/samtools/seekablestream/SeekableFTPStreamTest.java b/src/test/java/htsjdk/samtools/seekablestream/SeekableFTPStreamTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/seekablestream/SeekableFTPStreamTest.java
rename to src/test/java/htsjdk/samtools/seekablestream/SeekableFTPStreamTest.java
diff --git a/src/test/java/htsjdk/samtools/seekablestream/SeekableFileStreamTest.java b/src/test/java/htsjdk/samtools/seekablestream/SeekableFileStreamTest.java
new file mode 100644
index 0000000..35e1545
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/seekablestream/SeekableFileStreamTest.java
@@ -0,0 +1,52 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2013 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools.seekablestream;
+
+import htsjdk.samtools.util.BufferedLineReader;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.File;
+
+/**
+ * Tests for {@link SeekableFileStream}.
+ *
+ * User: jrobinso
+ * Date: Dec 20, 2009
+ * Time: 11:13:19 AM
+ */
+public class SeekableFileStreamTest {
+
+    @Test
+    public void testSeek() throws Exception {
+        String expectedLine = "ccccccccc";
+        File testFile = new File("src/test/resources/htsjdk/samtools/seekablestream/seekTest.txt");
+        SeekableFileStream is = new SeekableFileStream(testFile);
+        is.seek(20);
+        BufferedLineReader reader = new BufferedLineReader(is);
+        String nextLine = reader.readLine();
+        Assert.assertEquals(nextLine, expectedLine);
+        reader.close();
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/seekablestream/SeekableMemoryStreamTest.java b/src/test/java/htsjdk/samtools/seekablestream/SeekableMemoryStreamTest.java
new file mode 100644
index 0000000..09ad92d
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/seekablestream/SeekableMemoryStreamTest.java
@@ -0,0 +1,93 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+
+package htsjdk.samtools.seekablestream;
+
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.EOFException;
+import java.io.IOException;
+
+public class SeekableMemoryStreamTest {
+
+    @Test
+    public void test_getSource() {
+        String source = "source";
+        SeekableMemoryStream stream = new SeekableMemoryStream("qwe".getBytes(), source);
+        Assert.assertEquals(stream.getSource(), source);
+    }
+
+    @Test
+    public void test_EOF() throws IOException {
+        SeekableMemoryStream stream = new SeekableMemoryStream(new byte[]{}, null);
+        Assert.assertTrue(stream.eof());
+        Assert.assertEquals(stream.read(), -1);
+        Assert.assertTrue(stream.eof());
+    }
+
+    @Test
+    public void test_read_byte() throws IOException {
+        byte[] data = new byte[1024];
+        for (int i = 0; i < data.length; i++) {
+            data[i] = (byte) i;
+        }
+        SeekableMemoryStream stream = new SeekableMemoryStream(data, null);
+
+        for (int i = 0; i < data.length; i++) {
+            byte expectedByteValue = (byte) i;
+            Assert.assertEquals((byte) stream.read(), expectedByteValue);
+        }
+    }
+
+    @Test
+    public void test_read_into_array() throws IOException {
+        byte[] data = new byte[1024];
+        for (int i = 0; i < data.length; i++) {
+            data[i] = (byte) i;
+        }
+        SeekableMemoryStream stream = new SeekableMemoryStream(data, null);
+
+        byte[] copy = new byte[data.length];
+
+        int length = data.length;
+        int numberOfBytesReadSoFar = 0, maxBytesPerRead = 11;
+        while (numberOfBytesReadSoFar < length) {
+            final int count = stream.read(copy, numberOfBytesReadSoFar, Math.min(maxBytesPerRead, length - numberOfBytesReadSoFar));
+            if (count < 0) {
+                throw new EOFException();
+            }
+            numberOfBytesReadSoFar += count;
+        }
+
+        Assert.assertEquals(copy, data);
+    }
+
+    @Test(expectedExceptions = IOException.class)
+    public void test_reset() throws IOException {
+        SeekableMemoryStream stream = new SeekableMemoryStream("qwe".getBytes(), null);
+        stream.mark(3);
+        stream.reset();
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/seekablestream/SeekablePathStreamTest.java b/src/test/java/htsjdk/samtools/seekablestream/SeekablePathStreamTest.java
new file mode 100644
index 0000000..067f5be
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/seekablestream/SeekablePathStreamTest.java
@@ -0,0 +1,51 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2013 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools.seekablestream;
+
+import java.io.File;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+public class SeekablePathStreamTest {
+
+    @Test
+    public void testRead() throws Exception {
+        Path testPath = new File("src/test/resources/htsjdk/samtools/seekablestream/seekTest.txt").toPath();
+        SeekablePathStream is = new SeekablePathStream(testPath);
+        Assert.assertEquals(is.position(), 0);
+        Assert.assertEquals(is.read(), (int) 'a');
+        Assert.assertEquals(is.position(), 1);
+        is.seek(20);
+        Assert.assertEquals(is.position(), 20);
+        byte[] buf = new byte[2];
+        Assert.assertEquals(is.read(buf, 0, buf.length), 2);
+        Assert.assertEquals(buf, new byte[] { (byte) 'c', (byte) 'c' });
+        Assert.assertEquals(is.skip(8), 8);
+        Assert.assertEquals(is.position(), 30);
+        Assert.assertEquals(is.length(), Files.size(testPath));
+        is.close();
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/seekablestream/SeekableStreamFactoryTest.java b/src/test/java/htsjdk/samtools/seekablestream/SeekableStreamFactoryTest.java
new file mode 100644
index 0000000..979d944
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/seekablestream/SeekableStreamFactoryTest.java
@@ -0,0 +1,44 @@
+package htsjdk.samtools.seekablestream;
+
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.URL;
+
+public class SeekableStreamFactoryTest {
+    private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools");
+
+    @Test
+    public void testIsFilePath() throws Exception {
+        Assert.assertEquals(SeekableStreamFactory.isFilePath("x"), true);
+        Assert.assertEquals(SeekableStreamFactory.isFilePath(""), true);
+        Assert.assertEquals(SeekableStreamFactory.isFilePath("http://broadinstitute.org"), false);
+        Assert.assertEquals(SeekableStreamFactory.isFilePath("https://broadinstitute.org"), false);
+        Assert.assertEquals(SeekableStreamFactory.isFilePath("ftp://broadinstitute.org"), false);
+    }
+
+    @DataProvider(name="getStreamForData")
+    public Object[][] getStreamForData() throws Exception {
+        return new Object[][] {
+                { new File(TEST_DATA_DIR, "BAMFileIndexTest/index_test.bam").getAbsolutePath(),
+                        new File(TEST_DATA_DIR, "BAMFileIndexTest/index_test.bam").getAbsolutePath() },
+                { new File(TEST_DATA_DIR, "cram_with_bai_index.cram").getAbsolutePath(),
+                        new File(TEST_DATA_DIR, "cram_with_bai_index.cram").getAbsolutePath() },
+                { new URL("file://" + new File(TEST_DATA_DIR, "cram_with_bai_index.cram").getAbsolutePath()).toExternalForm(),
+                        new File(TEST_DATA_DIR, "cram_with_bai_index.cram").getAbsolutePath() },
+                { new URL("http://www.broadinstitute.org/~picard/testdata/index_test.bam").toExternalForm(),
+                        new URL("http://www.broadinstitute.org/~picard/testdata/index_test.bam").toExternalForm() },
+                { new URL("http://www.broadinstitute.org/~picard/testdata/index_test.bam.bai").toExternalForm(),
+                       new URL("http://www.broadinstitute.org/~picard/testdata/index_test.bam.bai").toExternalForm() }
+        };
+    }
+
+    @Test(dataProvider = "getStreamForData")
+    public void testGetStreamFor(final String path, final String expectedPath) throws IOException {
+        Assert.assertEquals(SeekableStreamFactory.getInstance().getStreamFor(path).getSource(), expectedPath);
+    }
+
+}
diff --git a/src/tests/java/htsjdk/samtools/sra/AbstractSRATest.java b/src/test/java/htsjdk/samtools/sra/AbstractSRATest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/sra/AbstractSRATest.java
rename to src/test/java/htsjdk/samtools/sra/AbstractSRATest.java
diff --git a/src/test/java/htsjdk/samtools/sra/SRAAccessionTest.java b/src/test/java/htsjdk/samtools/sra/SRAAccessionTest.java
new file mode 100644
index 0000000..e241ca9
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/sra/SRAAccessionTest.java
@@ -0,0 +1,29 @@
+package htsjdk.samtools.sra;
+
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+/**
+ * Tests for SRAAccession logic
+ */
+public class SRAAccessionTest extends AbstractSRATest {
+
+    @DataProvider(name = "isValidAccData")
+    private Object[][] getIsValidAccData() {
+        return new Object[][] {
+            { "SRR000123", true },
+            { "DRR000001", true },
+            { "SRR000000", false },
+            { "src/test/resources/htsjdk/samtools/sra/test_archive.sra", true },
+            { "src/test/resources/htsjdk/samtools/compressed.bam", false },
+            { "src/test/resources/htsjdk/samtools/uncompressed.sam", false },
+        };
+    }
+
+    @Test(dataProvider = "isValidAccData")
+    public void testIsValidAcc(String accession, boolean isValid) {
+        Assert.assertEquals(SRAAccession.isValid(accession), isValid);
+    }
+
+}
diff --git a/src/tests/java/htsjdk/samtools/sra/SRAIndexTest.java b/src/test/java/htsjdk/samtools/sra/SRAIndexTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/sra/SRAIndexTest.java
rename to src/test/java/htsjdk/samtools/sra/SRAIndexTest.java
diff --git a/src/tests/java/htsjdk/samtools/sra/SRALazyRecordTest.java b/src/test/java/htsjdk/samtools/sra/SRALazyRecordTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/sra/SRALazyRecordTest.java
rename to src/test/java/htsjdk/samtools/sra/SRALazyRecordTest.java
diff --git a/src/tests/java/htsjdk/samtools/sra/SRAQueryTest.java b/src/test/java/htsjdk/samtools/sra/SRAQueryTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/sra/SRAQueryTest.java
rename to src/test/java/htsjdk/samtools/sra/SRAQueryTest.java
diff --git a/src/test/java/htsjdk/samtools/sra/SRAReferenceTest.java b/src/test/java/htsjdk/samtools/sra/SRAReferenceTest.java
new file mode 100644
index 0000000..dd2b374
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/sra/SRAReferenceTest.java
@@ -0,0 +1,95 @@
+package htsjdk.samtools.sra;
+
+import htsjdk.samtools.reference.ReferenceSequence;
+import htsjdk.samtools.reference.ReferenceSequenceFile;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class SRAReferenceTest extends AbstractSRATest {
+    @DataProvider(name = "testReference")
+    private Object[][] createDataForReference() {
+        return new Object[][] {
+                {"SRR2096940", "CM000681.1", 95001, 95050, "AGATGATTCAGTCTCACCAAGAACACTGAAAGTCACATGGCTACCAGCAT"},
+        };
+    }
+
+    @Test(dataProvider = "testReference")
+    public void testReference(String acc, String refContig, int refStart, int refStop, String refBases) {
+        final ReferenceSequenceFile refSeqFile = new SRAIndexedSequenceFile(new SRAAccession(acc));
+        final ReferenceSequence refSeq = refSeqFile.getSubsequenceAt(refContig, refStart, refStop);
+        Assert.assertEquals(new String(refSeq.getBases()), refBases);
+    }
+
+    class TestReferenceMtData {
+        String refContig;
+        int refStart;
+        int refStop;
+        String refBases;
+
+        TestReferenceMtData(String refContig, int refStart, int refStop, String refBases) {
+            this.refContig = refContig;
+            this.refStart = refStart;
+            this.refStop = refStop;
+            this.refBases = refBases;
+        }
+
+        @Override
+        public String toString() {
+            return refContig + ":" + refStart + "-" + refStop + " = " + refBases;
+        }
+    }
+
+    @DataProvider(name = "testReferenceMt")
+    private Object[][] createDataForReferenceMt() {
+        return new Object[][] {
+                {
+                    "SRR353866", Arrays.asList(
+                        new TestReferenceMtData("AAAB01001871.1", 1, 50, "TGACGCGCATGAATGGATTAACGAGATTCCCTCTGTCCCTATCTACTATC"),
+                        new TestReferenceMtData("AAAB01001871.1", 901, 950, "ACCAAGCGTACGATTGTTCACCCTTTCAAGGGAACGTGAGCTGGGTTTAG"),
+                        new TestReferenceMtData("AAAB01008987.1", 1, 50, "TTTTGGACGATGTTTTTGGTGAACAGAAAACGAGCTCAATCATCCAGAGC"),
+                        new TestReferenceMtData("AAAB01008859.1", 1, 50, "CAAAACGATGCCACAGATCAGAAGTTAATTAACGCACATTCTCCACCCAC")
+                    )
+                },
+        };
+    }
+
+    @Test(dataProvider = "testReferenceMt")
+    public void testReferenceMt(String acc, List<TestReferenceMtData> parallelTests) throws Exception {
+        final ReferenceSequenceFile refSeqFile = new SRAIndexedSequenceFile(new SRAAccession(acc));
+        final long timeout = 1000L * 5; // just in case
+        final List<Thread> threads = new ArrayList<Thread>(parallelTests.size());
+        final Map<TestReferenceMtData, Exception> runErrors = Collections.synchronizedMap(new HashMap<TestReferenceMtData, Exception>());
+        for (final TestReferenceMtData testData: parallelTests) {
+            threads.add(new Thread() {
+                @Override
+                public void run() {
+                    try {
+                        final ReferenceSequence refSeq = refSeqFile.getSubsequenceAt(testData.refContig,
+                                testData.refStart, testData.refStop);
+                        Assert.assertEquals(new String(refSeq.getBases()), testData.refBases);
+                    } catch (final Exception e) {
+                        Assert.assertNull(runErrors.put(testData, e));
+                    }
+                }
+            });
+        }
+        for (final Thread thread: threads) {
+            thread.start();
+        }
+        for (final Thread thread: threads) {
+            thread.join(timeout);
+        }
+        for (final Map.Entry<TestReferenceMtData, Exception> result: runErrors.entrySet()) {
+            // Will fail only on the first, but a debugger will be able to see all the results.
+            Assert.fail("failed: " + result.getKey(), result.getValue());
+        }
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/sra/SRATest.java b/src/test/java/htsjdk/samtools/sra/SRATest.java
new file mode 100644
index 0000000..2bdd7d7
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/sra/SRATest.java
@@ -0,0 +1,432 @@
+/*===========================================================================
+*
+*                            PUBLIC DOMAIN NOTICE
+*               National Center for Biotechnology Information
+*
+*  This software/database is a "United States Government Work" under the
+*  terms of the United States Copyright Act.  It was written as part of
+*  the author's official duties as a United States Government employee and
+*  thus cannot be copyrighted.  This software/database is freely available
+*  to the public for use. The National Library of Medicine and the U.S.
+*  Government have not placed any restriction on its use or reproduction.
+*
+*  Although all reasonable efforts have been taken to ensure the accuracy
+*  and reliability of the software and data, the NLM and the U.S.
+*  Government do not and cannot warrant the performance or results that
+*  may be obtained by using this software or data. The NLM and the U.S.
+*  Government disclaim all warranties, express or implied, including
+*  warranties of performance, merchantability or fitness for any particular
+*  purpose.
+*
+*  Please cite the author in any work or product based on this material.
+*
+* ===========================================================================
+*
+*/
+
+package htsjdk.samtools.sra;
+
+import htsjdk.samtools.BAMFileSpan;
+import htsjdk.samtools.BrowseableBAMIndex;
+import htsjdk.samtools.Chunk;
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMReadGroupRecord;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.SAMRecordIterator;
+import htsjdk.samtools.SAMSequenceRecord;
+import htsjdk.samtools.SAMUtils;
+import htsjdk.samtools.SAMValidationError;
+import htsjdk.samtools.SamInputResource;
+import htsjdk.samtools.SamReader;
+import htsjdk.samtools.SamReaderFactory;
+import htsjdk.samtools.ValidationStringency;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.TreeSet;
+
+/**
+ * Integration tests for SRA functionality
+ *
+ * Created by andrii.nikitiuk on 8/24/15.
+ */
+public class SRATest extends AbstractSRATest {
+
+    @DataProvider(name = "testCounts")
+    private Object[][] createDataForCounts() {
+        return new Object[][] {
+            {"SRR2096940", 10591, 498},
+            {"SRR000123", 0, 4583}
+        };
+    }
+
+    @Test(dataProvider = "testCounts")
+    public void testCounts(String acc, int expectedNumMapped, int expectedNumUnmapped) {
+        SamReader reader = SamReaderFactory.make().validationStringency(ValidationStringency.SILENT).open(
+                SamInputResource.of(new SRAAccession(acc))
+        );
+
+        final SAMRecordIterator samRecordIterator = reader.iterator();
+
+        assertCorrectCountsOfMappedAndUnmappedRecords(samRecordIterator, expectedNumMapped, expectedNumUnmapped);
+    }
+
+    @DataProvider(name = "testCountsBySpan")
+    private Object[][] createDataForCountsBySpan() {
+        return new Object[][] {
+            {"SRR2096940", Arrays.asList(new Chunk(0, 59128983), new Chunk(59128983, 59141089)), 10591, 498},
+            {"SRR2096940", Arrays.asList(new Chunk(0, 29128983), new Chunk(29128983, 59141089)), 10591, 498},
+            {"SRR2096940", Arrays.asList(new Chunk(0, 59134983), new Chunk(59134983, 59141089)), 10591, 498},
+            {"SRR2096940", Arrays.asList(new Chunk(0, 59130000)),                                10591, 0},
+            {"SRR2096940", Arrays.asList(new Chunk(0, 59140889)),                                10591, 298}
+        };
+    }
+
+    @Test(dataProvider = "testCountsBySpan")
+    public void testCountsBySpan(String acc, List<Chunk> chunks, int expectedNumMapped, int expectedNumUnmapped) {
+        SamReader reader = SamReaderFactory.make().validationStringency(ValidationStringency.SILENT).open(
+                SamInputResource.of(new SRAAccession(acc))
+        );
+
+        final SAMRecordIterator samRecordIterator = ((SamReader.Indexing) reader).iterator(new BAMFileSpan(chunks));
+
+        assertCorrectCountsOfMappedAndUnmappedRecords(samRecordIterator, expectedNumMapped, expectedNumUnmapped);
+    }
+
+    @DataProvider(name = "testGroups")
+    private Object[][] createDataForGroups() {
+        return new Object[][] {
+            {"SRR822962", new TreeSet<>(Arrays.asList(
+                    "GS54389-FS3-L08", "GS57511-FS3-L08", "GS54387-FS3-L02", "GS54387-FS3-L01",
+                    "GS57510-FS3-L01", "GS57510-FS3-L03", "GS54389-FS3-L07", "GS54389-FS3-L05",
+                    "GS54389-FS3-L06", "GS57510-FS3-L02", "GS57510-FS3-L04", "GS54387-FS3-L03",
+                    "GS46253-FS3-L03"))
+            },
+            {"SRR2096940", new HashSet<>(Arrays.asList("SRR2096940"))}
+        };
+    }
+
+    @Test(dataProvider = "testGroups")
+    public void testGroups(String acc, Set<String> groups) {
+        SamReader reader = SamReaderFactory.make().validationStringency(ValidationStringency.SILENT).open(
+                SamInputResource.of(new SRAAccession(acc))
+        );
+
+        final SAMRecordIterator samRecordIterator = reader.iterator();
+
+        SAMFileHeader header = reader.getFileHeader();
+        Set<String> headerGroups = new TreeSet<>();
+        for (SAMReadGroupRecord group : header.getReadGroups()) {
+            Assert.assertEquals(group.getReadGroupId(), group.getId());
+            headerGroups.add(group.getReadGroupId());
+        }
+
+        Assert.assertEquals(groups, headerGroups);
+
+        Set<String> foundGroups = new TreeSet<>();
+
+        for (int i = 0; i < 10000; i++) {
+            if (!samRecordIterator.hasNext()) {
+                break;
+            }
+            SAMRecord record = samRecordIterator.next();
+            String groupName = (String)record.getAttribute("RG");
+
+            foundGroups.add(groupName);
+        }
+
+        // Note: a read group may first appear only after the first 10,000 records, which is not an error
+        Assert.assertEquals(groups, foundGroups);
+    }
+
+    @DataProvider(name = "testReferences")
+    private Object[][] createDataForReferences() {
+        return new Object[][] {
+            // primary alignment only
+            {"SRR1063272", 1,
+                    Arrays.asList("supercont2.1", "supercont2.2", "supercont2.3", "supercont2.4",
+                                  "supercont2.5", "supercont2.6", "supercont2.7", "supercont2.8",
+                                  "supercont2.9", "supercont2.10", "supercont2.11", "supercont2.12",
+                                  "supercont2.13", "supercont2.14"),
+                    Arrays.asList(2291499, 1621675, 1575141, 1084805,
+                                  1814975, 1422463, 1399503, 1398693,
+                                  1186808, 1059964, 1561994, 774062,
+                                  756744, 926563)},
+        };
+    }
+
+    @Test(dataProvider = "testReferences")
+    public void testReferences(String acc, int numberFirstReferenceFound, List<String> references, List<Integer> refLengths) {
+        SamReader reader = SamReaderFactory.make().validationStringency(ValidationStringency.SILENT).open(
+                SamInputResource.of(new SRAAccession(acc))
+        );
+
+        final SAMRecordIterator samRecordIterator = reader.iterator();
+
+        SAMFileHeader header = reader.getFileHeader();
+        Set<String> headerRefNames = new TreeSet<>();
+
+        for (SAMSequenceRecord ref : header.getSequenceDictionary().getSequences()) {
+            String refName = ref.getSequenceName();
+
+            int refIndex = references.indexOf(refName);
+            Assert.assertTrue(refIndex != -1, "Unexpected reference: " + refName);
+
+            Assert.assertEquals(refLengths.get(refIndex), (Integer) ref.getSequenceLength(), "Reference length is incorrect");
+
+            headerRefNames.add(refName);
+        }
+
+        Assert.assertEquals(new TreeSet<>(references), headerRefNames);
+
+        Set<String> foundRefNames = new TreeSet<>();
+        for (int i = 0; i < 10000; i++) {
+            if (!samRecordIterator.hasNext()) {
+                break;
+            }
+            SAMRecord record = samRecordIterator.next();
+
+            if (record.getReferenceIndex().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX)) {
+                continue;
+            }
+
+            String refName = record.getReferenceName();
+            Assert.assertNotNull(refName);
+
+            foundRefNames.add(refName);
+        }
+
+        Assert.assertEquals(new TreeSet<>(references.subList(0, numberFirstReferenceFound)), foundRefNames);
+    }
+
+    @DataProvider(name = "testRows")
+    private Object[][] createDataForRowsTest() {
+        return new Object[][] {
+            // primary alignment only
+            {"SRR1063272", 0, 99, "SRR1063272.R.1",
+                    "ACTCGACATTCTGCCTTCGACCTATCTTTCTCCTCTCCCAGTCATCGCCCAGTAGAATTACCAGGCAATGAACCAGGGCCTTCCATCCCAACGGCACAGCA",
+                    "@@CDDBDFFBFHFIEEFGIGGHIEHIGIGGFGEGAFDHIIIIIGGGDFHII;=BF at FEHGIEEH?AHHFHFFFFDC5'5=?CC?ADCD at AC??9BDDCDB<",
+                    86, "101M", "supercont2.1", 60, true, false},
+
+            // small SRA archive
+            {"SRR2096940", 1, 16, "SRR2096940.R.3",
+                    "GTGTGTCACCAGATAAGGAATCTGCCTAACAGGAGGTGTGGGTTAGACCCAATATCAGGAGACCAGGAAGGAGGAGGCCTAAGGATGGGGCTTTTCTGTCACCAATCCTGTCCCTAGTGGCCCCACTGTGGGGTGGAGGGGACAGATAAAAGTACCCAGAACCAGAG",
+                    "AAAABFFFFFFFGGGGGGGGIIIIIIIIIIIIIIIIIIIIIIIIIIIIII7IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIGGGGGFGFFDFFFFFC",
+                    55627016, "167M", "CM000681.1", 42, false, false},
+
+            {"SRR2096940", 10591, 4, "SRR2096940.R.10592",
+                    "CTCTGGTTCTGGGTACTTTTATCTGTCCCCTCCACCCCACAGTGGCGAGCCAGATTCCTTATCTGGTGACACAC",
+                    "IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII",
+                    -1, null, null, -1, false, false},
+
+            // primary and secondary alignments
+            {"SRR833251", 81, 393, "SRR833251.R.51",
+                    "ATGCAAATCCGAATGGGCTATTTGTGGGTACTTGGGCAGGTAAGTAGCTGGCAATCTTGGTCGGTAAACCAATACCCAAGTTCACATAGGCACCATCGGGA",
+                    "CCCFFFFFHHHHHIJJJIJJJJJIIJJJGIJIJIIJIJJJDGIGIIJIJIHIJJJJJJGIGHIHEDFFFFDDEEEDDDDDCDEEDDDDDDDDDDDDDBBDB",
+                    1787186, "38M63S", "gi|169794206|ref|NC_010410.1|", 11, true, true},
+
+            // local SRA file
+            {"src/test/resources/htsjdk/samtools/sra/test_archive.sra", 1, 99, "test_archive.R.2",
+                    "TGTCGATGCTGAAAGTGTCTGCGGTGAACCACTTCATGCACAGCGCACACTGCAGCTCCACTTCACCCAGCTGACGGCCGTTCTCATCGTCTCCAGAGCCCGTCTGAGCGTCCGCTGCTTCAGAACTGTCCCCGGCTGTATCCTGAAGAC",
+                    "BBAABBBFAFFFGGGGGGGGGGGGEEFHHHHGHHHHHFHHGHFDGGGGGHHGHHHHHHHHHHHHFHHHGHHHHHHGGGGGGGHGGHHHHHHHHHGHHHHHGGGGHGHHHGGGGGGGGGHHHHEHHHHHHHHHHGCGGGHHHHHHGBFFGF",
+                    2811570, "150M", "NC_007121.5", 60, true, false}
+        };
+    }
+
+    @Test(dataProvider = "testRows")
+    public void testRows(String acc, int recordIndex, int flags, String readName, String bases, String quals, int refStart, String cigar,
+                         String refName, int mapQ, boolean hasMate, boolean isSecondaryAlignment) {
+        SAMRecord record = getRecordByIndex(acc, recordIndex, false);
+
+        checkSAMRecord(record, flags, readName, bases, quals, refStart, cigar, refName, mapQ, hasMate, isSecondaryAlignment);
+    }
+
+    @Test(dataProvider = "testRows")
+    public void testRowsAfterIteratorDetach(String acc, int recordIndex, int flags, String readName, String bases, String quals,
+                                            int refStart, String cigar, String refName, int mapQ, boolean hasMate,
+                                            boolean isSecondaryAlignment) {
+        SAMRecord record = getRecordByIndex(acc, recordIndex, true);
+
+        checkSAMRecord(record, flags, readName, bases, quals, refStart, cigar, refName, mapQ, hasMate, isSecondaryAlignment);
+    }
+
+    @Test(dataProvider = "testRows")
+    public void testRowsOverrideValues(String acc, int recordIndex, int flags, String readName, String bases, String quals,
+                                       int refStart, String cigar, String refName, int mapQ, boolean hasMate,
+                                       boolean isSecondaryAlignment) {
+        SAMRecord record = getRecordByIndex(acc, recordIndex, true);
+        SAMFileHeader header = record.getHeader();
+
+
+        record.setFlags(0);
+        record.setReadUnmappedFlag(refStart == -1);
+        record.setReadBases("C".getBytes());
+        record.setBaseQualities(SAMUtils.fastqToPhred("A"));
+        if (refStart == -1) {
+            checkSAMRecord(record, 4, readName, "C", "A", refStart, "1M", refName, mapQ, false, false);
+        } else {
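+            // Pick a reference index different from the record's original one so that the
+            // setReferenceIndex() override below is observable.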
+            int sequenceIndex = header.getSequenceIndex(refName);
+            Assert.assertFalse(sequenceIndex == -1);
+
+            if (sequenceIndex == 0) {
+                if (header.getSequenceDictionary().getSequences().size() > 1) {
+                    sequenceIndex++;
+                }
+            } else {
+                sequenceIndex--;
+            }
+
+            refName = header.getSequence(sequenceIndex).getSequenceName();
+
+            record.setAlignmentStart(refStart - 100);
+            record.setCigarString("1M");
+            record.setMappingQuality(mapQ - 1);
+            record.setReferenceIndex(sequenceIndex);
+
+            checkSAMRecord(record, 0, readName, "C", "A", refStart - 100, "1M", refName, mapQ - 1, false, false);
+        }
+    }
+
+    @Test(dataProvider = "testRows")
+    public void testRowsBySpan(String acc, int recordIndex, int flags, String readName, String bases, String quals,
+                                            int refStart, String cigar, String refName, int mapQ, boolean hasMate,
+                                            boolean isSecondaryAlignment) {
+        SamReader reader = SamReaderFactory.make().validationStringency(ValidationStringency.SILENT).open(
+                SamInputResource.of(new SRAAccession(acc))
+        );
+
+        SAMFileHeader header = reader.getFileHeader();
+
+        Chunk chunk;
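+        // The span coordinates treat all references as one concatenated coordinate space: an aligned
+        // position maps to (sum of lengths of the preceding references) + position, while unaligned
+        // reads fall past the total reference length.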
+        if (refStart != -1) {
+            long refOffset = 0;
+            int refIndex = header.getSequenceDictionary().getSequence(refName).getSequenceIndex();
+            for (SAMSequenceRecord sequenceRecord : header.getSequenceDictionary().getSequences()) {
+                if (sequenceRecord.getSequenceIndex() <  refIndex) {
+                    refOffset += sequenceRecord.getSequenceLength();
+                }
+            }
+
+            chunk = new Chunk(refOffset + refStart - 1, refOffset + refStart);
+        } else {
+            long totalRefLength = header.getSequenceDictionary().getReferenceLength();
+            long totalRecordRange = ((BAMFileSpan)reader.indexing().getFilePointerSpanningReads()).toCoordinateArray()[1];
+            chunk = new Chunk(totalRefLength, totalRecordRange);
+        }
+
+        final SAMRecordIterator samRecordIterator = ((SamReader.Indexing) reader).iterator(new BAMFileSpan(chunk));
+
+        SAMRecord record = null;
+        while (samRecordIterator.hasNext()) {
+            SAMRecord currentRecord = samRecordIterator.next();
+            if (currentRecord.getReadName().equals(readName)) {
+                record = currentRecord;
+                break;
+            }
+        }
+
+        checkSAMRecord(record, flags, readName, bases, quals, refStart, cigar, refName, mapQ, hasMate, isSecondaryAlignment);
+    }
+
+    @Test(dataProvider = "testRows")
+    public void testRowsByIndex(String acc, int recordIndex, int flags, String readName, String bases, String quals,
+                                int refStart, String cigar, String refName, int mapQ, boolean hasMate,
+                                boolean isSecondaryAlignment) {
+        SamReader reader = SamReaderFactory.make().validationStringency(ValidationStringency.SILENT).open(
+                SamInputResource.of(new SRAAccession(acc))
+        );
+
+        Assert.assertTrue(reader.hasIndex());
+        Assert.assertTrue(reader.indexing().hasBrowseableIndex());
+
+        SAMFileHeader header = reader.getFileHeader();
+        BrowseableBAMIndex index = reader.indexing().getBrowseableIndex();
+
+        BAMFileSpan span;
+        if (refStart != -1) {
+            int refIndex = header.getSequenceDictionary().getSequence(refName).getSequenceIndex();
+            span = index.getSpanOverlapping(refIndex, refStart, refStart + 1);
+        } else {
+            long chunkStart = index.getStartOfLastLinearBin();
+            long totalRecordRange = ((BAMFileSpan) reader.indexing().getFilePointerSpanningReads()).toCoordinateArray()[1];
+            span = new BAMFileSpan(new Chunk(chunkStart, totalRecordRange));
+        }
+
+        final SAMRecordIterator samRecordIterator = ((SamReader.Indexing) reader).iterator(span);
+
+        SAMRecord record = null;
+        while (samRecordIterator.hasNext()) {
+            SAMRecord currentRecord = samRecordIterator.next();
+            if (refStart != -1 && currentRecord.getAlignmentStart() + currentRecord.getReadLength() < refStart) {
+                continue;
+            }
+
+            if (currentRecord.getReadName().equals(readName)) {
+                record = currentRecord;
+                break;
+            }
+        }
+
+        checkSAMRecord(record, flags, readName, bases, quals, refStart, cigar, refName, mapQ, hasMate, isSecondaryAlignment);
+    }
+
+    private SAMRecord getRecordByIndex(String acc, int recordIndex, boolean detach) {
+        SamReader reader = SamReaderFactory.make().validationStringency(ValidationStringency.SILENT).open(
+                SamInputResource.of(new SRAAccession(acc))
+        );
+
+        final SAMRecordIterator samRecordIterator = reader.iterator();
+
+        while (recordIndex != 0) {
+            Assert.assertTrue(samRecordIterator.hasNext(), "Record set is too small");
+
+            samRecordIterator.next();
+            recordIndex--;
+        }
+        Assert.assertTrue(samRecordIterator.hasNext(), "Record set is too small");
+
+        SAMRecord record = samRecordIterator.next();
+
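+        // Advance past the record so it is detached from the iterator before its fields are
+        // inspected (exercised by testRowsAfterIteratorDetach).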
+        if (detach) {
+            samRecordIterator.next();
+        }
+
+        return record;
+    }
+
+    private void checkSAMRecord(SAMRecord record, int flags, String readName, String bases, String quals,
+                                int refStart, String cigar, String refName, int mapQ, boolean hasMate,
+                                boolean isSecondaryAlignment) {
+
+        Assert.assertNotNull(record, "Record with read id: " + readName + " was not found by span created from index");
+
+        List<SAMValidationError> validationErrors = record.isValid();
+        Assert.assertNull(validationErrors, "SRA Lazy record is invalid. List of errors: " +
+                (validationErrors != null ? validationErrors.toString() : ""));
+
+        Assert.assertEquals(new String(record.getReadBases()), bases);
+        Assert.assertEquals(record.getBaseQualityString(), quals);
+        Assert.assertEquals(record.getReadPairedFlag(), hasMate);
+        Assert.assertEquals(record.getFlags(), flags);
+        Assert.assertEquals(record.getNotPrimaryAlignmentFlag(), isSecondaryAlignment);
+        if (refStart == -1) {
+            Assert.assertEquals(record.getReadUnmappedFlag(), true);
+            Assert.assertEquals(record.getAlignmentStart(), 0);
+            Assert.assertEquals(record.getCigarString(), "*");
+            Assert.assertEquals(record.getReferenceName(), "*");
+            Assert.assertEquals(record.getMappingQuality(), 0);
+        } else {
+            Assert.assertEquals(record.getReadUnmappedFlag(), false);
+            Assert.assertEquals(record.getAlignmentStart(), refStart);
+            Assert.assertEquals(record.getCigarString(), cigar);
+            Assert.assertEquals(record.getReferenceName(), refName);
+            Assert.assertEquals(record.getMappingQuality(), mapQ);
+        }
+    }
+
+}
diff --git a/src/tests/java/htsjdk/samtools/util/AsyncBufferedIteratorTest.java b/src/test/java/htsjdk/samtools/util/AsyncBufferedIteratorTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/util/AsyncBufferedIteratorTest.java
rename to src/test/java/htsjdk/samtools/util/AsyncBufferedIteratorTest.java
diff --git a/src/tests/java/htsjdk/samtools/util/BinaryCodecTest.java b/src/test/java/htsjdk/samtools/util/BinaryCodecTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/util/BinaryCodecTest.java
rename to src/test/java/htsjdk/samtools/util/BinaryCodecTest.java
diff --git a/src/tests/java/htsjdk/samtools/util/BlockCompressedFilePointerUtilTest.java b/src/test/java/htsjdk/samtools/util/BlockCompressedFilePointerUtilTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/util/BlockCompressedFilePointerUtilTest.java
rename to src/test/java/htsjdk/samtools/util/BlockCompressedFilePointerUtilTest.java
diff --git a/src/test/java/htsjdk/samtools/util/BlockCompressedOutputStreamTest.java b/src/test/java/htsjdk/samtools/util/BlockCompressedOutputStreamTest.java
new file mode 100644
index 0000000..b988415
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/util/BlockCompressedOutputStreamTest.java
@@ -0,0 +1,165 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools.util;
+
+import htsjdk.samtools.util.zip.DeflaterFactory;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Random;
+import java.util.zip.Deflater;
+
+public class BlockCompressedOutputStreamTest {
+
+    @Test
+    public void testBasic() throws Exception {
+        final File f = File.createTempFile("BCOST.", ".gz");
+        f.deleteOnExit();
+        final List<String> linesWritten = new ArrayList<String>();
+        System.out.println("Creating file " + f);
+        final BlockCompressedOutputStream bcos = new BlockCompressedOutputStream(f);
+        String s = "Hi, Mom!\n";
+        bcos.write(s.getBytes());
+        linesWritten.add(s);
+        s = "Hi, Dad!\n";
+        bcos.write(s.getBytes());
+        linesWritten.add(s);
+        bcos.flush();
+        final StringBuilder sb = new StringBuilder(BlockCompressedStreamConstants.DEFAULT_UNCOMPRESSED_BLOCK_SIZE * 2);
+        s = "1234567890123456789012345678901234567890123456789012345678901234567890\n";
+        while (sb.length() <= BlockCompressedStreamConstants.DEFAULT_UNCOMPRESSED_BLOCK_SIZE) {
+            sb.append(s);
+            linesWritten.add(s);
+        }
+        bcos.write(sb.toString().getBytes());
+        bcos.close();
+        final BlockCompressedInputStream bcis = new BlockCompressedInputStream(f);
+        final BufferedReader reader = new BufferedReader(new InputStreamReader(bcis));
+        String line;
+        for(int i = 0; (line = reader.readLine()) != null; ++i) {
+            Assert.assertEquals(line + "\n", linesWritten.get(i));
+        }
+        bcis.close();
+        final BlockCompressedInputStream bcis2 = new BlockCompressedInputStream(f);
+        int available = bcis2.available();
+        Assert.assertFalse(bcis2.endOfBlock(), "Should not be at end of block");
+        Assert.assertTrue(available > 0);
+        byte[] buffer = new byte[available];
+        Assert.assertEquals(bcis2.read(buffer), available, "Should read to end of block");
+        Assert.assertTrue(bcis2.endOfBlock(), "Should be at end of block");
+        bcis2.close();
+    }
+
+    @Test
+    public void testOverflow() throws Exception {
+        final File f = File.createTempFile("BCOST.", ".gz");
+        f.deleteOnExit();
+        final List<String> linesWritten = new ArrayList<String>();
+        System.out.println("Creating file " + f);
+        final BlockCompressedOutputStream bcos = new BlockCompressedOutputStream(f);
+        Random r = new Random(15555);
+        final int INPUT_SIZE = 64 * 1024;
+        byte[] input = new byte[INPUT_SIZE];
+        r.nextBytes(input);
+        bcos.write(input);
+        bcos.close();
+
+        final BlockCompressedInputStream bcis = new BlockCompressedInputStream(f);
+        byte[] output = new byte[INPUT_SIZE];
+        int len;
+        int i = 0;
+        while ((len = bcis.read(output, 0, output.length)) != -1) {
+            for (int j = 0; j < len; j++) {
+               Assert.assertEquals(output[j], input[i++]);
+            }
+        }
+        Assert.assertEquals(i, INPUT_SIZE);
+        bcis.close();
+    }
+
+    // PIC-393: exception when closing a BGZF stream opened to /dev/null.
+    // Not expected to work on Windows, where /dev/null does not exist.
+    @Test(groups = "broken")
+    public void testDevNull() throws Exception {
+        final BlockCompressedOutputStream bcos = new BlockCompressedOutputStream("/dev/null");
+        bcos.write("Hi, Mom!".getBytes());
+        bcos.close();
+    }
+
+    @Test
+    public void testCustomDeflater() throws Exception {
+        final File f = File.createTempFile("testCustomDeflater.", ".gz");
+        f.deleteOnExit();
+        System.out.println("Creating file " + f);
+
+        final int[] deflateCalls = {0}; // Note: a single-element array is a hack that lets the anonymous class below mutate the counter (captured locals must be effectively final)
+
+        class MyDeflater extends Deflater{
+            MyDeflater(int level, boolean nowrap){
+                super(level, nowrap);
+            }
+            @Override
+            public int deflate(byte[] b, int off, int len) {
+                deflateCalls[0]++;
+                return super.deflate(b, off, len);
+            }
+
+        }
+        final DeflaterFactory myDeflaterFactory = new DeflaterFactory() {
+            public Deflater makeDeflater(final int compressionLevel, final boolean nowrap) {
+                return new MyDeflater(compressionLevel, nowrap);
+            }
+        };
+        final List<String> linesWritten = new ArrayList<>();
+        final BlockCompressedOutputStream bcos = new BlockCompressedOutputStream(f, 5, myDeflaterFactory);
+        String s = "Hi, Mom!\n";
+        bcos.write(s.getBytes()); //Call 1
+        linesWritten.add(s);
+        s = "Hi, Dad!\n";
+        bcos.write(s.getBytes()); //Call 2
+        linesWritten.add(s);
+        bcos.flush();
+        final StringBuilder sb = new StringBuilder(BlockCompressedStreamConstants.DEFAULT_UNCOMPRESSED_BLOCK_SIZE * 2);
+        s = "1234567890123456789012345678901234567890123456789012345678901234567890\n";
+        while (sb.length() <= BlockCompressedStreamConstants.DEFAULT_UNCOMPRESSED_BLOCK_SIZE) {
+            sb.append(s);
+            linesWritten.add(s);
+        }
+        bcos.write(sb.toString().getBytes()); //Call 3
+        bcos.close();
+        final BlockCompressedInputStream bcis = new BlockCompressedInputStream(f);
+        final BufferedReader reader = new BufferedReader(new InputStreamReader(bcis));
+        String line;
+        for(int i = 0; (line = reader.readLine()) != null; ++i) {
+            Assert.assertEquals(line + "\n", linesWritten.get(i));
+        }
+        bcis.close();
+        Assert.assertEquals(deflateCalls[0], 3, "deflate calls");
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/util/BlockCompressedTerminatorTest.java b/src/test/java/htsjdk/samtools/util/BlockCompressedTerminatorTest.java
new file mode 100644
index 0000000..5b58372
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/util/BlockCompressedTerminatorTest.java
@@ -0,0 +1,59 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools.util;
+
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.File;
+
+/**
+ * @author alecw at broadinstitute.org
+ */
+public class BlockCompressedTerminatorTest {
+    private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools/util");
+
+    @Test
+    public void testFileWithTerminator() throws Exception {
+        final File tmpCompressedFile = File.createTempFile("test.", ".bgzf");
+        tmpCompressedFile.deleteOnExit();
+        final BlockCompressedOutputStream os = new BlockCompressedOutputStream(tmpCompressedFile);
+        os.write("Hi, Mom!\n".getBytes());
+        os.close();
+        Assert.assertEquals(BlockCompressedInputStream.checkTermination(tmpCompressedFile),
+                BlockCompressedInputStream.FileTermination.HAS_TERMINATOR_BLOCK);
+    }
+
+    @Test
+    public void testValidFileWithoutTerminator() throws Exception {
+        Assert.assertEquals(BlockCompressedInputStream.checkTermination(new File(TEST_DATA_DIR, "no_bgzf_terminator.bam")),
+                BlockCompressedInputStream.FileTermination.HAS_HEALTHY_LAST_BLOCK);
+    }
+
+    @Test
+    public void testDefectiveFile() throws Exception {
+        Assert.assertEquals(BlockCompressedInputStream.checkTermination(new File(TEST_DATA_DIR, "defective_bgzf.bam")),
+                BlockCompressedInputStream.FileTermination.DEFECTIVE);
+    }
+}
diff --git a/src/tests/java/htsjdk/samtools/util/CigarUtilTest.java b/src/test/java/htsjdk/samtools/util/CigarUtilTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/util/CigarUtilTest.java
rename to src/test/java/htsjdk/samtools/util/CigarUtilTest.java
diff --git a/src/tests/java/htsjdk/samtools/util/CloseableIteratorTest.java b/src/test/java/htsjdk/samtools/util/CloseableIteratorTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/util/CloseableIteratorTest.java
rename to src/test/java/htsjdk/samtools/util/CloseableIteratorTest.java
diff --git a/src/tests/java/htsjdk/samtools/util/CodeUtilTest.java b/src/test/java/htsjdk/samtools/util/CodeUtilTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/util/CodeUtilTest.java
rename to src/test/java/htsjdk/samtools/util/CodeUtilTest.java
diff --git a/src/tests/java/htsjdk/samtools/util/ComparableTupleTest.java b/src/test/java/htsjdk/samtools/util/ComparableTupleTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/util/ComparableTupleTest.java
rename to src/test/java/htsjdk/samtools/util/ComparableTupleTest.java
diff --git a/src/tests/java/htsjdk/samtools/util/CoordSpanInputSteamTest.java b/src/test/java/htsjdk/samtools/util/CoordSpanInputSteamTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/util/CoordSpanInputSteamTest.java
rename to src/test/java/htsjdk/samtools/util/CoordSpanInputSteamTest.java
diff --git a/src/tests/java/htsjdk/samtools/util/DiskBackedQueueTest.java b/src/test/java/htsjdk/samtools/util/DiskBackedQueueTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/util/DiskBackedQueueTest.java
rename to src/test/java/htsjdk/samtools/util/DiskBackedQueueTest.java
diff --git a/src/test/java/htsjdk/samtools/util/HistogramTest.java b/src/test/java/htsjdk/samtools/util/HistogramTest.java
new file mode 100644
index 0000000..62b1441
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/util/HistogramTest.java
@@ -0,0 +1,366 @@
+package htsjdk.samtools.util;
+
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.HashSet;
+
+import static java.lang.Math.abs;
+import static java.lang.StrictMath.pow;
+
+public class HistogramTest {
+
+    @Test(dataProvider = "histogramData")
+    public void testHistogramFunctions(final int[] values, final double mean, final double stdev, final Integer trimByWidth) {
+        final Histogram<Integer> histo = new Histogram<>();
+        for (int value : values) {
+            histo.increment(value);
+        }
+
+        if (trimByWidth != null) histo.trimByWidth(trimByWidth);
+        final double m = histo.getMean();
+        final double sd = histo.getStandardDeviation();
+
+        Assert.assertEquals(round(mean), round(m), "Means are not equal");
+        Assert.assertEquals(round(stdev), round(sd), "Stdevs are not equal");
+    }
+
+    @DataProvider(name = "histogramData")
+    public Object[][] histogramData() {
+        return new Object[][] {
+            new Object[] {new int[] {1,2,3,4,5,6,7,8,9,10} , 5.5d, 3.027650d, null },
+            new Object[] {new int[] {1,2,2,3,3,3,4,4,4,4,5,5,5,5,5,6,6,6,6,6,6,7,7,7,7,7,7,7,8,8,8,8,8,8,8,8,9,9,9,9,9,9,9,9,9}, 6.333333d, 2.236068d, null  },
+            new Object[] {new int[] {-5, -4, -3, -2, -1,  0,  1,  2,  3,  4,  5,  6,  7,  8,  9, 10, 11, 12, 13, 14, 15}, 5d, 6.204837d, null  },
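                // the rows below append values beyond the trim width; trimByWidth() is expected to discard them, reproducing the mean/stdev of the corresponding rows above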
+                new Object[] {new int[] {1,2,3,4,5,6,7,8,9,10, 11, 11, 12, 100, 1000} , 5.5d, 3.027650d, 10 },
+                new Object[] {new int[] {1,2,2,3,3,3,4,4,4,4,5,5,5,5,5,6,6,6,6,6,6,7,7,7,7,7,7,7,8,8,8,8,8,8,8,8,9,9,9,9,9,9,9,9,9, 20, 20, 21, 25, 25}, 6.333333d, 2.236068d, 11  },
+                new Object[] {new int[] {-5, -4, -3, -2, -1,  0,  1,  2,  3,  4,  5,  6,  7,  8,  9, 10, 11, 12, 13, 14, 15, 101, 102, 103, 200, 2000}, 5d, 6.204837d, 20  }
+        };
+    }
+
+    @Test
+    public void testGeometricMean() {
+        final int[] is = {4,4,4,4,5,5,5,5,6,6,6,6,6,6,7,7,7,7,7,7,7,8,8,8,8,8,8,8,8};
+        final Histogram<Integer> histo = new Histogram<>();
+        for (final int i : is) histo.increment(i);
+        Assert.assertTrue(abs(histo.getGeometricMean() - 6.216797) < 0.00001);
+    }
+
+    @Test
+    public void testGetSum() {
+        final int[] is = {4,4,5,5,5};
+        final Histogram<Integer> histo = new Histogram<>();
+        for (final int i : is) histo.increment(i);
+        Assert.assertEquals(histo.getSum(), (double)(2*4+3*5), 0.000001);
+    }
+
+    @Test(expectedExceptions = UnsupportedOperationException.class)
+    public void testGetSumBlowup() {
+        final String[] is = {"foo", "foo", "bar"};
+        final Histogram<String> histo = new Histogram<>();
+        for (final String i : is) histo.increment(i);
+        histo.getSum();//blow up
+    }
+
+    @Test
+    public void testGetSumOfValues() {
+        final int[] is = {4,4,5,5,5};
+        final Histogram<Integer> histo = new Histogram<>();
+        for (final int i : is) histo.increment(i);
+        Assert.assertEquals(histo.getSumOfValues(), (double)(2+3), 0.000001);
+    }
+
+    @Test
+    public void testGetMeanBinSize() {
+        final int[] is = {4,4,5,5,5};
+        final Histogram<Integer> histo = new Histogram<>();
+        for (final int i : is) histo.increment(i);
+        Assert.assertEquals(histo.getMeanBinSize(), (2+3)/2.0, 0.000001);
+    }
+
+    @Test
+    public void testGetStandardDeviationBinSize() {
+        final int[] is = {4,4,5,5,5};
+        final Histogram<Integer> histo = new Histogram<>();
+        for (final int i : is) histo.increment(i);
+        final double std = Math.sqrt((pow(2.0-2.5, 2)+pow(3.0-2.5, 2.0))); // sample variance of the bin sizes {2, 3}: divide by n - 1 = 1
+        Assert.assertEquals(histo.getStandardDeviationBinSize(histo.getMeanBinSize()), std, 0.000001);
+    }
+
+    @Test
+    public void testGetKeySet() {
+        final int[] is = {4,4,5,5,5};
+        final Histogram<Integer> histo = new Histogram<>();
+        for (final int i : is) histo.increment(i);
+
+        Assert.assertEquals(histo.keySet(), new HashSet<>(Arrays.asList(4,5)));
+    }
+
+    @Test
+    public void testLabelsAndComparator() {
+        final String[] is = {"a", "B", "a"};
+        final Histogram<String> histo = new Histogram<>("FOO", "BAR", String.CASE_INSENSITIVE_ORDER);
+        for (final String i : is) histo.increment(i);
+        Assert.assertEquals(histo.get("a").getValue(), 2.0);
+        Assert.assertEquals(histo.get("B").getValue(), 1.0);
+        Assert.assertEquals(histo.get("a").getId(), "a");
+        Assert.assertEquals(histo.get("B").getId(), "B");
+    }
+
+
+    @Test
+    public void testPrefillBins() {
+        final int[] is = {4,4,5,5,5};
+        final Histogram<Integer> histo = new Histogram<>();
+        Assert.assertEquals(histo.get(4), null);
+        Assert.assertEquals(histo.get(5), null);
+        histo.prefillBins(4);
+        Assert.assertEquals(histo.get(4).getValue(),0.0);
+        Assert.assertEquals(histo.get(5), null);
+
+        for (final int i : is) histo.increment(i);
+        Assert.assertEquals(histo.get(4).getValue(),2.0);
+        Assert.assertEquals(histo.get(5).getValue(),3.0);
+    }
+
+    @Test
+    public void testLabels() {
+        final int[] is = {4,4,5,5,5};
+        final Histogram<Integer> histo = new Histogram<>("FOO", "BAR");
+        for (final int i : is) histo.increment(i);
+        Assert.assertEquals(histo.getBinLabel(),"FOO");
+        Assert.assertEquals(histo.getValueLabel(),"BAR");
+    }
+
+    @Test
+    public void testCopyCtor() {
+        final int[] is = {4,4,5,5,5};
+        final Histogram<Integer> histo1 = new Histogram<>();
+        for (final int i : is) histo1.increment(i);
+
+        final Histogram<Integer> histo2 = new Histogram<>(histo1);
+        Assert.assertEquals(histo1, histo2);
+        Assert.assertEquals(histo2, histo1);
+    }
+
+    @Test
+    public void testGet() {
+        final int[] is = {4,4,5,5,5};
+        final Histogram<Integer> histo = new Histogram<>();
+        for (final int i : is) histo.increment(i);
+
+        Assert.assertEquals(histo.get(4).getValue(), 2.0);
+        Assert.assertEquals(histo.get(5).getValue(), 3.0);
+        Assert.assertEquals(histo.get(6), null);
+    }
+
+    @Test
+    public void testAddHistogram() {
+        final int[] is1 = {4,4,5,5,5};
+        final Histogram<Integer> histo1 = new Histogram<>();
+        Assert.assertTrue(histo1.isEmpty());
+        for (final int i : is1) histo1.increment(i);
+
+        Assert.assertFalse(histo1.isEmpty());
+
+        final int[] is2 = {5,5, 6,6,6,6};
+        final Histogram<Integer> histo2 = new Histogram<>();
+        for (final int i : is2) histo2.increment(i);
+
+        Assert.assertEquals(histo1.get(4).getValue(), 2.0);
+        Assert.assertEquals(histo1.get(5).getValue(), 3.0);
+        Assert.assertEquals(histo1.get(6), null);
+
+        histo1.addHistogram(histo2);
+
+        Assert.assertEquals(histo1.get(4).getValue(), 2.0);
+        Assert.assertEquals(histo1.get(5).getValue(), 5.0);
+        Assert.assertEquals(histo1.get(6).getValue(), 4.0);
+    }
+
+    @Test
+    public void testGetCumulativeProbability() {
+        final int[] is = {4,4,5,5,5,6,6,6,6};
+        final Histogram<Integer> histo = new Histogram<>();
+        for (final int i : is) histo.increment(i);
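+        // nine observations in total: two 4s, three 5s, four 6s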
+        Assert.assertEquals(histo.getCumulativeProbability(2.0), 0.0);
+        Assert.assertEquals(histo.getCumulativeProbability(4.0), 2.0/9);
+        Assert.assertEquals(histo.getCumulativeProbability(5.0), 5.0/9);
+        Assert.assertEquals(histo.getCumulativeProbability(6.0), 9.0/9);
+    }
+
+    @Test(expectedExceptions = UnsupportedOperationException.class)
+    public void testGetCumulativeProbabilityBlowup() {
+        final String[] is = {"foo"};
+        final Histogram<String> histo = new Histogram<>();
+        for (final String i : is) histo.increment(i);
+        histo.getCumulativeProbability(2.0);
+    }
+
+    @Test
+    public void testPercentile() {
+        final int[] is = {4,4,5,5,5,6,6,6,6};
+        final Histogram<Integer> histo = new Histogram<>();
+        for (final int i : is) histo.increment(i);
+        Assert.assertEquals(histo.getPercentile(0.01), 4.0);
+        Assert.assertEquals(histo.getPercentile(2.0/9), 4.0);
+        Assert.assertEquals(histo.getPercentile(5.0/9), 5.0);
+        Assert.assertEquals(histo.getPercentile(0.99999), 6.0);
+    }
+
+    @Test
+    public void testGetMinMax() {
+        final int[] is = {4,4,5,5,5,6,6,6,6};
+        final Histogram<Integer> histo = new Histogram<>();
+        for (final int i : is) histo.increment(i);
+        Assert.assertEquals(histo.getMin(), 4.0);
+        Assert.assertEquals(histo.getMax(), 6.0);
+    }
+
+    @Test(expectedExceptions = UnsupportedOperationException.class)
+    public void testGetMinBlowup() {
+        final String[] is = {"foo", "bar", "bar"};
+        final Histogram<String> histo = new Histogram<>();
+        for (final String i : is) histo.increment(i);
+        histo.getMin();//blow up
+    }
+
+    @Test(expectedExceptions = UnsupportedOperationException.class)
+    public void testGetMaxBlowup() {
+        final String[] is = {"foo", "bar", "bar"};
+        final Histogram<String> histo = new Histogram<>();
+        for (final String i : is) histo.increment(i);
+        histo.getMax();//blow up
+    }
+
+    @Test
+    public void testGetMedianBinSize() {
+        final int[] is = {4,4,5,5,5,6,6,6,6};
+        final Histogram<Integer> histo = new Histogram<>();
+        Assert.assertEquals(histo.getMedianBinSize(), 0, 0.000001); //empty
+        for (final int i : is) histo.increment(i);
+        Assert.assertEquals(histo.getMedianBinSize(), 3, 0.000001); //three fives
+    }
+
+    @Test
+    public void testGetMedianBinSize_Even() {
+        final int[] is = {4,4,5,5,5};
+        final Histogram<Integer> histo = new Histogram<>();
+        Assert.assertEquals(histo.getMedianBinSize(), 0, 0.000001); //empty
+        for (final int i : is) histo.increment(i);
+        Assert.assertEquals(histo.getMedianBinSize(), (2+3)/2.0, 0.000001); //even split
+    }
+
+    @Test
+    public void testSize() {
+        final int[] is = {4,4,5,5,5};
+        final Histogram<Integer> histo = new Histogram<>();
+        for (final int i : is) histo.increment(i);
+        Assert.assertEquals(histo.size(), 2); //2 unique values
+    }
+
+
+    @Test
+    public void testMode() {
+        final int[] is = {4,4,5,5,5,6};
+        final Histogram<Integer> histo = new Histogram<>();
+        for (final int i : is) histo.increment(i);
+        Assert.assertEquals(histo.getMode(), 5.0);
+    }
+
+    @Test(expectedExceptions = UnsupportedOperationException.class)
+    public void testModeBlowup() {
+        final String[] is = {"foo"};
+        final Histogram<String> histo = new Histogram<>();
+        for (final String i : is) histo.increment(i);
+        histo.getMode();//blowup
+    }
+
+    @Test
+    public void testComparator() {
+        final int[] is = {4,4,5,5,5};
+        final Histogram<Integer> histo1 = new Histogram<>();
+        for (final int i : is) histo1.increment(i);
+        Assert.assertNull(histo1.comparator());
+
+        final Histogram<Integer> histo2 = new Histogram<>(Comparator.comparingInt(Integer::intValue));
+        Comparator<Integer> comp = (Comparator<Integer>) histo2.comparator();
+        Assert.assertNotNull(comp);
+        Assert.assertEquals(comp.compare(4,5), -1);
+    }
+
+    @Test
+    public void testEquals() {
+        final int[] is = {4,4,4,4,5,5,5,5,6,6,6,6,6,6,7,7,7,7,7,7,7,8,8,8,8,8,8,8,8};
+        final Histogram<Integer> histo1 = new Histogram<>();
+        final Histogram<Integer> histo2 = new Histogram<>();
+        for (final int i : is) histo1.increment(i);
+        for (final int i : is) histo2.increment(i);
+        Assert.assertEquals(histo1, histo1);
+        Assert.assertEquals(histo2, histo1);
+        Assert.assertEquals(histo1, histo2);
+
+        Assert.assertEquals(histo1.hashCode(), histo2.hashCode());
+
+        Assert.assertNotEquals(null, histo1);
+        Assert.assertNotEquals(histo1, null);
+
+        histo2.increment(4);//make them not equal
+        Assert.assertEquals(histo1, histo1);
+        Assert.assertNotEquals(histo2, histo1);
+        Assert.assertNotEquals(histo1, histo2);
+        Assert.assertNotEquals(histo1.hashCode(), histo2.hashCode());
+
+
+    }
+
+    @Test(dataProvider = "medianTestData")
+    public void testMedian(final int [] values, final double median) {
+        final Histogram<Integer> histo = new Histogram<>();
+        for (final int i : values) histo.increment(i);
+        Assert.assertEquals(histo.getMedian(), median);
+    }
+
+    @DataProvider(name = "medianTestData")
+    public Object[][] medianTestData() {
+        return new Object[][] {
+                new Object[] {new int[] {} , 0d},
+                new Object[] {new int[] {999} , 999d},
+                new Object[] {new int[] {1,2,3,4,5,6} , 3.5d},
+                new Object[] {new int[] {5,5,5,5,5,6,6} , 5d},
+                new Object[] {new int[] {5,5,5,5,5,6,6,6,6,6} , 5.5d},
+        };
+    }
+
+    @Test
+    public void testMad() {
+        final int[] is = {4,4,4,4,5,5,5,5,6,6,6,6,6,6,7,7,7,7,7,7,7,8,8,8,8,8,8,8,8};
+        final Histogram<Integer> histo = new Histogram<>();
+        for (final int i : is) histo.increment(i);
+
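+        // estimateSdViaMad() presumably scales the MAD by the usual normal-consistency factor (~1.4826), consistent with the assertions below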
+        Assert.assertEquals(7d, histo.getMedian());
+        Assert.assertEquals(1d, histo.getMedianAbsoluteDeviation());
+        Assert.assertTrue(abs(histo.estimateSdViaMad() - 1.4826) < 0.0001);
+    }
+
+
+    @Test(dataProvider = "histogramData") //this data provider has several extra variables that we don't make use of here
+    public void testSerializeHistogram(final int[] values, final double mean, final double stdev, final Integer trimByWidth) throws IOException, ClassNotFoundException {
+        final Histogram<Integer> histo = new Histogram<>();
+        for (int value : values) {
+            histo.increment(value);
+        }
+
+        Histogram<Integer> deserializedHistogram = TestUtil.serializeAndDeserialize(histo);
+        Assert.assertEquals(deserializedHistogram, histo);
+    }
+
+    private double round(final double in) {
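+        // truncates (rather than rounds) to four decimal places; sufficient for the approximate comparisons above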
+        long l = (long) (in * 10000);
+        return l / 10000d;
+    }
+
+}
diff --git a/src/test/java/htsjdk/samtools/util/IntervalListTest.java b/src/test/java/htsjdk/samtools/util/IntervalListTest.java
new file mode 100644
index 0000000..6c5fcd4
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/util/IntervalListTest.java
@@ -0,0 +1,520 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2014 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+
+package htsjdk.samtools.util;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMSequenceRecord;
+import htsjdk.variant.vcf.VCFFileReader;
+import org.testng.Assert;
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.SortedSet;
+import java.util.TreeSet;
+
+/**
+ * Tests the IntervalList class
+ */
+public class IntervalListTest {
+
+    final SAMFileHeader fileHeader;
+    final IntervalList list1, list2, list3;
+
+    public IntervalListTest() {
+        fileHeader = IntervalList.fromFile(new File("src/test/resources/htsjdk/samtools/intervallist/IntervalListchr123_empty.interval_list")).getHeader();
+        fileHeader.setSortOrder(SAMFileHeader.SortOrder.unsorted);
+
+        list1 = new IntervalList(fileHeader);
+        list2 = new IntervalList(fileHeader);
+        list3 = new IntervalList(fileHeader);
+
+        list1.add(new Interval("1", 1, 100));     //de-facto: 1:1-200 1:202-300     2:100-150 2:200-300
+        list1.add(new Interval("1", 101, 200));
+        list1.add(new Interval("1", 202, 300));
+        list1.add(new Interval("2", 200, 300));
+        list1.add(new Interval("2", 100, 150));
+
+        list2.add(new Interval("1", 50, 150));   //de-facto 1:50-150 1:301-500      2:1-150 2:250-270 2:290-400
+        list2.add(new Interval("1", 301, 500));
+        list2.add(new Interval("2", 1, 150));
+        list2.add(new Interval("2", 250, 270));
+        list2.add(new Interval("2", 290, 400));
+
+        list3.add(new Interval("1", 25, 400));    //de-facto 1:25-400                2:200-600                            3:50-470
+        list3.add(new Interval("2", 200, 600));
+        list3.add(new Interval("3", 50, 470));
+    }
+
+    @DataProvider(name = "intersectData")
+    public Object[][] intersectData() {
+        final IntervalList intersect123 = new IntervalList(fileHeader);
+        final IntervalList intersect12 = new IntervalList(fileHeader);
+        final IntervalList intersect13 = new IntervalList(fileHeader);
+        final IntervalList intersect23 = new IntervalList(fileHeader);
+
+        intersect123.add(new Interval("1", 50, 150));
+        intersect123.add(new Interval("2", 250, 270));
+        intersect123.add(new Interval("2", 290, 300));
+
+        intersect12.add(new Interval("1", 50, 150));
+        intersect12.add(new Interval("2", 100, 150));
+        intersect12.add(new Interval("2", 250, 270));
+        intersect12.add(new Interval("2", 290, 300));
+
+        intersect13.add(new Interval("1", 25, 200));
+        intersect13.add(new Interval("1", 202, 300));
+        intersect13.add(new Interval("2", 200, 300));
+
+        intersect23.add(new Interval("1", 50, 150));
+        intersect23.add(new Interval("1", 301, 400));
+        intersect23.add(new Interval("2", 250, 270));
+        intersect23.add(new Interval("2", 290, 400));
+
+        return new Object[][]{
+                new Object[]{Arrays.asList(list1, list2, list3), intersect123},
+                new Object[]{Arrays.asList(list1, list2), intersect12},
+                new Object[]{Arrays.asList(list2, list1), intersect12},
+                new Object[]{Arrays.asList(list2, list3), intersect23},
+                new Object[]{Arrays.asList(list3, list2), intersect23},
+                new Object[]{Arrays.asList(list1, list3), intersect13},
+                new Object[]{Arrays.asList(list3, list1), intersect13}
+        };
+    }
+
+    @Test(dataProvider = "intersectData")
+    public void testIntersectIntervalLists(final List<IntervalList> lists, final IntervalList list) {
+        Assert.assertEquals(
+                CollectionUtil.makeCollection(IntervalList.intersection(lists).iterator()),
+                CollectionUtil.makeCollection(list.iterator()));
+    }
+
+    @DataProvider(name = "mergeData")
+    public Object[][] mergeData() {
+        final IntervalList merge123 = new IntervalList(fileHeader);
+        final IntervalList merge12 = new IntervalList(fileHeader);
+        final IntervalList merge23 = new IntervalList(fileHeader);
+        final IntervalList merge13 = new IntervalList(fileHeader);
+
+        merge123.add(new Interval("1", 1, 100));     //de-facto: 1:1-200 1:202-300     2:100-150 2:200-300
+        merge123.add(new Interval("1", 101, 200));
+        merge123.add(new Interval("1", 202, 300));
+        merge123.add(new Interval("2", 200, 300));
+        merge123.add(new Interval("2", 100, 150));
+
+        merge123.add(new Interval("1", 50, 150));   //de-facto 1:50-150 1:301-500      2:1-150 2:250-270 2:290-400
+        merge123.add(new Interval("1", 301, 500));
+        merge123.add(new Interval("2", 1, 150));
+        merge123.add(new Interval("2", 250, 270));
+        merge123.add(new Interval("2", 290, 400));
+
+        merge123.add(new Interval("1", 25, 400));    //de-facto 1:25-400                2:200-600                            3:50-470
+        merge123.add(new Interval("2", 200, 600));
+        merge123.add(new Interval("3", 50, 470));
+
+        merge12.add(new Interval("1", 1, 100));     //de-facto: 1:1-200 1:202-300     2:100-150 2:200-300
+        merge12.add(new Interval("1", 101, 200));
+        merge12.add(new Interval("1", 202, 300));
+        merge12.add(new Interval("2", 200, 300));
+        merge12.add(new Interval("2", 100, 150));
+
+        merge12.add(new Interval("1", 50, 150));   //de-facto 1:50-150 1:301-500      2:1-150 2:250-270 2:290-400
+        merge12.add(new Interval("1", 301, 500));
+        merge12.add(new Interval("2", 1, 150));
+        merge12.add(new Interval("2", 250, 270));
+        merge12.add(new Interval("2", 290, 400));
+
+        merge23.add(new Interval("1", 50, 150));   //de-facto 1:50-150 1:301-500      2:1-150 2:250-270 2:290-400
+        merge23.add(new Interval("1", 301, 500));
+        merge23.add(new Interval("2", 1, 150));
+        merge23.add(new Interval("2", 250, 270));
+        merge23.add(new Interval("2", 290, 400));
+
+        merge23.add(new Interval("1", 25, 400));    //de-facto 1:25-400                2:200-600                            3:50-470
+        merge23.add(new Interval("2", 200, 600));
+        merge23.add(new Interval("3", 50, 470));
+
+        merge13.add(new Interval("1", 1, 100));     //de-facto: 1:1-200 1:202-300     2:100-150 2:200-300
+        merge13.add(new Interval("1", 101, 200));
+        merge13.add(new Interval("1", 202, 300));
+        merge13.add(new Interval("2", 200, 300));
+        merge13.add(new Interval("2", 100, 150));
+
+        merge13.add(new Interval("1", 25, 400));    //de-facto 1:25-400                2:200-600                            3:50-470
+        merge13.add(new Interval("2", 200, 600));
+        merge13.add(new Interval("3", 50, 470));
+
+        return new Object[][]{
+                new Object[]{Arrays.asList(list1, list2, list3), merge123},
+                new Object[]{Arrays.asList(list1, list2), merge12},
+                new Object[]{Arrays.asList(list2, list3), merge23},
+                new Object[]{Arrays.asList(list1, list3), merge13}
+        };
+    }
+
+    @Test(dataProvider = "mergeData")
+    public void testMergeIntervalLists(final List<IntervalList> lists, final IntervalList list) {
+        Assert.assertEquals(
+                CollectionUtil.makeCollection(IntervalList.concatenate(lists).iterator()),
+                CollectionUtil.makeCollection(list.iterator()));
+    }
+
+    @DataProvider(name = "unionData")
+    public Object[][] unionData() {
+        final IntervalList union123 = new IntervalList(fileHeader);
+        final IntervalList union12 = new IntervalList(fileHeader);
+        final IntervalList union13 = new IntervalList(fileHeader);
+        final IntervalList union23 = new IntervalList(fileHeader);
+
+        union123.add(new Interval("1", 1, 500));
+        union123.add(new Interval("2", 1, 150));
+        union123.add(new Interval("2", 200, 600));
+        union123.add(new Interval("3", 50, 470));
+
+        union12.add(new Interval("1", 1, 200));
+        union12.add(new Interval("1", 202, 500));
+        union12.add(new Interval("2", 1, 150));
+        union12.add(new Interval("2", 200, 400));
+
+        union23.add(new Interval("1", 25, 500));
+        union23.add(new Interval("2", 1, 150));
+        union23.add(new Interval("2", 200, 600));
+        union23.add(new Interval("3", 50, 470));
+
+        union13.add(new Interval("1", 1, 400));
+        union13.add(new Interval("2", 100, 150));
+        union13.add(new Interval("2", 200, 600));
+        union13.add(new Interval("3", 50, 470));
+
+        return new Object[][]{
+                new Object[]{Arrays.asList(list1, list2, list3), union123},
+                new Object[]{Arrays.asList(list1, list2), union12},
+                new Object[]{Arrays.asList(list1, list2), union12},
+                new Object[]{Arrays.asList(list2, list3), union23},
+                new Object[]{Arrays.asList(list2, list3), union23},
+                new Object[]{Arrays.asList(list1, list3), union13},
+                new Object[]{Arrays.asList(list1, list3), union13}
+        };
+    }
+
+    @Test(dataProvider = "unionData", enabled = true)
+    public void testUnionIntervalLists(final List<IntervalList> lists, final IntervalList list) {
+        Assert.assertEquals(
+                CollectionUtil.makeCollection(IntervalList.union(lists).iterator()),
+                CollectionUtil.makeCollection(list.iterator()));
+    }
+
+    @DataProvider(name = "invertData")
+    public Object[][] invertData() {
+        final IntervalList invert1 = new IntervalList(fileHeader);
+        final IntervalList invert2 = new IntervalList(fileHeader);
+        final IntervalList invert3 = new IntervalList(fileHeader);
+
+        final IntervalList full = new IntervalList(fileHeader);
+        final IntervalList fullChopped = new IntervalList(fileHeader);
+        final IntervalList empty = new IntervalList(fileHeader);
+
+        invert1.add(new Interval("1", 201, 201));
+        invert1.add(new Interval("1", 301, fileHeader.getSequence("1").getSequenceLength()));
+        invert1.add(new Interval("2", 1, 99));
+        invert1.add(new Interval("2", 151, 199));
+        invert1.add(new Interval("2", 301, fileHeader.getSequence("2").getSequenceLength()));
+        invert1.add(new Interval("3", 1, fileHeader.getSequence("3").getSequenceLength()));
+
+        invert2.add(new Interval("1", 1, 49));
+        invert2.add(new Interval("1", 151, 300));
+        invert2.add(new Interval("1", 501, fileHeader.getSequence("1").getSequenceLength()));
+        invert2.add(new Interval("2", 151, 249));
+        invert2.add(new Interval("2", 271, 289));
+        invert2.add(new Interval("2", 401, fileHeader.getSequence("2").getSequenceLength()));
+        invert2.add(new Interval("3", 1, fileHeader.getSequence("3").getSequenceLength()));
+
+        invert3.add(new Interval("1", 1, 24));
+        invert3.add(new Interval("1", 401, fileHeader.getSequence("1").getSequenceLength()));
+        invert3.add(new Interval("2", 1, 199));
+        invert3.add(new Interval("2", 601, fileHeader.getSequence("2").getSequenceLength()));
+        invert3.add(new Interval("3", 1, 49));
+        invert3.add(new Interval("3", 471, fileHeader.getSequence("3").getSequenceLength()));
+
+        for (final SAMSequenceRecord samSequenceRecord : fileHeader.getSequenceDictionary().getSequences()) {
+            full.add(new Interval(samSequenceRecord.getSequenceName(), 1, samSequenceRecord.getSequenceLength()));
+
+            fullChopped.add(new Interval(samSequenceRecord.getSequenceName(), 1, samSequenceRecord.getSequenceLength() / 2));
+            fullChopped.add(new Interval(samSequenceRecord.getSequenceName(), samSequenceRecord.getSequenceLength() / 2 + 1, samSequenceRecord.getSequenceLength()));
+        }
+
+        return new Object[][]{
+                new Object[]{list1, invert1},
+                new Object[]{list2, invert2},
+                new Object[]{list3, invert3},
+                new Object[]{full, empty},
+                new Object[]{empty, full},
+                new Object[]{fullChopped, empty}
+        };
+    }
+
+    @Test(dataProvider = "invertData")
+    public void testInvertSquared(final IntervalList list, @SuppressWarnings("UnusedParameters") final IntervalList ignored) throws Exception {
+        final IntervalList inverseSquared = IntervalList.invert(IntervalList.invert(list));
+        final IntervalList originalClone = new IntervalList(list.getHeader());
+
+        for (final Interval interval : list) {
+            originalClone.add(interval);
+        }
+
+        Assert.assertEquals(
+                CollectionUtil.makeCollection(inverseSquared.iterator()),
+                CollectionUtil.makeCollection(originalClone.uniqued().iterator()));
+    }
+
+    @Test(dataProvider = "invertData")
+    public void testInvert(final IntervalList list, final IntervalList inverse) throws Exception {
+        Assert.assertEquals(
+                CollectionUtil.makeCollection(IntervalList.invert(list).iterator()),
+                CollectionUtil.makeCollection(inverse.iterator()));
+    }
+
+    @DataProvider(name = "subtractSingletonData")
+    public Object[][] subtractSingletonData() {
+        final IntervalList subtract1_from_2 = new IntervalList(fileHeader);
+        final IntervalList subtract2_from_3 = new IntervalList(fileHeader);
+        final IntervalList subtract1_from_3 = new IntervalList(fileHeader);
+        final IntervalList subtract3_from_1 = new IntervalList(fileHeader);
+
+        subtract1_from_2.add(new Interval("1", 301, 500));
+        subtract1_from_2.add(new Interval("2", 1, 99));
+        subtract1_from_2.add(new Interval("2", 301, 400));
+
+        subtract2_from_3.add(new Interval("1", 25, 49));
+        subtract2_from_3.add(new Interval("1", 151, 300));
+        subtract2_from_3.add(new Interval("2", 200, 249));
+        subtract2_from_3.add(new Interval("2", 271, 289));
+        subtract2_from_3.add(new Interval("2", 401, 600));
+        subtract2_from_3.add(new Interval("3", 50, 470));
+
+        subtract1_from_3.add(new Interval("1", 201, 201));
+        subtract1_from_3.add(new Interval("1", 301, 400));
+        subtract1_from_3.add(new Interval("2", 301, 600));
+        subtract1_from_3.add(new Interval("3", 50, 470));
+
+        subtract3_from_1.add(new Interval("1", 1, 49));    //de-facto 1:25-400                2:200-600                            3:50-470
+        subtract3_from_1.add(new Interval("2", 100, 150));
+
+        return new Object[][]{
+                new Object[]{list2, list1, subtract1_from_2},
+                new Object[]{list3, list2, subtract2_from_3},
+                new Object[]{list3, list1, subtract1_from_3},
+        };
+    }
+
+    @DataProvider(name = "subtractData")
+    public Object[][] subtractData() {
+        final IntervalList subtract12_from_3 = new IntervalList(fileHeader);
+
+        subtract12_from_3.add(new Interval("1", 201, 201));
+        subtract12_from_3.add(new Interval("2", 401, 600));
+        subtract12_from_3.add(new Interval("3", 50, 470));
+
+        return new Object[][]{
+                new Object[]{CollectionUtil.makeList(list3), CollectionUtil.makeList(list1, list2), subtract12_from_3},
+        };
+    }
+
+    @Test(dataProvider = "subtractData")
+    public void testSubtractIntervalLists(final List<IntervalList> fromLists, final List<IntervalList> whatLists, final IntervalList list) {
+        Assert.assertEquals(
+                CollectionUtil.makeCollection(IntervalList.subtract(fromLists, whatLists).iterator()),
+                CollectionUtil.makeCollection(list.iterator()));
+    }
+
+    @Test(dataProvider = "subtractSingletonData")
+    public void testSubtractSingletonIntervalLists(final IntervalList fromLists, final IntervalList whatLists, final IntervalList list) {
+        Assert.assertEquals(
+                CollectionUtil.makeCollection(IntervalList.subtract(fromLists, whatLists).iterator()),
+                CollectionUtil.makeCollection(list.iterator()));
+    }
+
+    @Test(dataProvider = "subtractSingletonData")
+    public void testSubtractSingletonasListIntervalList(final IntervalList fromLists, final IntervalList whatLists, final IntervalList list) {
+        Assert.assertEquals(
+                CollectionUtil.makeCollection(IntervalList.subtract(Collections.singletonList(fromLists), Collections.singletonList(whatLists)).iterator()),
+                CollectionUtil.makeCollection(list.iterator()));
+    }
+
+    @DataProvider(name = "VCFCompData")
+    public Object[][] VCFCompData() {
+        return new Object[][]{
+                new Object[]{"src/test/resources/htsjdk/samtools/intervallist/IntervalListFromVCFTest.vcf", "src/test/resources/htsjdk/samtools/intervallist/IntervalListFromVCFTestComp.interval_list", false},
+                new Object[]{"src/test/resources/htsjdk/samtools/intervallist/IntervalListFromVCFTest.vcf", "src/test/resources/htsjdk/samtools/intervallist/IntervalListFromVCFTestCompInverse.interval_list", true},
+                new Object[]{"src/test/resources/htsjdk/samtools/intervallist/IntervalListFromVCFTestManual.vcf", "src/test/resources/htsjdk/samtools/intervallist/IntervalListFromVCFTestManualComp.interval_list", false},
+                new Object[]{"src/test/resources/htsjdk/samtools/intervallist/IntervalListFromVCFTestManual.vcf", "src/test/resources/htsjdk/samtools/intervallist/IntervalListFromVCFTestCompInverseManual.interval_list", true}
+        };
+    }
+
+    @Test(dataProvider = "VCFCompData")
+    public void testFromVCF(final String vcf, final String compInterval, final boolean invertVCF) {
+
+        final File vcfFile = new File(vcf);
+        final File compIntervalFile = new File(compInterval);
+
+        final IntervalList compList = IntervalList.fromFile(compIntervalFile);
+        final IntervalList list = invertVCF ? IntervalList.invert(VCFFileReader.fromVcf(vcfFile)) : VCFFileReader.fromVcf(vcfFile);
+
+        compList.getHeader().getSequenceDictionary().assertSameDictionary(list.getHeader().getSequenceDictionary());
+
+        final Collection<Interval> intervals = CollectionUtil.makeCollection(list.iterator());
+        final Collection<Interval> compIntervals = CollectionUtil.makeCollection(compList.iterator());
+
+        //assert that the intervals correspond
+        Assert.assertEquals(intervals, compIntervals);
+
+        final List<String> intervalNames = new LinkedList<String>();
+        final List<String> compIntervalNames = new LinkedList<String>();
+
+        for (final Interval interval : intervals) {
+            intervalNames.add(interval.getName());
+        }
+        for (final Interval interval : compIntervals) {
+            compIntervalNames.add(interval.getName());
+        }
+        //assert that the names match
+        Assert.assertEquals(intervalNames, compIntervalNames);
+    }
+
+    @DataProvider
+    public Object[][] testFromSequenceData() {
+        return new Object[][]{
+                new Object[]{"src/test/resources/htsjdk/samtools/intervallist/IntervalListFromVCFTestComp.interval_list", "1", 249250621},
+                new Object[]{"src/test/resources/htsjdk/samtools/intervallist/IntervalListFromVCFTestComp.interval_list", "2", 243199373},
+                new Object[]{"src/test/resources/htsjdk/samtools/intervallist/IntervalListFromVCFTestComp.interval_list", "3", 198022430},
+        };
+    }
+
+    @Test(dataProvider = "testFromSequenceData")
+    public void testFromSequenceName(final String intervalList, final String referenceName, final Integer length) {
+
+        final IntervalList intervals = IntervalList.fromFile(new File(intervalList));
+        final IntervalList test = IntervalList.fromName(intervals.getHeader(), referenceName);
+        Assert.assertEquals(test.getIntervals(), CollectionUtil.makeList(new Interval(referenceName, 1, length)));
+    }
+
+    @Test
+    public void testMerges() {
+        final SortedSet<Interval> intervals = new TreeSet<Interval>() {{
+            add(new Interval("1", 500, 600, false, "foo"));
+            add(new Interval("1", 550, 650, false, "bar"));
+            add(new Interval("1", 625, 699, false, "splat"));
+        }};
+
+        Interval out = IntervalList.merge(intervals, false);
+        Assert.assertEquals(out.getStart(), 500);
+        Assert.assertEquals(out.getEnd(), 699);
+
+        intervals.add(new Interval("1", 626, 629, false, "whee"));
+        out = IntervalList.merge(intervals, false);
+        Assert.assertEquals(out.getStart(), 500);
+        Assert.assertEquals(out.getEnd(), 699);
+    }
+
+    @Test
+    public void testBreakAtBands() {
+        final List<Interval> intervals = new ArrayList<Interval>() {{
+            add(new Interval("A", 1, 99, false, "foo"));
+            add(new Interval("A", 98, 99, true, "psyduck"));
+            add(new Interval("1", 500, 600, false, "foo")); // -> 2
+            add(new Interval("1", 550, 650, false, "bar")); // -> 2
+            add(new Interval("1", 625, 699, false, "splat"));
+            add(new Interval("2", 99, 201, false, "geodude")); // -> 3
+            add(new Interval("3", 100, 99, false, "charizard"));  // Empty Interval
+            add(new Interval("3", 101, 100, false, "golduck"));   // Empty Interval
+        }};
+
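+        // Band multiples of 100 split an interval wherever it crosses a boundary such as 599|600; e.g. "foo" (1:500-600)
+        // becomes 1:500-599 ("foo.1") and 1:600-600 ("foo.2"), which is why it is marked "-> 2" above.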
+        final List<Interval> brokenIntervals = IntervalList.breakIntervalsAtBandMultiples(intervals, 100);
+
+        Assert.assertEquals(brokenIntervals.size(), 12);
+        Assert.assertEquals(brokenIntervals.get(0), new Interval("A", 1, 99, false, "foo"));
+
+        Assert.assertEquals(brokenIntervals.get(1), new Interval("A", 98, 99, true, "psyduck"));
+
+        Assert.assertEquals(brokenIntervals.get(2), new Interval("1", 500, 599, false, "foo.1"));
+        Assert.assertEquals(brokenIntervals.get(3), new Interval("1", 600, 600, false, "foo.2"));
+
+        Assert.assertEquals(brokenIntervals.get(4), new Interval("1", 550, 599, false, "bar.1"));
+        Assert.assertEquals(brokenIntervals.get(5), new Interval("1", 600, 650, false, "bar.2"));
+
+        Assert.assertEquals(brokenIntervals.get(6), new Interval("1", 625, 699, false, "splat"));
+
+        Assert.assertEquals(brokenIntervals.get(7), new Interval("2", 99, 99, false, "geodude.1"));
+        Assert.assertEquals(brokenIntervals.get(8), new Interval("2", 100, 199, false, "geodude.2"));
+        Assert.assertEquals(brokenIntervals.get(9), new Interval("2", 200, 201, false, "geodude.3"));
+
+        Assert.assertEquals(brokenIntervals.get(10), new Interval("3", 100, 99, false, "charizard"));
+        Assert.assertEquals(brokenIntervals.get(11), new Interval("3", 101, 100, false, "golduck"));
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void TestFailAdd() {
+        IntervalList test = new IntervalList(this.fileHeader);
+        test.add(new Interval("blarg", 1, 1));
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void TestFailAddAll() {
+        IntervalList test = new IntervalList(this.fileHeader);
+        test.addall(CollectionUtil.makeList(new Interval("blarg", 1, 1), new Interval("bloorg", 1, 1)));
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void changeHeader() {
+        SAMFileHeader clonedHeader = fileHeader.clone();
+        clonedHeader.addSequence(new SAMSequenceRecord("4", 1000));
+        IntervalList usingClone1 = new IntervalList(clonedHeader);
+        usingClone1.add(new Interval("4", 1, 100));
+        IntervalList usingClone2 = new IntervalList(clonedHeader);
+        usingClone2.add(new Interval("4", 10, 20));
+
+
+        IntervalList expected = new IntervalList(clonedHeader);
+        expected.add(new Interval("4", 1, 9));
+        expected.add(new Interval("4", 21, 100));
+
+        // pull the rug out from under both lists (one call changes the header for both, since they share a single header instance)
+        usingClone1.getHeader().setSequenceDictionary(fileHeader.getSequenceDictionary());
+
+        // the interval lists are now in an illegal state: they contain a contig ("4") that is no longer in the header,
+        // so this next step should fail
+        IntervalList.subtract(usingClone1, usingClone2);
+
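+        // unreachable if the expected IllegalArgumentException was thrown above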
+        Assert.assertTrue(false);
+
+    }
+}
diff --git a/src/tests/java/htsjdk/samtools/util/IntervalTreeMapTest.java b/src/test/java/htsjdk/samtools/util/IntervalTreeMapTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/util/IntervalTreeMapTest.java
rename to src/test/java/htsjdk/samtools/util/IntervalTreeMapTest.java
diff --git a/src/tests/java/htsjdk/samtools/util/IntervalTreeTest.java b/src/test/java/htsjdk/samtools/util/IntervalTreeTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/util/IntervalTreeTest.java
rename to src/test/java/htsjdk/samtools/util/IntervalTreeTest.java
diff --git a/src/test/java/htsjdk/samtools/util/IoUtilTest.java b/src/test/java/htsjdk/samtools/util/IoUtilTest.java
new file mode 100644
index 0000000..0e4cd7a
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/util/IoUtilTest.java
@@ -0,0 +1,190 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools.util;
+
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.StringWriter;
+import java.util.Arrays;
+import java.util.List;
+
+public class IoUtilTest {
+
+    private static final File SLURP_TEST_FILE = new File("src/test/resources/htsjdk/samtools/io/slurptest.txt");
+    private static final File EMPTY_FILE = new File("src/test/resources/htsjdk/samtools/io/empty.txt");
+    private static final File FIVE_SPACES_THEN_A_NEWLINE_THEN_FIVE_SPACES_FILE = new File("src/test/resources/htsjdk/samtools/io/5newline5.txt");
+    private static final List<String> SLURP_TEST_LINES = Arrays.asList("bacon   and rice   ", "for breakfast  ", "wont you join me");
+    private static final String SLURP_TEST_LINE_SEPARATOR = "\n";
+    private static final String TEST_FILE_PREFIX = "htsjdk-IOUtilTest";
+    private static final String[] TEST_FILE_EXTENSIONS = {".txt", ".txt.gz"};
+    private static final String TEST_STRING = "bar!";
+    private File existingTempFile;
+    private String systemTempDir;
+
+    @BeforeClass
+    public void setUp() throws IOException {
+        existingTempFile = File.createTempFile("FiletypeTest.", ".tmp");
+        existingTempFile.deleteOnExit();
+        systemTempDir = System.getProperty("java.io.tmpdir");
+        final File tmpDir = new File(systemTempDir);
+        if (!tmpDir.isDirectory()) tmpDir.mkdir();
+        if (!tmpDir.isDirectory())
+            throw new RuntimeException("java.io.tmpdir (" + systemTempDir + ") is not a directory");
+    }
+
+    @Test
+    public void testFileReadingAndWriting() throws IOException {
+        String randomizedTestString = TEST_STRING + System.currentTimeMillis();
+        for (String ext : TEST_FILE_EXTENSIONS) {
+            File f = File.createTempFile(TEST_FILE_PREFIX, ext);
+            f.deleteOnExit();
+
+            OutputStream os = IOUtil.openFileForWriting(f);
+            BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(os));
+            writer.write(randomizedTestString);
+            writer.close();
+
+            InputStream is = IOUtil.openFileForReading(f);
+            BufferedReader reader = new BufferedReader(new InputStreamReader(is));
+            String line = reader.readLine();
+            Assert.assertEquals(randomizedTestString, line);
+        }
+    }
+
+    @Test(groups = {"unix"})
+    public void testGetCanonicalPath() throws IOException {
+        String tmpPath = System.getProperty("java.io.tmpdir");
+        String userName = System.getProperty("user.name");
+
+        if (tmpPath.endsWith(userName)) {
+            tmpPath = tmpPath.substring(0, tmpPath.length() - userName.length());
+        }
+
+        File tmpDir = new File(tmpPath, userName);
+        tmpDir.mkdir();
+        File actual = new File(tmpDir, "actual.txt");
+        ProcessExecutor.execute(new String[]{"touch", actual.getAbsolutePath()});
+        File symlink = new File(tmpDir, "symlink.txt");
+        ProcessExecutor.execute(new String[]{"ln", "-s", actual.getAbsolutePath(), symlink.getAbsolutePath()});
+        File lnDir = new File(tmpDir, "symLinkDir");
+        ProcessExecutor.execute(new String[]{"ln", "-s", tmpDir.getAbsolutePath(), lnDir.getAbsolutePath()});
+        File lnToActual = new File(lnDir, "actual.txt");
+        File lnToSymlink = new File(lnDir, "symlink.txt");
+
+
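+        // four paths to the same underlying file: direct, via a file symlink, via a directory symlink, and via both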
+        File[] files = {actual, symlink, lnToActual, lnToSymlink};
+        for (File f : files) {
+            Assert.assertEquals(IOUtil.getFullCanonicalPath(f), actual.getCanonicalPath());
+        }
+
+        actual.delete();
+        symlink.delete();
+        lnToActual.delete();
+        lnToSymlink.delete();
+        lnDir.delete();
+        tmpDir.delete();
+    }
+
+    @Test
+    public void testUtfWriting() throws IOException {
+        final String utf8 = new StringWriter().append((char) 168).append((char) 197).toString();
+        for (String ext : TEST_FILE_EXTENSIONS) {
+            final File f = File.createTempFile(TEST_FILE_PREFIX, ext);
+            f.deleteOnExit();
+
+            final BufferedWriter writer = IOUtil.openFileForBufferedUtf8Writing(f);
+            writer.write(utf8);
+            CloserUtil.close(writer);
+
+            final BufferedReader reader = IOUtil.openFileForBufferedUtf8Reading(f);
+            final String line = reader.readLine();
+            Assert.assertEquals(utf8, line, f.getAbsolutePath());
+
+            CloserUtil.close(reader);
+
+        }
+    }
+
+    @Test
+    public void slurpLinesTest() throws FileNotFoundException {
+        Assert.assertEquals(IOUtil.slurpLines(SLURP_TEST_FILE), SLURP_TEST_LINES);
+    }
+
+    @Test
+    public void slurpWhitespaceOnlyFileTest() throws FileNotFoundException {
+        Assert.assertEquals(IOUtil.slurp(FIVE_SPACES_THEN_A_NEWLINE_THEN_FIVE_SPACES_FILE), "     \n     ");
+    }
+
+    @Test
+    public void slurpEmptyFileTest() throws FileNotFoundException {
+        Assert.assertEquals(IOUtil.slurp(EMPTY_FILE), "");
+    }
+
+    @Test
+    public void slurpTest() throws FileNotFoundException {
+        Assert.assertEquals(IOUtil.slurp(SLURP_TEST_FILE), CollectionUtil.join(SLURP_TEST_LINES, SLURP_TEST_LINE_SEPARATOR));
+    }
+
+    @Test(dataProvider = "fileTypeTestCases")
+    public void testFileType(final String path, boolean expectedIsRegularFile) {
+        final File file = new File(path);
+        Assert.assertEquals(IOUtil.isRegularPath(file), expectedIsRegularFile);
+    }
+
+    @Test(dataProvider = "unixFileTypeTestCases", groups = {"unix"})
+    public void testFileTypeUnix(final String path, boolean expectedIsRegularFile) {
+        final File file = new File(path);
+        Assert.assertEquals(IOUtil.isRegularPath(file), expectedIsRegularFile);
+    }
+
+    @DataProvider(name = "fileTypeTestCases")
+    private Object[][] fileTypeTestCases() {
+        return new Object[][]{
+                {existingTempFile.getAbsolutePath(), Boolean.TRUE},
+                {systemTempDir, Boolean.FALSE}
+
+        };
+    }
+
+    @DataProvider(name = "unixFileTypeTestCases")
+    private Object[][] unixFileTypeTestCases() {
+        return new Object[][]{
+                {"/dev/null", Boolean.FALSE},
+                {"/dev/stdout", Boolean.FALSE},
+                {"/non/existent/file", Boolean.TRUE},
+        };
+    }
+}
diff --git a/src/tests/java/htsjdk/samtools/util/Iso8601DateTest.java b/src/test/java/htsjdk/samtools/util/Iso8601DateTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/util/Iso8601DateTest.java
rename to src/test/java/htsjdk/samtools/util/Iso8601DateTest.java
diff --git a/src/tests/java/htsjdk/samtools/util/IupacTest.java b/src/test/java/htsjdk/samtools/util/IupacTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/util/IupacTest.java
rename to src/test/java/htsjdk/samtools/util/IupacTest.java
diff --git a/src/tests/java/htsjdk/samtools/util/MergingIteratorTest.java b/src/test/java/htsjdk/samtools/util/MergingIteratorTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/util/MergingIteratorTest.java
rename to src/test/java/htsjdk/samtools/util/MergingIteratorTest.java
diff --git a/src/test/java/htsjdk/samtools/util/OverlapDetectorTest.java b/src/test/java/htsjdk/samtools/util/OverlapDetectorTest.java
new file mode 100644
index 0000000..ecde965
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/util/OverlapDetectorTest.java
@@ -0,0 +1,240 @@
+package htsjdk.samtools.util;
+
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.*;
+
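+// Tests for OverlapDetector. A minimal usage sketch, mirroring the calls exercised below:
+//     OverlapDetector<Interval> detector = OverlapDetector.create(intervals);
+//     Set<Interval> hits = detector.getOverlaps(new Interval("1", 10, 100));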
+public class OverlapDetectorTest {
+
+    @DataProvider(name="intervalsMultipleContigs")
+    public Object[][] intervalsMultipleContigs(){
+        final List<Locatable> input = Arrays.asList(
+                new Interval("1", 10, 100),
+                new Interval("2", 200, 300)
+        );
+        final List<Locatable> empty = new ArrayList<>();
+        final List<Locatable> contig1 = Arrays.asList(
+                new Interval("1",  10, 100)
+        );
+        final List<Locatable> contig2 = Arrays.asList(
+                new Interval("2", 200, 300)
+        );
+
+        // returns input intervals, query range, expected overlapping intervals
+        return new Object[][] {
+                // we already test elsewhere that it works within a contig, so here we just have to make sure that
+                // it picks the correct contig and can deal with not-yet-mentioned contigs.
+                new Object[] {input, new Interval("1", 100, 200), contig1},
+                new Object[] {input, new Interval("1", 1, 5), empty},
+                new Object[] {input, new Interval("2", 100, 200), contig2},
+                new Object[] {input, new Interval("3", 100, 200), empty},
+        };
+    }
+
+    @Test(dataProvider = "intervalsMultipleContigs")
+    public void testOverlap(final List<Locatable> input, final Locatable query, final Collection<Locatable> expected) throws Exception {
+        final OverlapDetector<Locatable> targetDetector = new OverlapDetector<>(0, 0);
+        targetDetector.addAll(input, input);
+
+        final Collection<Locatable> actual = targetDetector.getOverlaps(query);
+        Assert.assertEquals(actual, expected);
+    }
+
+    @DataProvider(name="intervalsSameContig")
+    public Object[][] intervalsSameContig(){
+        final List<Locatable> input = Arrays.asList(
+                new Interval("1",10,100)
+        );
+        final List<Locatable> empty = new ArrayList<>();
+        final List<Locatable> manyOverlapping = Arrays.asList(
+                new Interval("1",10,100),
+                // special case: multiple intervals starting at the same place
+                new Interval("1",20,50),
+                new Interval("1",20,51),
+                new Interval("1",20,52)
+        );
+        final List<Locatable> mixInput = Arrays.asList(
+                // ends before query interval
+                new Interval("1",10,20),
+                // ends in query interval
+                new Interval("1",10,60),
+                // equal to query interval
+                new Interval("1",30,50),
+                // covered by query interval
+                new Interval("1",40,42),
+                // ends after query interval
+                new Interval("1",45,60),
+                // starts after query interval
+                new Interval("1",60,100)
+        );
+        final List<Locatable> mixExpected = Arrays.asList(
+                // ends in query interval
+                new Interval("1",10,60),
+                // equal to query interval
+                new Interval("1",30,50),
+                // covered by query interval
+                new Interval("1",40,42),
+                // ends after query interval
+                new Interval("1",45,60)
+        );
+        // returns input intervals, query range, expected overlapping intervals
+        return new Object[][] {
+                // single-point boundary cases
+                new Object[] {input, new Interval("1", 10, 10), input},
+                new Object[] {input, new Interval("1", 100, 100), input},
+                new Object[] {input, new Interval("1", 9, 9), empty},
+                new Object[] {input, new Interval("1", 11, 11), input},
+                new Object[] {input, new Interval("1", 99, 99), input},
+                new Object[] {input, new Interval("1", 101, 101), empty},
+                // different contig
+                new Object[] {input, new Interval("2", 10, 100), empty},
+                // empty list boundary case
+                new Object[] {empty, new Interval("1", 101, 101), empty},
+                // input exactly matches the query interval
+                new Object[] {input, new Interval("1", 10, 100), input},
+                // multiple intervals in the same place (potential edge case for indexing)
+                new Object[] {manyOverlapping, new Interval("1", 20, 20), manyOverlapping},
+                // input with multiple intervals
+                new Object[] {mixInput, new Interval("1",30,50), mixExpected},
+                // input with multiple intervals, non-overlapping query
+                new Object[] {mixInput, new Interval("1",300,500), empty},
+        };
+    }
+
+    @Test(dataProvider = "intervalsSameContig")
+    public void testOverlap(final List<Locatable> input, final Interval query, final List<Locatable> expected) throws Exception {
+        final OverlapDetector<Locatable> targetDetector = OverlapDetector.create(input);
+
+        final Set<Locatable> actual = targetDetector.getOverlaps(query);
+        Assert.assertEquals(actual, new HashSet<>(expected));
+
+        Assert.assertEquals(targetDetector.overlapsAny(query), !expected.isEmpty());
+
+        Assert.assertEquals(new HashSet<>(targetDetector.getAll()), new HashSet<>(input));
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testOverlapsNullArg() throws Exception {
+        final List<Locatable> input = Arrays.asList(
+                new Interval("1",10,100)
+        );
+        final OverlapDetector<Locatable> targetDetector = OverlapDetector.create(input);
+        targetDetector.overlapsAny(null);
+    }
+
+    @Test
+    public void testNoOverlapsAny() throws Exception {
+        final List<Locatable> input = Arrays.asList(
+                new Interval("1",10,110)
+        );
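+        // the constructor arguments are lhs/rhs trim amounts applied before overlaps are computed
+        // (20 bases each here), which is why 1:50-85 no longer overlaps the stored 1:10-110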
+        final OverlapDetector<Locatable> trimmedTargetDetector = new OverlapDetector<>(20, 20);
+        trimmedTargetDetector.addAll(input, input);
+        Assert.assertFalse(trimmedTargetDetector.overlapsAny( new Interval("1",50,85)));//no overlap because of trim
+        Assert.assertTrue(trimmedTargetDetector.getOverlaps( new Interval("1",50,85)).isEmpty());//no overlap because of trim
+
+        final OverlapDetector<Locatable> untrimmedTargetDetector = new OverlapDetector<>(0, 0);
+        untrimmedTargetDetector.addAll(input, input);
+        Assert.assertTrue(untrimmedTargetDetector.overlapsAny( new Interval("1",50,85)));//overlaps - no trim
+    }
+
+    @Test
+    public void testLotsOfTinyIntervals() throws Exception {
+        final List<Locatable> input = new ArrayList<>();
+        final int n = 1000000;
+        for (int i = 0; i < n; i++) {
+            input.add(new Interval("1", 3*i+1, 3*i+2)); //1:1-2, 1:4-5, 1:7-8
+        }
+        final OverlapDetector<Locatable> detector = OverlapDetector.create(input);
+        final Set<Locatable> overlapping = detector.getOverlaps(new Interval("1", 1, 3 * n + 2));
+        Assert.assertEquals(new HashSet<>(input), overlapping);
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testAddAllDifferentSizes() throws Exception {
+        final List<Locatable> input = Arrays.asList(
+                new Interval("1",10,100)
+        );
+        final OverlapDetector<Locatable> targetDetector = OverlapDetector.create(input);
+
+        final List<Locatable> input1Interval = Arrays.asList(
+                new Interval("1",11,101)
+        );
+
+        final List<Locatable> input2Intervals = Arrays.asList(
+                new Interval("1",20,200),
+                new Interval("1",20,200)
+        );
+        targetDetector.addAll(input1Interval, input2Intervals);
+
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testNullObjectAddLHS() throws Exception {
+        final List<Locatable> input = Arrays.asList(
+                new Interval("1",10,100)
+        );
+        final OverlapDetector<Locatable> targetDetector = OverlapDetector.create(input);
+        targetDetector.addLhs(null, new Interval("2",10,100));
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testNullIntervalAddLHS() throws Exception {
+        final List<Locatable> input = Arrays.asList(
+                new Interval("1",10,100)
+        );
+        final OverlapDetector<Locatable> targetDetector = OverlapDetector.create(input);
+        targetDetector.addLhs(new Interval("2",10,100), null);
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testNullObjectsAddAll() throws Exception {
+        final List<Locatable> input = Arrays.asList(
+                new Interval("1",10,100)
+        );
+        final OverlapDetector<Locatable> targetDetector = OverlapDetector.create(input);
+        targetDetector.addAll(null, Arrays.asList(new Interval("2",10,100)));
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testNullIntervalsAddAll() throws Exception {
+        final List<Locatable> input = Arrays.asList(
+                new Interval("1",10,100)
+        );
+        final OverlapDetector<Locatable> targetDetector = OverlapDetector.create(input);
+        targetDetector.addAll(Arrays.asList(new Interval("2",10,100)), null);
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testDifferentSizesAddAll() throws Exception {
+        final List<Locatable> input = Arrays.asList(
+                new Interval("1",10,100)
+        );
+        final OverlapDetector<Locatable> targetDetector = OverlapDetector.create(input);
+        final List<Locatable> l1 = Arrays.asList(new Interval("2", 10, 100));
+        final List<Locatable> l2 = Arrays.asList(new Interval("2", 10, 100), new Interval("3", 10, 100));
+        targetDetector.addAll(l1, l2);
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testNullArgGetOverlaps() throws Exception {
+        final List<Locatable> input = Arrays.asList(
+                new Interval("1",10,100)
+        );
+        final OverlapDetector<Locatable> targetDetector = OverlapDetector.create(input);
+        targetDetector.getOverlaps(null);
+    }
+
+    @Test
+    public void testAddTwice() throws Exception {
+        final List<Locatable> input = Arrays.asList(
+                new Interval("1",10,100),
+                new Interval("1",10,100)
+        );
+        final OverlapDetector<Locatable> detector = OverlapDetector.create(input);
+        final Set<Locatable> overlaps = detector.getOverlaps(new Interval("1", 50, 200));
+        Assert.assertEquals(overlaps.size(), 1);
+        Assert.assertEquals(overlaps, Collections.singleton(new Interval("1",10,100)));
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/util/QualityEncodingDetectorTest.java b/src/test/java/htsjdk/samtools/util/QualityEncodingDetectorTest.java
new file mode 100644
index 0000000..9e014d7
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/util/QualityEncodingDetectorTest.java
@@ -0,0 +1,109 @@
+package htsjdk.samtools.util;
+
+import htsjdk.samtools.SAMException;
+import htsjdk.samtools.SAMRecordSetBuilder;
+import htsjdk.samtools.SamReader;
+import htsjdk.samtools.SamReaderFactory;
+import htsjdk.samtools.fastq.FastqReader;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.util.Arrays;
+import java.util.List;
+
+public class QualityEncodingDetectorTest {
+
+    private static class Testcase {
+        private final File f;
+        private final FastqQualityFormat q;
+
+        Testcase(final File file, final FastqQualityFormat qualityFormat) {
+            this.f = file;
+            this.q = qualityFormat;
+        }
+    }
+
+    final static List<Testcase> FASTQ_TESTCASES = Arrays.asList(
+            // Need to use full-range quality here, as Solexa and Illumina are nearly indistinguishable
+            new Testcase(new File("./src/test/resources/htsjdk/samtools/util/QualityEncodingDetectorTest/solexa_full_range_as_solexa.fastq"), FastqQualityFormat.Solexa),
+            new Testcase(new File("./src/test/resources/htsjdk/samtools/util/QualityEncodingDetectorTest/s_1_sequence.txt"), FastqQualityFormat.Illumina),
+            new Testcase(new File("./src/test/resources/htsjdk/samtools/util/QualityEncodingDetectorTest/5k-30BB2AAXX.3.aligned.sam.fastq"), FastqQualityFormat.Standard)
+    );
+    final static List<Testcase> BAM_TESTCASES = Arrays.asList(
+            new Testcase(new File("./src/test/resources/htsjdk/samtools/util/QualityEncodingDetectorTest/unmapped.sam"), FastqQualityFormat.Standard),
+            new Testcase(new File("./src/test/resources/htsjdk/samtools/BAMFileIndexTest/index_test.bam"), FastqQualityFormat.Standard),
+            new Testcase(new File("./src/test/resources/htsjdk/samtools/util/QualityEncodingDetectorTest/solexa-as-standard.bam"), FastqQualityFormat.Solexa),
+            new Testcase(new File("./src/test/resources/htsjdk/samtools/util/QualityEncodingDetectorTest/illumina-as-standard.bam"), FastqQualityFormat.Illumina)
+
+    );
+
+    Object[][] renderObjectArrayArray(final List<Testcase> testcaseList) {
+        final Object[][] data = new Object[testcaseList.size()][];
+        for (int i = 0; i < data.length; i++) {
+            final Testcase testcase = testcaseList.get(i);
+            data[i] = new Object[]{testcase.f, testcase.q};
+        }
+        return data;
+    }
+
+    @DataProvider(name = "BAM_TESTCASES")
+    Object[][] bamTestcases() {
+        return renderObjectArrayArray(BAM_TESTCASES);
+    }
+
+    @DataProvider(name = "FASTQ_TESTCASES")
+    Object[][] fastqTestcases() {
+        return renderObjectArrayArray(FASTQ_TESTCASES);
+    }
+
+    @Test(dataProvider = "FASTQ_TESTCASES", groups = {"unix"})
+    public void testFastqQualityInference(final File input, final FastqQualityFormat expectedQualityFormat) {
+        final FastqReader reader = new FastqReader(input);
+        Assert.assertEquals(QualityEncodingDetector.detect(reader), expectedQualityFormat);
+        reader.close();
+    }
+
+    @Test(dataProvider = "BAM_TESTCASES", groups = {"unix"})
+    public void testBamQualityInference(final File input, final FastqQualityFormat expectedQualityFormat) {
+        final SamReader reader = SamReaderFactory.makeDefault().open(input);
+        Assert.assertEquals(QualityEncodingDetector.detect(reader), expectedQualityFormat);
+    }
+
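+    // The tiny read set below does not let the detector infer Standard on its own
+    // (testSmallBamForDetectorFailure); supplying an expected format resolves the call
+    // (testSmallBamWithExpectedQuality), while a contradictory one raises a SAMException (testQualitySanity).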
+    @Test
+    public void testSmallBamForDetectorFailure() {
+        final SAMRecordSetBuilder samRecordSetBuilder = createSmallUnmappedSam();
+        Assert.assertNotSame(QualityEncodingDetector.detect(samRecordSetBuilder.getSamReader(),
+                null), FastqQualityFormat.Standard);
+    }
+
+    @Test
+    public void testSmallBamWithExpectedQuality() {
+        final SAMRecordSetBuilder samRecordSetBuilder = createSmallUnmappedSam();
+        Assert.assertEquals(QualityEncodingDetector.detect(samRecordSetBuilder.getSamReader(),
+                FastqQualityFormat.Standard), FastqQualityFormat.Standard);
+    }
+
+    @Test(expectedExceptions = SAMException.class)
+    public void testQualitySanity() {
+        final SAMRecordSetBuilder samRecordSetBuilder = createSmallUnmappedSam();
+        QualityEncodingDetector.detect(samRecordSetBuilder.getSamReader(),
+                FastqQualityFormat.Illumina);
+    }
+
+    private SAMRecordSetBuilder createSmallUnmappedSam() {
+        final SAMRecordSetBuilder samRecordSetBuilder = new SAMRecordSetBuilder();
+        samRecordSetBuilder.setReadLength(25);
+        samRecordSetBuilder.addFrag("READ0", -1, -1, false, true, null, "@@@FFFFFHHHHHJIJIIJIIJJJJ", -1);
+        samRecordSetBuilder.addFrag("READ1", -1, -1, false, true, null, "@@@FFFFFHHHHHJIJIIJIIJJJJ", -1);
+        samRecordSetBuilder.addFrag("READ2", -1, -1, false, true, null, "@CCFDFEDHHHFFHIIII at GH<FFH", -1);
+        samRecordSetBuilder.addFrag("READ3", -1, -1, false, true, null, "@@?DFFDFHFFHDHIIHIIEIIJGG", -1);
+        samRecordSetBuilder.addFrag("READ4", -1, -1, false, true, null, "@CCFFDDFHHHHHIIJJHFJJJJJH", -1);
+        samRecordSetBuilder.addFrag("READ5", -1, -1, false, true, null, "BCCFFFFFHHHHHJJJJJIJJJJJJ", -1);
+        samRecordSetBuilder.addFrag("READ6", -1, -1, false, true, null, "@@CDFFFFHHHFHHIJJJJJJJIJJ", -1);
+        samRecordSetBuilder.addFrag("READ7", -1, -1, false, true, null, "CCCFFFFFHHHHHJJJJIJJJJHII", -1);
+        samRecordSetBuilder.addFrag("READ8", -1, -1, false, true, null, "CCCFFFFFHHHHHJJJJJJJJJJJJ", -1);
+        return samRecordSetBuilder;
+    }
+}
diff --git a/src/tests/java/htsjdk/samtools/util/RelativeIso8601DateTest.java b/src/test/java/htsjdk/samtools/util/RelativeIso8601DateTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/util/RelativeIso8601DateTest.java
rename to src/test/java/htsjdk/samtools/util/RelativeIso8601DateTest.java
diff --git a/src/test/java/htsjdk/samtools/util/SamLocusIteratorTest.java b/src/test/java/htsjdk/samtools/util/SamLocusIteratorTest.java
new file mode 100644
index 0000000..092e6e5
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/util/SamLocusIteratorTest.java
@@ -0,0 +1,676 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2010 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools.util;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecordSetBuilder;
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.SAMSequenceRecord;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+/**
+ * @author alecw@broadinstitute.org
+ */
+public class SamLocusIteratorTest {
+
+    /** Coverage for tests with the same reads */
+    final static int coverage = 2;
+
+    /** the read length for the tests */
+    final static int readLength = 36;
+
+    final static SAMFileHeader header = new SAMFileHeader();
+
+    static {
+        header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
+        SAMSequenceDictionary dict = new SAMSequenceDictionary();
+        dict.addSequence(new SAMSequenceRecord("chrM", 100000));
+        header.setSequenceDictionary(dict);
+    }
+
+    /** Get the record builder for the tests with the default parameters that are needed */
+    private static SAMRecordSetBuilder getRecordBuilder() {
+        final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
+        builder.setHeader(header);
+        builder.setReadLength(readLength);
+        return builder;
+    }
+
+    /** Create the SamLocusIterator with the builder*/
+    private SamLocusIterator createSamLocusIterator(final SAMRecordSetBuilder builder) {
+        final SamLocusIterator ret = new SamLocusIterator(builder.getSamReader());
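+        // by default these tests visit only covered loci; testEmitUncoveredLoci switches this back on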
+        ret.setEmitUncoveredLoci(false);
+        return ret;
+    }
+
+    /**
+     * Test a simple alignment with only matches, both with and without including indels
+     */
+    @Test
+    public void testBasicIterator() {
+        final SAMRecordSetBuilder builder = getRecordBuilder();
+        // add records up to coverage for the test in that position
+        final int startPosition = 165;
+        for (int i = 0; i < coverage; i++) {
+            // add a negative-strand fragment mapped on chrM with base quality of 10
+            builder.addFrag("record" + i, 0, startPosition, true, false, "36M", null, 10);
+        }
+        // test both for include indels and do not include indels
+        for (final boolean incIndels : new boolean[] {false, true}) {
+            final SamLocusIterator sli = createSamLocusIterator(builder);
+            sli.setIncludeIndels(incIndels);
+            // make sure we accumulated depth for each position
+            int pos = startPosition;
+            for (final SamLocusIterator.LocusInfo li : sli) {
+                Assert.assertEquals(li.getPosition(), pos++);
+                Assert.assertEquals(li.getRecordAndPositions().size(), coverage);
+                // make sure that we are not accumulating indels
+                Assert.assertEquals(li.getDeletedInRecord().size(), 0);
+                Assert.assertEquals(li.getInsertedInRecord().size(), 0);
+            }
+        }
+    }
+
+    @Test
+    public void testMissingQualityString() {
+
+        final SAMRecordSetBuilder builder = getRecordBuilder();
+        // add records up to coverage for the test in that position
+        final int startPosition = 165;
+        for (int i = 0; i < coverage; i++) {
+
+            builder.addFrag("record" + i, 0, startPosition, true, false, "36M", "*", 0);
+        }
+        final SamLocusIterator sli = createSamLocusIterator(builder);
+
+        // make sure we accumulated depth of 2 for each position
+        int pos = 165;
+        for (final SamLocusIterator.LocusInfo li : sli) {
+            Assert.assertEquals(li.getPosition(), pos++);
+            Assert.assertEquals(li.getRecordAndPositions().size(), 2);
+        }
+
+    }
+
+    /**
+     * Test emitting uncovered loci, both with and without including indels
+     */
+    @Test
+    public void testEmitUncoveredLoci() {
+
+        final SAMRecordSetBuilder builder = getRecordBuilder();
+        // add records up to coverage for the test in that position
+        final int startPosition = 165;
+        for (int i = 0; i < coverage; i++) {
+            // add a negative-strand fragment mapped on chrM with base quality of 10
+            builder.addFrag("record" + i, 0, startPosition, true, false, "36M", null, 10);
+        }
+
+        final int coveredEnd = CoordMath.getEnd(startPosition, readLength);
+
+        // test both for include indels and do not include indels
+        for (final boolean incIndels : new boolean[] {false, true}) {
+            final SamLocusIterator sli = createSamLocusIterator(builder);
+            sli.setEmitUncoveredLoci(true);
+            sli.setIncludeIndels(incIndels);
+            // make sure we accumulated the expected depth for each position
+            int pos = 1;
+            for (final SamLocusIterator.LocusInfo li : sli) {
+                Assert.assertEquals(li.getPosition(), pos++);
+                final int expectedReads;
+                if (li.getPosition() >= startPosition && li.getPosition() <= coveredEnd) {
+                    expectedReads = coverage;
+                } else {
+                    expectedReads = 0;
+                }
+                Assert.assertEquals(li.getRecordAndPositions().size(), expectedReads);
+                // make sure that we are not accumulating indels
+                Assert.assertEquals(li.getDeletedInRecord().size(), 0);
+                Assert.assertEquals(li.getInsertedInRecord().size(), 0);
+            }
+            Assert.assertEquals(pos, header.getSequence(0).getSequenceLength() + 1);
+        }
+    }
+
+    /**
+     * Test the quality filter, both with and without including indels
+     */
+    @Test
+    public void testQualityFilter() {
+        final SAMRecordSetBuilder builder = getRecordBuilder();
+        // add records up to coverage for the test in that position
+        final int startPosition = 165;
+        for (int i = 0; i < coverage; i++) {
+            final String qualityString;
+            // half of the reads have a different quality
+            if (i % 2 == 0) {
+                qualityString = null;
+            } else {
+                qualityString = "+*+*+*+*+*+*+*+*+*+*+*+*+*+*+*+*+*+*";
+            }
+            // add a negative-strand fragment mapped on chrM with base quality of 10
+            builder.addFrag("record" + i, 0, startPosition, true, false, "36M", qualityString, 10);
+        }
+
+        // test both for include indels and do not include indels
+        for (final boolean incIndels : new boolean[] {false, true}) {
+            final SamLocusIterator sli = createSamLocusIterator(builder);
+            sli.setQualityScoreCutoff(10);
+            sli.setIncludeIndels(incIndels);
+            // make sure we accumulated coverage/2 for even positions and full coverage for odd positions
+            int pos = startPosition;
+            for (final SamLocusIterator.LocusInfo li : sli) {
+                Assert.assertEquals(li.getRecordAndPositions().size(), (pos % 2 == 0) ? coverage / 2 : coverage);
+                Assert.assertEquals(li.getPosition(), pos++);
+                // make sure that we are not accumulating indels
+                Assert.assertEquals(li.getDeletedInRecord().size(), 0);
+                Assert.assertEquals(li.getInsertedInRecord().size(), 0);
+            }
+        }
+    }
+
+    /**
+     * Test a simple deletion, both with and without including indels
+     */
+    @Test
+    public void testSimpleDeletion() {
+        final SAMRecordSetBuilder builder = getRecordBuilder();
+        // add records up to coverage for the test in that position
+        final int startPosition = 165;
+        for (int i = 0; i < coverage; i++) {
+            // add a negative-strand fragment mapped on chrM with base quality of 10
+            builder.addFrag("record" + i, 0, startPosition, true, false, "18M10D18M", null, 10);
+        }
+        final int deletionStart = 183;
+        final int deletionEnd = 192;
+        // test both for include indels and do not include indels
+        for (final boolean incIndels : new boolean[] {false, true}) {
+            final SamLocusIterator sli = createSamLocusIterator(builder);
+            sli.setIncludeIndels(incIndels);
+            // make sure we accumulated depth for each position
+            int pos = startPosition;
+            for (final SamLocusIterator.LocusInfo li : sli) {
+                boolean isDeletedPosition = (pos >= deletionStart && pos <= deletionEnd);
+                if (!incIndels && isDeletedPosition) {
+                    pos = deletionEnd + 1;
+                    isDeletedPosition = false;
+                }
+                Assert.assertEquals(li.getPosition(), pos++);
+                if (isDeletedPosition) {
+                    // make sure there are no reads without indels
+                    Assert.assertEquals(li.getRecordAndPositions().size(), 0);
+                    // make sure that we are accumulating indels
+                    Assert.assertEquals(li.getDeletedInRecord().size(), coverage);
+                    Assert.assertEquals(li.getInsertedInRecord().size(), 0);
+                } else {
+                    // make sure we are accumulating normal coverage
+                    Assert.assertEquals(li.getRecordAndPositions().size(), coverage);
+                    // make sure that we are not accumulating indels
+                    Assert.assertEquals(li.getDeletedInRecord().size(), 0);
+                    Assert.assertEquals(li.getInsertedInRecord().size(), 0);
+                }
+            }
+        }
+    }
+
+    /**
+     * Test a simple insertion, both with and without including indels
+     */
+    @Test
+    public void testSimpleInsertion() {
+        final SAMRecordSetBuilder builder = getRecordBuilder();
+        // add records up to coverage for the test in that position
+        final int startPosition = 165;
+        for (int i = 0; i < coverage; i++) {
+            // add a negative-strand fragment mapped on chrM with base quality of 10
+            builder.addFrag("record" + i, 0, startPosition, true, false, "30M3I3M", null, 10);
+        }
+        final int insStart = 194;
+        // test both for include indels and do not include indels
+        for (final boolean incIndels : new boolean[] {false, true}) {
+            final SamLocusIterator sli = createSamLocusIterator(builder);
+            sli.setIncludeIndels(incIndels);
+            // make sure we accumulated depth for each position
+            int pos = startPosition;
+            for (final SamLocusIterator.LocusInfo li : sli) {
+                Assert.assertEquals(li.getPosition(), pos++);
+                // make sure we are accumulating normal coverage
+                Assert.assertEquals(li.getRecordAndPositions().size(), coverage);
+                // make sure that we are not accumulating deletions
+                Assert.assertEquals(li.getDeletedInRecord().size(), 0);
+                if (incIndels && li.getPosition() == insStart) {
+                    Assert.assertEquals(li.getInsertedInRecord().size(), coverage);
+                } else {
+                    Assert.assertEquals(li.getInsertedInRecord().size(), 0);
+                }
+            }
+        }
+    }
+
+    /**
+     * Test an insertion at the start of the read, both with and without including indels
+     */
+    @Test
+    public void testStartWithInsertion() {
+        final SAMRecordSetBuilder builder = getRecordBuilder();
+        // add records up to coverage for the test in that position
+        final int startPosition = 165;
+        for (int i = 0; i < coverage; i++) {
+            // add a negative-strand fragment mapped on chrM with base quality of 10
+            builder.addFrag("record" + i, 0, startPosition, true, false, "3I33M", null, 10);
+        }
+
+        // test both for include indels and do not include indels
+        for (final boolean incIndels : new boolean[] {false, true}) {
+            final SamLocusIterator sli = createSamLocusIterator(builder);
+            sli.setIncludeIndels(incIndels);
+            // make sure we accumulated depth for each position
+            int pos = (incIndels) ? startPosition - 1 : startPosition;
+            boolean indelPosition = incIndels;
+            for (final SamLocusIterator.LocusInfo li : sli) {
+                Assert.assertEquals(li.getPosition(), pos);
+                // accumulation of coverage
+                Assert.assertEquals(li.getRecordAndPositions().size(), (indelPosition) ? 0 : coverage);
+                // no accumulation of deletions
+                Assert.assertEquals(li.getDeletedInRecord().size(), 0);
+                // accumulation of insertion
+                Assert.assertEquals(li.getInsertedInRecord().size(), (indelPosition) ? coverage : 0);
+                // check offsets of the insertion
+                if (indelPosition) {
+                    Assert.assertEquals(li.getInsertedInRecord().get(0).getOffset(), 0);
+                    Assert.assertEquals(li.getInsertedInRecord().get(1).getOffset(), 0);
+                    indelPosition = false;
+                }
+                pos++;
+            }
+        }
+    }
+
+    /**
+     * Test an insertion at the start of a soft-clipped read, both with and without including indels
+     */
+    @Test
+    public void testStartWithSoftClipAndInsertion() {
+        final SAMRecordSetBuilder builder = getRecordBuilder();
+        // add records up to coverage for the test in that position
+        final int startPosition = 165;
+        for (int i = 0; i < coverage; i++) {
+            // add a negative-strand fragment mapped on chrM with base quality of 10
+            builder.addFrag("record" + i, 0, startPosition, true, false, "1S3I32M", null, 10);
+        }
+
+        // test both for include indels and do not include indels
+        for (final boolean incIndels : new boolean[] {false, true}) {
+            final SamLocusIterator sli = createSamLocusIterator(builder);
+            sli.setIncludeIndels(incIndels);
+            // make sure we accumulated depth for each position
+            int pos = (incIndels) ? startPosition - 1 : startPosition;
+            boolean indelPosition = incIndels;
+            for (final SamLocusIterator.LocusInfo li : sli) {
+                Assert.assertEquals(li.getPosition(), pos);
+                // accumulation of coverage
+                Assert.assertEquals(li.getRecordAndPositions().size(), (indelPosition) ? 0 : coverage);
+                // no accumulation of deletions
+                Assert.assertEquals(li.getDeletedInRecord().size(), 0);
+                // accumulation of insertion
+                Assert.assertEquals(li.getInsertedInRecord().size(), (indelPosition) ? coverage : 0);
+                // check offsets of the insertion
+                if (indelPosition) {
+                    Assert.assertEquals(li.getInsertedInRecord().get(0).getOffset(), 1);
+                    Assert.assertEquals(li.getInsertedInRecord().get(1).getOffset(), 1);
+                    indelPosition = false;
+                }
+                pos++;
+            }
+        }
+    }
+
+    /**
+     * Test an insertion after N in CIGAR
+     */
+    @Test
+    public void testNBeforeInsertion() {
+        final SAMRecordSetBuilder builder = getRecordBuilder();
+        // add records up to coverage for the test in that position
+        final int startPosition = 165;
+        for (int i = 0; i < coverage; i++) {
+            // add a negative-strand fragment mapped on chrM with base quality of 10
+            builder.addFrag("record" + i, 0, startPosition, true, false, "2M4N3I27M", null, 10);
+        }
+        final int startN = 167;
+        final int endN = 170;
+
+        // test both for include indels and do not include indels
+        for (final boolean incIndels : new boolean[] {false, true}) {
+            final SamLocusIterator sli = createSamLocusIterator(builder);
+            sli.setIncludeIndels(incIndels);
+            // make sure we accumulated depth for each position
+            int pos = startPosition;
+            for (final SamLocusIterator.LocusInfo li : sli) {
+                // skipping Ns
+                if (pos >= startN && pos <= endN) {
+                    pos = (incIndels) ? endN : endN + 1;
+                }
+                Assert.assertEquals(li.getPosition(), pos);
+                // accumulation of coverage
+                Assert.assertEquals(li.getRecordAndPositions().size(), (pos == endN) ? 0 : coverage);
+                // no accumulation of deletions
+                Assert.assertEquals(li.getDeletedInRecord().size(), 0);
+                // accumulation of insertion
+                Assert.assertEquals(li.getInsertedInRecord().size(), (pos == endN) ? coverage : 0);
+                // check offsets of the insertion
+                if (pos == endN) {
+                    Assert.assertEquals(li.getInsertedInRecord().get(0).getOffset(), 2);
+                    Assert.assertEquals(li.getInsertedInRecord().get(1).getOffset(), 2);
+                }
+                pos++;
+            }
+        }
+    }
+
+    /**
+     * Test a deletion after N in CIGAR
+     */
+    @Test
+    public void testNBeforeDeletion() {
+        final SAMRecordSetBuilder builder = getRecordBuilder();
+        // add records up to coverage for the test in that position
+        final int startPosition = 165;
+        for (int i = 0; i < coverage; i++) {
+            // add a negative-strand fragment mapped on chrM with base quality of 10
+            builder.addFrag("record" + i, 0, startPosition, true, false, "2M4N4D5M", null, 10);
+        }
+        final int startN = 167;
+        final int endN = 170;
+        final int startDel = 171;
+        final int endDel = 174;
+
+        // test both for include indels and do not include indels
+        for (final boolean incIndels : new boolean[] {false, true}) {
+            final SamLocusIterator sli = createSamLocusIterator(builder);
+            sli.setIncludeIndels(incIndels);
+            // make sure we accumulated depth for each position
+            int pos = startPosition;
+            for (final SamLocusIterator.LocusInfo li : sli) {
+                if (pos >= startN && pos <= endN) {
+                    if (incIndels) {
+                        // skipping Ns
+                        pos = endN + 1;
+                    } else {
+                        // skip deletions
+                        pos = endDel + 1;
+                    }
+                }
+                final boolean insideDeletion = incIndels && (pos >= startDel && pos <= endDel);
+                Assert.assertEquals(li.getPosition(), pos);
+                // accumulation of coverage
+                Assert.assertEquals(li.getRecordAndPositions().size(), (insideDeletion) ? 0 : coverage);
+                // accumulation of deletions
+                Assert.assertEquals(li.getDeletedInRecord().size(), (insideDeletion) ? coverage : 0);
+                // no accumulation of insertion
+                Assert.assertEquals(li.getInsertedInRecord().size(), 0);
+                // check offsets of the insertion
+                if (pos == endN) {
+                    Assert.assertEquals(li.getInsertedInRecord().get(0).getOffset(), 2);
+                    Assert.assertEquals(li.getInsertedInRecord().get(0).getOffset(), 2);
+                }
+                pos++;
+            }
+        }
+    }
+
+    /**
+     * Try all CIGAR operators (except H and P) and confirm that the loci produced by SamLocusIterator are as expected,
+     * both with and without including indels
+     */
+    @Test
+    public void testSimpleGappedAlignment() {
+        final SAMRecordSetBuilder builder = getRecordBuilder();
+        // add records up to coverage for the test in that position
+        final int startPosition = 165;
+        for (int i = 0; i < coverage; i++) {
+            // add a negative-strand fragment mapped on chrM with base quality of 10
+            builder.addFrag("record" + i, 0, startPosition, true, false, "3S3M3N3M3D3M3I18M3S", null, 10);
+        }
+
+        // expected reference positions and read offsets for each covered locus
+        final int[] expectedPositions = new int[]{
+                // 3S
+                165, 166, 167, // 3M
+                // 3N
+                171, 172, 173, // 3M
+                174, 175, 176, // 3D
+                177, 178, 179, // 3M
+                // 3I
+                180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197}; // 18M
+
+        final int[] expectedReadOffsets = new int[]{
+                // 3S
+                3, 4, 5, // 3M
+                // 3N
+                6, 7, 8, // 3M
+                8, 8, 8, // 3D previous 0-based offset
+                9, 10, 11, // 3M
+                // 3I
+                15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32 // 3M
+        };
+
+        // to check the range of the insertion
+        final int firstDelBase = 174;
+        final int lastDelBase = 176;
+
+        final int expectedInsertionPosition = 179; // previous reference base
+        final int expectedInsertionOffset = 12; // first read base in the insertion
+
+        // test both for include indels and do not include indels
+        for (final boolean incIndels : new boolean[] {false, true}) {
+            final SamLocusIterator sli = createSamLocusIterator(builder);
+            sli.setIncludeIndels(incIndels);
+
+            int i = 0;
+            for (final SamLocusIterator.LocusInfo li : sli) {
+                // check if it is in the deletion range
+                boolean inDelRange = (expectedPositions[i] >= firstDelBase && expectedPositions[i] <= lastDelBase);
+                // if we are not including indels, the expected position index changes if it is in a deletion range
+                if (!incIndels && inDelRange) {
+                    i += 3;
+                    inDelRange = false; // set to false so we do not check the deletion range
+                }
+                // check that the LocusInfo is the expected one
+                Assert.assertEquals(li.getPosition(), expectedPositions[i]);
+                // check the insertions
+                if (incIndels && li.getPosition() == expectedInsertionPosition) {
+                    // check the accumulated coverage
+                    Assert.assertEquals(li.getInsertedInRecord().size(), coverage);
+                    // check the record offset
+                    Assert.assertEquals(li.getInsertedInRecord().get(0).getOffset(), expectedInsertionOffset);
+                    Assert.assertEquals(li.getInsertedInRecord().get(1).getOffset(), expectedInsertionOffset);
+                } else {
+                    Assert.assertEquals(li.getInsertedInRecord().size(), 0);
+                }
+                // check the range of deletions
+                if (inDelRange) {
+                    // check the coverage for insertion and normal records
+                    Assert.assertEquals(li.getDeletedInRecord().size(), coverage);
+                    Assert.assertEquals(li.getRecordAndPositions().size(), 0);
+                    // check the offset for the deletion
+                    Assert.assertEquals(li.getDeletedInRecord().get(0).getOffset(), expectedReadOffsets[i]);
+                    Assert.assertEquals(li.getDeletedInRecord().get(1).getOffset(), expectedReadOffsets[i]);
+                } else {
+                    // if it is not a deletion, perform the same test as before
+                    Assert.assertEquals(li.getRecordAndPositions().size(), coverage);
+                    // Assert.assertEquals(li.getDeletedInRecord().size(), 0);
+                    Assert.assertEquals(li.getRecordAndPositions().get(0).getOffset(), expectedReadOffsets[i]);
+                    Assert.assertEquals(li.getRecordAndPositions().get(1).getOffset(), expectedReadOffsets[i]);
+                }
+                ++i;
+            }
+        }
+
+
+    }
+
+    /**
+     * Test two reads that overlap because one has a deletion in the middle of it, without tracking indels
+     */
+    @Test
+    public void testOverlappingGappedAlignmentsWithoutIndels() {
+        final SAMRecordSetBuilder builder = getRecordBuilder();
+        // add records up to coverage for the test in that position
+        final int startPosition = 165;
+        // Were it not for the gap, these two reads would not overlap
+        builder.addFrag("record1", 0, startPosition, true, false, "18M10D18M", null, 10);
+        builder.addFrag("record2", 0, 206, true, false, "36M", null, 10);
+
+        final SamLocusIterator sli = createSamLocusIterator(builder);
+
+        // 5-base overlap between the two reads
+        final int numBasesCovered = 36 + 36 - 5;
+        final int[] expectedReferencePositions = new int[numBasesCovered];
+        final int[] expectedDepths = new int[numBasesCovered];
+        final int[][] expectedReadOffsets = new int[numBasesCovered][];
+
+        int i;
+        // First 18 bases are from the first read
+        for (i = 0; i < 18; ++i) {
+            expectedReferencePositions[i] = startPosition + i;
+            expectedDepths[i] = 1;
+            expectedReadOffsets[i] = new int[]{i};
+        }
+        // Gap of 10, then 13 bases from the first read
+        for (; i < 36 - 5; ++i) {
+            expectedReferencePositions[i] = startPosition + 10 + i;
+            expectedDepths[i] = 1;
+            expectedReadOffsets[i] = new int[]{i};
+        }
+        // Last 5 bases of first read overlap first 5 bases of second read
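+        // (the second read starts at 206 = startPosition + 10 + 31, hence the i - 31 offsets)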
+        for (; i < 36; ++i) {
+            expectedReferencePositions[i] = startPosition + 10 + i;
+            expectedDepths[i] = 2;
+            expectedReadOffsets[i] = new int[]{i, i - 31};
+
+        }
+        // Last 31 bases of 2nd read
+        for (; i < 36 + 36 - 5; ++i) {
+            expectedReferencePositions[i] = startPosition + 10 + i;
+            expectedDepths[i] = 1;
+            expectedReadOffsets[i] = new int[]{i - 31};
+        }
+
+        i = 0;
+        for (final SamLocusIterator.LocusInfo li : sli) {
+            Assert.assertEquals(li.getRecordAndPositions().size(), expectedDepths[i]);
+            Assert.assertEquals(li.getPosition(), expectedReferencePositions[i]);
+            Assert.assertEquals(li.getRecordAndPositions().size(), expectedReadOffsets[i].length);
+            for (int j = 0; j < expectedReadOffsets[i].length; ++j) {
+                Assert.assertEquals(li.getRecordAndPositions().get(j).getOffset(), expectedReadOffsets[i][j]);
+            }
+            // make sure that we are not accumulating indels
+            Assert.assertEquals(li.getDeletedInRecord().size(), 0);
+            Assert.assertEquals(li.getInsertedInRecord().size(), 0);
+            ++i;
+        }
+    }
+
+    /**
+     * Test two reads that overlap because one has a deletion in the middle of it, tracking indels
+     */
+    @Test
+    public void testOverlappingGappedAlignmentsWithIndels() {
+        final SAMRecordSetBuilder builder = getRecordBuilder();
+        // add records up to coverage for the test in that position
+        final int startPosition = 165;
+        // Were it not for the gap, these two reads would not overlap
+        builder.addFrag("record1", 0, startPosition, true, false, "18M10D18M", null, 10);
+        builder.addFrag("record2", 0, 206, true, false, "36M", null, 10);
+
+        final SamLocusIterator sli = createSamLocusIterator(builder);
+        sli.setIncludeIndels(true);
+
+        // 46 bases for the gapped alignment, and a 5-base overlap between the two reads
+        final int numBasesCovered = 46 + 36 - 5;
+        final int[] expectedReferencePositions = new int[numBasesCovered];
+        final int[] expectedDepths = new int[numBasesCovered];
+        final int[] expectedDelDepths = new int[numBasesCovered];
+        final int[][] expectedReadOffsets = new int[numBasesCovered][];
+        final int expectedDelOffset = 17; // previous 0-based offset
+
+        int i;
+        // First 18 bases are from the first read
+        for (i = 0; i < 18; ++i) {
+            expectedReferencePositions[i] = startPosition + i;
+            expectedDepths[i] = 1;
+            expectedDelDepths[i] = 0;
+            expectedReadOffsets[i] = new int[]{i};
+        }
+        // Gap of 10
+        for (; i < 18 + 10; ++i) {
+            expectedReferencePositions[i] = startPosition + i;
+            expectedDepths[i] = 0;
+            expectedDelDepths[i] = 1;
+            expectedReadOffsets[i] = new int[0];
+        }
+        // the next bases for the first read (without the 5 overlapping)
+        for (; i < 46 - 5; ++i) {
+            expectedReferencePositions[i] = startPosition + i;
+            expectedDepths[i] = 1;
+            expectedDelDepths[i] = 0;
+            expectedReadOffsets[i] = new int[]{i - 10};
+        }
+        // last 5 bases of the first read overlap first 5 bases of second read
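+        // (the second read starts at 206 = startPosition + 41, so its offset here is i + 10 - 46 - 5, i.e. i - 41)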
+        for (; i < 46; ++i) {
+            expectedReferencePositions[i] = startPosition + i;
+            expectedDepths[i] = 2;
+            expectedDelDepths[i] = 0;
+            expectedReadOffsets[i] = new int[]{i - 10, i + 10 - 46 - 5};
+        }
+        // Last 31 bases of 2nd read
+        for (; i < numBasesCovered; ++i) {
+            expectedReferencePositions[i] = startPosition + i;
+            expectedDepths[i] = 1;
+            expectedDelDepths[i] = 0;
+            expectedReadOffsets[i] = new int[]{i + 10 - 46 - 5};
+        }
+        i = 0;
+        for (final SamLocusIterator.LocusInfo li : sli) {
+            // checking the same as without indels
+            Assert.assertEquals(li.getRecordAndPositions().size(), expectedDepths[i]);
+            Assert.assertEquals(li.getPosition(), expectedReferencePositions[i]);
+            Assert.assertEquals(li.getRecordAndPositions().size(), expectedReadOffsets[i].length);
+            for (int j = 0; j < expectedReadOffsets[i].length; ++j) {
+                Assert.assertEquals(li.getRecordAndPositions().get(j).getOffset(), expectedReadOffsets[i][j]);
+            }
+            // check the deletions
+            Assert.assertEquals(li.getDeletedInRecord().size(), expectedDelDepths[i]);
+            if (expectedDelDepths[i] != 0) {
+                Assert.assertEquals(li.getDeletedInRecord().get(0).getOffset(), expectedDelOffset);
+            }
+            // checking that insertions are not accumulating
+            Assert.assertEquals(li.getInsertedInRecord().size(), 0);
+            ++i;
+        }
+    }
+
+}
diff --git a/src/tests/java/htsjdk/samtools/util/SequenceUtilTest.java b/src/test/java/htsjdk/samtools/util/SequenceUtilTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/util/SequenceUtilTest.java
rename to src/test/java/htsjdk/samtools/util/SequenceUtilTest.java
diff --git a/src/tests/java/htsjdk/samtools/util/SolexaQualityConverterTest.java b/src/test/java/htsjdk/samtools/util/SolexaQualityConverterTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/util/SolexaQualityConverterTest.java
rename to src/test/java/htsjdk/samtools/util/SolexaQualityConverterTest.java
diff --git a/src/tests/java/htsjdk/samtools/util/SortingCollectionTest.java b/src/test/java/htsjdk/samtools/util/SortingCollectionTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/util/SortingCollectionTest.java
rename to src/test/java/htsjdk/samtools/util/SortingCollectionTest.java
diff --git a/src/tests/java/htsjdk/samtools/util/SortingLongCollectionTest.java b/src/test/java/htsjdk/samtools/util/SortingLongCollectionTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/util/SortingLongCollectionTest.java
rename to src/test/java/htsjdk/samtools/util/SortingLongCollectionTest.java
diff --git a/src/tests/java/htsjdk/samtools/util/StringLineReaderTest.java b/src/test/java/htsjdk/samtools/util/StringLineReaderTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/util/StringLineReaderTest.java
rename to src/test/java/htsjdk/samtools/util/StringLineReaderTest.java
diff --git a/src/tests/java/htsjdk/samtools/util/StringUtilTest.java b/src/test/java/htsjdk/samtools/util/StringUtilTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/util/StringUtilTest.java
rename to src/test/java/htsjdk/samtools/util/StringUtilTest.java
diff --git a/src/tests/java/htsjdk/samtools/util/TrimmingUtilTest.java b/src/test/java/htsjdk/samtools/util/TrimmingUtilTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/util/TrimmingUtilTest.java
rename to src/test/java/htsjdk/samtools/util/TrimmingUtilTest.java
diff --git a/src/tests/java/htsjdk/samtools/util/TupleTest.java b/src/test/java/htsjdk/samtools/util/TupleTest.java
similarity index 100%
rename from src/tests/java/htsjdk/samtools/util/TupleTest.java
rename to src/test/java/htsjdk/samtools/util/TupleTest.java
diff --git a/src/tests/java/htsjdk/tribble/AbstractFeatureReaderTest.java b/src/test/java/htsjdk/tribble/AbstractFeatureReaderTest.java
similarity index 100%
rename from src/tests/java/htsjdk/tribble/AbstractFeatureReaderTest.java
rename to src/test/java/htsjdk/tribble/AbstractFeatureReaderTest.java
diff --git a/src/tests/java/htsjdk/tribble/BinaryFeaturesTest.java b/src/test/java/htsjdk/tribble/BinaryFeaturesTest.java
similarity index 100%
rename from src/tests/java/htsjdk/tribble/BinaryFeaturesTest.java
rename to src/test/java/htsjdk/tribble/BinaryFeaturesTest.java
diff --git a/src/tests/java/htsjdk/tribble/FeatureReaderTest.java b/src/test/java/htsjdk/tribble/FeatureReaderTest.java
similarity index 100%
rename from src/tests/java/htsjdk/tribble/FeatureReaderTest.java
rename to src/test/java/htsjdk/tribble/FeatureReaderTest.java
diff --git a/src/test/java/htsjdk/tribble/TestUtils.java b/src/test/java/htsjdk/tribble/TestUtils.java
new file mode 100644
index 0000000..f686d25
--- /dev/null
+++ b/src/test/java/htsjdk/tribble/TestUtils.java
@@ -0,0 +1,28 @@
+/** This software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+
+package htsjdk.tribble;
+
+
+/**
+ * User: jacob
+ * Date: 2012-Dec-13
+ */
+public class TestUtils {
+    public static String DATA_DIR = "src/test/resources/htsjdk/tribble/";
+}
diff --git a/src/tests/java/htsjdk/tribble/TribbleIndexFeatureReaderTest.java b/src/test/java/htsjdk/tribble/TribbleIndexFeatureReaderTest.java
similarity index 100%
rename from src/tests/java/htsjdk/tribble/TribbleIndexFeatureReaderTest.java
rename to src/test/java/htsjdk/tribble/TribbleIndexFeatureReaderTest.java
diff --git a/src/tests/java/htsjdk/tribble/TribbleTest.java b/src/test/java/htsjdk/tribble/TribbleTest.java
similarity index 100%
rename from src/tests/java/htsjdk/tribble/TribbleTest.java
rename to src/test/java/htsjdk/tribble/TribbleTest.java
diff --git a/src/tests/java/htsjdk/tribble/bed/BEDCodecTest.java b/src/test/java/htsjdk/tribble/bed/BEDCodecTest.java
similarity index 100%
rename from src/tests/java/htsjdk/tribble/bed/BEDCodecTest.java
rename to src/test/java/htsjdk/tribble/bed/BEDCodecTest.java
diff --git a/src/tests/java/htsjdk/tribble/gelitext/GeliTextTest.java b/src/test/java/htsjdk/tribble/gelitext/GeliTextTest.java
similarity index 100%
rename from src/tests/java/htsjdk/tribble/gelitext/GeliTextTest.java
rename to src/test/java/htsjdk/tribble/gelitext/GeliTextTest.java
diff --git a/src/test/java/htsjdk/tribble/index/IndexFactoryTest.java b/src/test/java/htsjdk/tribble/index/IndexFactoryTest.java
new file mode 100644
index 0000000..ba64998
--- /dev/null
+++ b/src/test/java/htsjdk/tribble/index/IndexFactoryTest.java
@@ -0,0 +1,115 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2013 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.tribble.index;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.SAMSequenceRecord;
+import htsjdk.tribble.TestUtils;
+import htsjdk.tribble.TribbleException;
+import htsjdk.tribble.bed.BEDCodec;
+import htsjdk.tribble.index.tabix.TabixFormat;
+import htsjdk.tribble.index.tabix.TabixIndex;
+import htsjdk.variant.vcf.VCFCodec;
+import htsjdk.variant.vcf.VCFFileReader;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.util.List;
+
+/**
+ * User: jacob
+ * Date: 2012-Aug-23
+ */
+public class IndexFactoryTest {
+
+    final File sortedBedFile = new File(TestUtils.DATA_DIR + "bed/Unigene.sample.bed");
+    final File unsortedBedFile = new File(TestUtils.DATA_DIR + "bed/unsorted.bed");
+    final File discontinuousFile = new File(TestUtils.DATA_DIR + "bed/disconcontigs.bed");
+    final BEDCodec bedCodec = new BEDCodec();
+
+    @Test
+    public void testCreateLinearIndex() throws Exception {
+        Index index = IndexFactory.createLinearIndex(sortedBedFile, bedCodec);
+        String chr = "chr2";
+
+        Assert.assertTrue(index.getSequenceNames().contains(chr));
+        Assert.assertTrue(index.containsChromosome(chr));
+        Assert.assertEquals(1, index.getSequenceNames().size());
+        List<Block> blocks = index.getBlocks(chr, 1, 50);
+        Assert.assertEquals(1, blocks.size());
+
+        Block block = blocks.get(0);
+        Assert.assertEquals(78, block.getSize());
+    }
+
+    @Test(expectedExceptions = TribbleException.MalformedFeatureFile.class, dataProvider = "indexFactoryProvider")
+    public void testCreateIndexUnsorted(IndexFactory.IndexType type) throws Exception{
+        Index index = IndexFactory.createIndex(unsortedBedFile, bedCodec, type);
+    }
+
+    @Test(expectedExceptions = TribbleException.MalformedFeatureFile.class, dataProvider = "indexFactoryProvider")
+    public void testCreateIndexDiscontinuousContigs(IndexFactory.IndexType type) throws Exception{
+        Index index = IndexFactory.createIndex(discontinuousFile, bedCodec, type);
+    }
+
+    @DataProvider(name = "indexFactoryProvider")
+    public Object[][] getIndexFactoryTypes(){
+        return new Object[][] {
+                new Object[] { IndexFactory.IndexType.LINEAR },
+                new Object[] { IndexFactory.IndexType.INTERVAL_TREE }
+        };
+    }
+
+    @Test
+    public void testCreateTabixIndexOnBlockCompressed() {
+        // index a VCF
+        final File inputFileVcf = new File("src/test/resources/htsjdk/tribble/tabix/testTabixIndex.vcf");
+        final VCFFileReader readerVcf = new VCFFileReader(inputFileVcf, false);
+        final SAMSequenceDictionary vcfDict = readerVcf.getFileHeader().getSequenceDictionary();
+        final TabixIndex tabixIndexVcf =
+                IndexFactory.createTabixIndex(inputFileVcf, new VCFCodec(), TabixFormat.VCF,
+                vcfDict);
+
+        // index the same bgzipped VCF
+        final File inputFileVcfGz = new File("src/test/resources/htsjdk/tribble/tabix/testTabixIndex.vcf.gz");
+        final VCFFileReader readerVcfGz = new VCFFileReader(inputFileVcfGz, false);
+        final TabixIndex tabixIndexVcfGz =
+                IndexFactory.createTabixIndex(inputFileVcfGz, new VCFCodec(), TabixFormat.VCF,
+                        readerVcfGz.getFileHeader().getSequenceDictionary());
+
+        // assert that each sequence in the header that represents some VCF row ended up in the index
+        // for both the VCF and bgzipped VCF
+        for (SAMSequenceRecord samSequenceRecord : vcfDict.getSequences()) {
+            Assert.assertTrue(
+                    tabixIndexVcf.containsChromosome(samSequenceRecord.getSequenceName()),
+                    "Tabix indexed VCF does not contain sequence: " + samSequenceRecord.getSequenceName());
+
+            Assert.assertTrue(
+                    tabixIndexVcfGz.containsChromosome(samSequenceRecord.getSequenceName()),
+                    "Tabix indexed (bgzipped) VCF does not contain sequence: " + samSequenceRecord.getSequenceName());
+        }
+    }
+}
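A rough, self-contained sketch of the indexing workflow these tests exercise, using only the calls shown above (IndexFactory.createLinearIndex, BEDCodec, LittleEndianOutputStream, Index.write); the BED path and the ".idx" output name are hypothetical placeholders, not files shipped with this changeset:

    import htsjdk.tribble.bed.BEDCodec;
    import htsjdk.tribble.index.Index;
    import htsjdk.tribble.index.IndexFactory;
    import htsjdk.tribble.util.LittleEndianOutputStream;

    import java.io.File;
    import java.io.FileOutputStream;

    public class LinearIndexExample {
        public static void main(final String[] args) throws Exception {
            // hypothetical input; any coordinate-sorted BED file works
            final File bed = new File("features.sorted.bed");

            // build an in-memory linear index, as IndexFactoryTest does
            final Index index = IndexFactory.createLinearIndex(bed, new BEDCodec());

            // persist it next to the feature file (".idx" is the conventional Tribble extension)
            final LittleEndianOutputStream out =
                    new LittleEndianOutputStream(new FileOutputStream(bed.getPath() + ".idx"));
            try {
                index.write(out);
            } finally {
                out.close();
            }
        }
    }

The stream-based write is the generic Index path; TabixIndex, as the tests below show, also accepts a File directly in write().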
diff --git a/src/tests/java/htsjdk/tribble/index/IndexTest.java b/src/test/java/htsjdk/tribble/index/IndexTest.java
similarity index 100%
rename from src/tests/java/htsjdk/tribble/index/IndexTest.java
rename to src/test/java/htsjdk/tribble/index/IndexTest.java
diff --git a/src/tests/java/htsjdk/tribble/index/interval/IntervalTreeTest.java b/src/test/java/htsjdk/tribble/index/interval/IntervalTreeTest.java
similarity index 100%
rename from src/tests/java/htsjdk/tribble/index/interval/IntervalTreeTest.java
rename to src/test/java/htsjdk/tribble/index/interval/IntervalTreeTest.java
diff --git a/src/tests/java/htsjdk/tribble/index/linear/LinearIndexTest.java b/src/test/java/htsjdk/tribble/index/linear/LinearIndexTest.java
similarity index 100%
rename from src/tests/java/htsjdk/tribble/index/linear/LinearIndexTest.java
rename to src/test/java/htsjdk/tribble/index/linear/LinearIndexTest.java
diff --git a/src/test/java/htsjdk/tribble/index/tabix/TabixIndexTest.java b/src/test/java/htsjdk/tribble/index/tabix/TabixIndexTest.java
new file mode 100644
index 0000000..557a398
--- /dev/null
+++ b/src/test/java/htsjdk/tribble/index/tabix/TabixIndexTest.java
@@ -0,0 +1,143 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2014 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.tribble.index.tabix;
+
+import htsjdk.samtools.util.BlockCompressedOutputStream;
+import htsjdk.tribble.index.IndexFactory;
+import htsjdk.tribble.util.LittleEndianOutputStream;
+import htsjdk.tribble.util.TabixUtils;
+import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.variantcontext.writer.VariantContextWriter;
+import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder;
+import htsjdk.variant.vcf.VCFCodec;
+import htsjdk.variant.vcf.VCFFileReader;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.util.Iterator;
+
+public class TabixIndexTest {
+    private static final File SMALL_TABIX_FILE = new File("src/test/resources/htsjdk/tribble/tabix/trioDup.vcf.gz.tbi");
+    private static final File BIGGER_TABIX_FILE = new File("src/test/resources/htsjdk/tribble/tabix/bigger.vcf.gz.tbi");
+
+    /**
+     * Read an existing index from disk, write it to a temp file, read that in, and assert that both in-memory
+     * representations are identical.  Disk representations may not be identical due to arbitrary bin order and
+     * compression differences.
+     */
+    @Test(dataProvider = "readWriteTestDataProvider")
+    public void readWriteTest(final File tabixFile) throws Exception {
+        final TabixIndex index = new TabixIndex(tabixFile);
+        final File indexFile = File.createTempFile("TabixIndexTest.", TabixUtils.STANDARD_INDEX_EXTENSION);
+        indexFile.deleteOnExit();
+        final LittleEndianOutputStream los = new LittleEndianOutputStream(new BlockCompressedOutputStream(indexFile));
+        index.write(los);
+        los.close();
+        final TabixIndex index2 = new TabixIndex(indexFile);
+        Assert.assertEquals(index, index2);
+        // Unfortunately, a byte comparison of the original file and the temp file isn't possible, because of
+        // differing compression levels and, more importantly, the arbitrary order of bins in the bin list.
+    }
+
+    @DataProvider(name = "readWriteTestDataProvider")
+    public Object[][] readWriteTestDataProvider() {
+        return new Object[][]{
+                {SMALL_TABIX_FILE},
+                {BIGGER_TABIX_FILE}
+        };
+    }
+
+    @Test
+    public void testQueryProvidedItemsAmount() {
+        final String VCF = "src/test/resources/htsjdk/tribble/tabix/YRI.trio.2010_07.indel.sites.vcf";
+        // Note: only the compressed VCF (and its index) is stored in test resources; the plain-text copy is generated below
+        final File plainTextVcfInputFile = new File(VCF);
+        plainTextVcfInputFile.deleteOnExit();
+        final File plainTextVcfIndexFile = new File(VCF + ".tbi");
+        plainTextVcfIndexFile.deleteOnExit();
+        final File compressedVcfInputFile = new File(VCF + ".gz");
+        final File compressedTbiIndexFile = new File(VCF + ".gz.tbi");
+        final VCFFileReader compressedVcfReader = new VCFFileReader(compressedVcfInputFile, compressedTbiIndexFile);
+
+        // create a plain-text VCF without the "index on the fly" option
+        final VariantContextWriter plainTextVcfWriter = new VariantContextWriterBuilder()
+                .setOptions(VariantContextWriterBuilder.NO_OPTIONS)
+                .setOutputFile(VCF)
+                .build();
+        plainTextVcfWriter.writeHeader(compressedVcfReader.getFileHeader());
+        for (VariantContext vc : compressedVcfReader) {
+            if (vc != null) plainTextVcfWriter.add(vc);
+        }
+        plainTextVcfWriter.close();
+
+        IndexFactory.createTabixIndex(plainTextVcfInputFile,
+                new VCFCodec(),
+                TabixFormat.VCF,
+                new VCFFileReader(plainTextVcfInputFile, false).getFileHeader().getSequenceDictionary()
+        ) // create TabixIndex straight from plaintext VCF
+                .write(plainTextVcfIndexFile); // write it
+
+        final VCFFileReader plainTextVcfReader = new VCFFileReader(plainTextVcfInputFile, plainTextVcfIndexFile);
+        // Now we have both plain-text and compressed VCFs with explicitly provided Tabix indexes, and can test that both are queryable
+
+        // magic numbers chosen by inspecting the provided VCF file
+        try {
+            // just somewhere in the middle of the chromosome
+            Assert.assertEquals(42, countIteratedElements(compressedVcfReader.query("1", 868379 - 1, 1006891 + 1)));
+            Assert.assertEquals(42, countIteratedElements(plainTextVcfReader.query("1", 868379 - 1, 1006891 + 1)));
+            // chromosome start
+            Assert.assertEquals(13, countIteratedElements(compressedVcfReader.query("1", 1, 836463 + 1)));
+            Assert.assertEquals(13, countIteratedElements(plainTextVcfReader.query("1", 1, 836463 + 1)));
+            // chromosome end
+            Assert.assertEquals(36, countIteratedElements(compressedVcfReader.query("1", 76690833 - 1, 76837502 + 11111111)));
+            Assert.assertEquals(36, countIteratedElements(plainTextVcfReader.query("1", 76690833 - 1, 76837502 + 11111111)));
+            // a region in the middle of the chromosome that contains no features
+            Assert.assertEquals(0, countIteratedElements(compressedVcfReader.query("1", 36606472 + 1, 36623523 - 1)));
+            Assert.assertEquals(0, countIteratedElements(plainTextVcfReader.query("1", 36606472 + 1, 36623523 - 1)));
+            // before chromosome
+            Assert.assertEquals(0, countIteratedElements(compressedVcfReader.query("1", 1, 10)));
+            Assert.assertEquals(0, countIteratedElements(plainTextVcfReader.query("1", 1, 10)));
+            // after chromosome
+            Assert.assertEquals(0, countIteratedElements(compressedVcfReader.query("1", 76837502 * 15, 76837502 * 16)));
+            Assert.assertEquals(0, countIteratedElements(plainTextVcfReader.query("1", 76837502 * 15, 76837502 * 16)));
+        } catch (NullPointerException e) {
+            Assert.fail("Exception caught on querying: ", e);
+            // before the fix, an exception was thrown from 'TabixIndex.getBlocks()' at 'chunks.size()' because 'chunks == null' for plain files
+        } finally {
+            plainTextVcfReader.close();
+            compressedVcfReader.close();
+        }
+    }
+
+    private static int countIteratedElements(Iterator iterator) {
+        int counter = 0;
+        while (iterator.hasNext()) {
+            iterator.next();
+            counter++;
+        }
+        return counter;
+    }
+}
diff --git a/src/tests/java/htsjdk/tribble/readers/AsciiLineReaderTest.java b/src/test/java/htsjdk/tribble/readers/AsciiLineReaderTest.java
similarity index 100%
rename from src/tests/java/htsjdk/tribble/readers/AsciiLineReaderTest.java
rename to src/test/java/htsjdk/tribble/readers/AsciiLineReaderTest.java
diff --git a/src/tests/java/htsjdk/tribble/readers/LongLineBufferedReaderTest.java b/src/test/java/htsjdk/tribble/readers/LongLineBufferedReaderTest.java
similarity index 100%
rename from src/tests/java/htsjdk/tribble/readers/LongLineBufferedReaderTest.java
rename to src/test/java/htsjdk/tribble/readers/LongLineBufferedReaderTest.java
diff --git a/src/tests/java/htsjdk/tribble/readers/PositionalBufferedStreamTest.java b/src/test/java/htsjdk/tribble/readers/PositionalBufferedStreamTest.java
similarity index 100%
rename from src/tests/java/htsjdk/tribble/readers/PositionalBufferedStreamTest.java
rename to src/test/java/htsjdk/tribble/readers/PositionalBufferedStreamTest.java
diff --git a/src/tests/java/htsjdk/tribble/readers/ReaderTest.java b/src/test/java/htsjdk/tribble/readers/ReaderTest.java
similarity index 100%
rename from src/tests/java/htsjdk/tribble/readers/ReaderTest.java
rename to src/test/java/htsjdk/tribble/readers/ReaderTest.java
diff --git a/src/test/java/htsjdk/tribble/readers/SynchronousLineReaderUnitTest.java b/src/test/java/htsjdk/tribble/readers/SynchronousLineReaderUnitTest.java
new file mode 100644
index 0000000..fbb5d18
--- /dev/null
+++ b/src/test/java/htsjdk/tribble/readers/SynchronousLineReaderUnitTest.java
@@ -0,0 +1,39 @@
+package htsjdk.tribble.readers;
+
+import htsjdk.tribble.TestUtils;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+
+/**
+ * @author mccowan
+ */
+public class SynchronousLineReaderUnitTest {
+    @Test
+    public void testLineReaderIterator_streamConstructor() throws Exception {
+        final File filePath = new File(TestUtils.DATA_DIR + "gwas/smallp.gwas");
+        final LineIterator lineIterator = new LineIteratorImpl(new SynchronousLineReader(new PositionalBufferedStream(new FileInputStream(filePath))));
+        final BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(filePath)));
+
+        while (lineIterator.hasNext()) {
+            Assert.assertEquals(lineIterator.next(), br.readLine());
+        }
+        Assert.assertNull(br.readLine());
+    }
+
+    @Test
+    public void testLineReaderIterator_readerConstructor() throws Exception {
+        final File filePath = new File(TestUtils.DATA_DIR + "gwas/smallp.gwas");
+        final LineIterator lineIterator = new LineIteratorImpl(new SynchronousLineReader(new InputStreamReader(new PositionalBufferedStream(new FileInputStream(filePath)))));
+        final BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(filePath)));
+
+        while (lineIterator.hasNext()) {
+            Assert.assertEquals(lineIterator.next(), br.readLine());
+        }
+        Assert.assertNull(br.readLine());
+    }
+}
diff --git a/src/tests/java/htsjdk/tribble/readers/TabixReaderTest.java b/src/test/java/htsjdk/tribble/readers/TabixReaderTest.java
similarity index 100%
rename from src/tests/java/htsjdk/tribble/readers/TabixReaderTest.java
rename to src/test/java/htsjdk/tribble/readers/TabixReaderTest.java
diff --git a/src/tests/java/htsjdk/tribble/source/.gitignore b/src/test/java/htsjdk/tribble/source/.gitignore
similarity index 100%
rename from src/tests/java/htsjdk/tribble/source/.gitignore
rename to src/test/java/htsjdk/tribble/source/.gitignore
diff --git a/src/tests/java/htsjdk/tribble/util/ParsingUtilsTest.java b/src/test/java/htsjdk/tribble/util/ParsingUtilsTest.java
similarity index 100%
rename from src/tests/java/htsjdk/tribble/util/ParsingUtilsTest.java
rename to src/test/java/htsjdk/tribble/util/ParsingUtilsTest.java
diff --git a/src/tests/java/htsjdk/tribble/util/ftp/FTPClientTest.java b/src/test/java/htsjdk/tribble/util/ftp/FTPClientTest.java
similarity index 100%
rename from src/tests/java/htsjdk/tribble/util/ftp/FTPClientTest.java
rename to src/test/java/htsjdk/tribble/util/ftp/FTPClientTest.java
diff --git a/src/tests/java/htsjdk/tribble/util/ftp/FTPUtilsTest.java b/src/test/java/htsjdk/tribble/util/ftp/FTPUtilsTest.java
similarity index 100%
rename from src/tests/java/htsjdk/tribble/util/ftp/FTPUtilsTest.java
rename to src/test/java/htsjdk/tribble/util/ftp/FTPUtilsTest.java
diff --git a/src/tests/java/htsjdk/tribble/util/popgen/HardyWeinbergCalculationTest.java b/src/test/java/htsjdk/tribble/util/popgen/HardyWeinbergCalculationTest.java
similarity index 100%
rename from src/tests/java/htsjdk/tribble/util/popgen/HardyWeinbergCalculationTest.java
rename to src/test/java/htsjdk/tribble/util/popgen/HardyWeinbergCalculationTest.java
diff --git a/src/test/java/htsjdk/variant/PrintVariantsExampleTest.java b/src/test/java/htsjdk/variant/PrintVariantsExampleTest.java
new file mode 100644
index 0000000..c82f2db
--- /dev/null
+++ b/src/test/java/htsjdk/variant/PrintVariantsExampleTest.java
@@ -0,0 +1,65 @@
+/*
+* Copyright (c) 2012 The Broad Institute
+*
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+*
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+*
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package htsjdk.variant;
+
+import htsjdk.samtools.util.IOUtil;
+import htsjdk.variant.example.PrintVariantsExample;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.List;
+import java.util.OptionalInt;
+import java.util.stream.IntStream;
+
+public class PrintVariantsExampleTest {
+    @Test
+    public void testExampleWriteFile() throws IOException {
+        final File tempFile = File.createTempFile("example", ".vcf");
+        tempFile.deleteOnExit();
+        File f1 = new File("src/test/resources/htsjdk/variant/ILLUMINA.wex.broad_phase2_baseline.20111114.both.exome.genotypes.1000.vcf");
+        final String[] args = {
+                f1.getAbsolutePath(),
+                tempFile.getAbsolutePath()
+        };
+        Assert.assertEquals(tempFile.length(), 0);
+        PrintVariantsExample.main(args);
+        Assert.assertNotEquals(tempFile.length(), 0);
+
+        assertFilesEqualSkipHeaders(tempFile, f1);
+    }
+
+    private void assertFilesEqualSkipHeaders(File tempFile, File f1) throws FileNotFoundException {
+        final List<String> lines1 = IOUtil.slurpLines(f1);
+        final List<String> lines2 = IOUtil.slurpLines(tempFile);
+        final int firstNonComment1 = IntStream.range(0, lines1.size()).filter(i -> !lines1.get(i).startsWith("#")).findFirst().getAsInt();
+        final int firstNonComment2 = IntStream.range(0, lines2.size()).filter(i -> !lines2.get(i).startsWith("#")).findFirst().getAsInt();
+        Assert.assertEquals(lines1.subList(firstNonComment1, lines1.size()), lines2.subList(firstNonComment2,lines2.size()));
+    }
+}
diff --git a/src/test/java/htsjdk/variant/VariantBaseTest.java b/src/test/java/htsjdk/variant/VariantBaseTest.java
new file mode 100644
index 0000000..87345a0
--- /dev/null
+++ b/src/test/java/htsjdk/variant/VariantBaseTest.java
@@ -0,0 +1,278 @@
+/*
+* Copyright (c) 2012 The Broad Institute
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package htsjdk.variant;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.SAMSequenceRecord;
+import htsjdk.variant.variantcontext.Genotype;
+import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.vcf.VCFConstants;
+import org.testng.Assert;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Base class for test classes within htsjdk.variant
+ */
+public class VariantBaseTest {
+
+    public static final String variantTestDataRoot = new File("src/test/resources/htsjdk/variant/").getAbsolutePath() + "/";
+
+    /**
+     * Creates a temp file that will be deleted on exit after tests are complete.
+     * @param name Prefix of the file.
+     * @param extension Extension to append to the end of the file name.
+     * @return A file in the temporary directory starting with name, ending with extension, which will be deleted after the program exits.
+     */
+    public static File createTempFile(String name, String extension) {
+        try {
+            File file = File.createTempFile(name, extension);
+            file.deleteOnExit();
+            return file;
+        } catch (IOException ex) {
+            throw new RuntimeException("Cannot create temp file: " + ex.getMessage(), ex);
+        }
+    }
+
+    private static final double DEFAULT_FLOAT_TOLERANCE = 1e-1;
+
+    public static final void assertEqualsDoubleSmart(final Object actual, final Double expected) {
+        Assert.assertTrue(actual instanceof Double, "Not a double");
+        assertEqualsDoubleSmart((double)(Double)actual, (double)expected);
+    }
+
+    public static final void assertEqualsDoubleSmart(final Object actual, final Double expected, final double tolerance) {
+        Assert.assertTrue(actual instanceof Double, "Not a double");
+        assertEqualsDoubleSmart((double)(Double)actual, (double)expected, tolerance);
+    }
+
+    public static final void assertEqualsDoubleSmart(final double actual, final double expected) {
+        assertEqualsDoubleSmart(actual, expected, DEFAULT_FLOAT_TOLERANCE);
+    }
+
+    public static final <T> void assertEqualsSet(final Set<T> actual, final Set<T> expected, final String info) {
+        final Set<T> actualSet = new HashSet<T>(actual);
+        final Set<T> expectedSet = new HashSet<T>(expected);
+        Assert.assertTrue(actualSet.equals(expectedSet), info); // necessary due to a TestNG bug with Set comparisons
+    }
+
+    public static void assertEqualsDoubleSmart(final double actual, final double expected, final double tolerance) {
+        assertEqualsDoubleSmart(actual, expected, tolerance, null);
+    }
+
+    public static void assertEqualsDoubleSmart(final double actual, final double expected, final double tolerance, final String message) {
+        if ( Double.isNaN(expected) ) // NaN == NaN => false unfortunately
+            Assert.assertTrue(Double.isNaN(actual), "expected is nan, actual is not");
+        else if ( Double.isInfinite(expected) ) // infinite values also need an explicit check
+            Assert.assertTrue(Double.isInfinite(actual), "expected is infinite, actual is not");
+        else {
+            final double delta = Math.abs(actual - expected);
+            final double ratio = Math.abs(actual / expected - 1.0);
+            Assert.assertTrue(delta < tolerance || ratio < tolerance, "expected = " + expected + " actual = " + actual
+                    + " not within tolerance " + tolerance
+                    + (message == null ? "" : "message: " + message));
+        }
+    }
+
+    public static SAMSequenceDictionary createArtificialSequenceDictionary() {
+        final int[] contigLengths = { 249250621, 243199373, 198022430, 191154276, 180915260, 171115067, 159138663, 146364022,
+                                      141213431, 135534747, 135006516, 133851895, 115169878, 107349540, 102531392, 90354753,
+                                      81195210, 78077248, 59128983, 63025520, 48129895, 51304566, 155270560, 59373566, 16569 };
+        List<SAMSequenceRecord> contigs = new ArrayList<SAMSequenceRecord>();
+
+        for ( int contig = 1; contig <= 22; contig++ ) {
+            contigs.add(new SAMSequenceRecord(Integer.toString(contig), contigLengths[contig - 1]));
+        }
+
+        int position = 22;
+        for ( String contigName : Arrays.asList("X", "Y", "MT") ) {
+            contigs.add(new SAMSequenceRecord(contigName, contigLengths[position]));
+            position++;
+        }
+
+        return new SAMSequenceDictionary(contigs);
+    }
+
+    /**
+     * Asserts that the two provided VariantContext objects are equal.
+     *
+     * @param actual actual VariantContext object
+     * @param expected expected VariantContext to compare against
+     */
+    public static void assertVariantContextsAreEqual( final VariantContext actual, final VariantContext expected ) {
+        Assert.assertNotNull(actual, "VariantContext expected not null");
+        Assert.assertEquals(actual.getContig(), expected.getContig(), "chr");
+        Assert.assertEquals(actual.getStart(), expected.getStart(), "start");
+        Assert.assertEquals(actual.getEnd(), expected.getEnd(), "end");
+        Assert.assertEquals(actual.getID(), expected.getID(), "id");
+        Assert.assertEquals(actual.getAlleles(), expected.getAlleles(), "alleles for " + expected + " vs " + actual);
+
+        assertAttributesEquals(actual.getAttributes(), expected.getAttributes());
+        Assert.assertEquals(actual.filtersWereApplied(), expected.filtersWereApplied(), "filtersWereApplied");
+        Assert.assertEquals(actual.isFiltered(), expected.isFiltered(), "isFiltered");
+        assertEqualsSet(actual.getFilters(), expected.getFilters(), "filters");
+        assertEqualsDoubleSmart(actual.getPhredScaledQual(), expected.getPhredScaledQual());
+
+        Assert.assertEquals(actual.hasGenotypes(), expected.hasGenotypes(), "hasGenotypes");
+        if ( expected.hasGenotypes() ) {
+            assertEqualsSet(actual.getSampleNames(), expected.getSampleNames(), "sample names set");
+            Assert.assertEquals(actual.getSampleNamesOrderedByName(), expected.getSampleNamesOrderedByName(), "sample names");
+            final Set<String> samples = expected.getSampleNames();
+            for ( final String sample : samples ) {
+                assertGenotypesAreEqual(actual.getGenotype(sample), expected.getGenotype(sample));
+            }
+        }
+    }
+
+    /**
+     * Asserts that the two provided Genotype objects are equal.
+     *
+     * @param actual actual Genotype object
+     * @param expected expected Genotype object to compare against
+     */
+    public static void assertGenotypesAreEqual(final Genotype actual, final Genotype expected) {
+        Assert.assertEquals(actual.getSampleName(), expected.getSampleName(), "Genotype names");
+        Assert.assertEquals(actual.getAlleles(), expected.getAlleles(), "Genotype alleles");
+        Assert.assertEquals(actual.getGenotypeString(), expected.getGenotypeString(), "Genotype string");
+        Assert.assertEquals(actual.getType(), expected.getType(), "Genotype type");
+
+        // filters are the same
+        Assert.assertEquals(actual.getFilters(), expected.getFilters(), "Genotype fields");
+        Assert.assertEquals(actual.isFiltered(), expected.isFiltered(), "Genotype isFiltered");
+
+        // inline attributes
+        Assert.assertEquals(actual.getDP(), expected.getDP(), "Genotype dp");
+        Assert.assertTrue(Arrays.equals(actual.getAD(), expected.getAD()));
+        Assert.assertEquals(actual.getGQ(), expected.getGQ(), "Genotype gq");
+        Assert.assertEquals(actual.hasPL(), expected.hasPL(), "Genotype hasPL");
+        Assert.assertEquals(actual.hasAD(), expected.hasAD(), "Genotype hasAD");
+        Assert.assertEquals(actual.hasGQ(), expected.hasGQ(), "Genotype hasGQ");
+        Assert.assertEquals(actual.hasDP(), expected.hasDP(), "Genotype hasDP");
+
+        Assert.assertEquals(actual.hasLikelihoods(), expected.hasLikelihoods(), "Genotype haslikelihoods");
+        Assert.assertEquals(actual.getLikelihoodsString(), expected.getLikelihoodsString(), "Genotype getlikelihoodsString");
+        Assert.assertEquals(actual.getLikelihoods(), expected.getLikelihoods(), "Genotype getLikelihoods");
+        Assert.assertTrue(Arrays.equals(actual.getPL(), expected.getPL()));
+
+        Assert.assertEquals(actual.getGQ(), expected.getGQ(), "Genotype phredScaledQual");
+        assertAttributesEquals(actual.getExtendedAttributes(), expected.getExtendedAttributes());
+        Assert.assertEquals(actual.isPhased(), expected.isPhased(), "Genotype isPhased");
+        Assert.assertEquals(actual.getPloidy(), expected.getPloidy(), "Genotype getPloidy");
+    }
+
+    /**
+     * Asserts that the two sets of attribute mappings are equal. Ignores null-valued attributes in
+     * "actual" that are not present in "expected" while performing the comparison.
+     *
+     * @param actual actual mapping of attributes
+     * @param expected expected mapping of attributes
+     */
+    private static void assertAttributesEquals(final Map<String, Object> actual, Map<String, Object> expected) {
+        final Set<String> expectedKeys = new HashSet<String>(expected.keySet());
+
+        for ( final Map.Entry<String, Object> act : actual.entrySet() ) {
+            final Object actualValue = act.getValue();
+            if ( expected.containsKey(act.getKey()) && expected.get(act.getKey()) != null ) {
+                final Object expectedValue = expected.get(act.getKey());
+                if ( expectedValue instanceof List ) {
+                    final List<Object> expectedList = (List<Object>)expectedValue;
+                    Assert.assertTrue(actualValue instanceof List, act.getKey() + " should be a list but isn't");
+                    final List<Object> actualList = (List<Object>)actualValue;
+                    Assert.assertEquals(actualList.size(), expectedList.size(), act.getKey() + " size");
+                    for ( int i = 0; i < expectedList.size(); i++ ) {
+                        assertAttributeEquals(act.getKey(), actualList.get(i), expectedList.get(i));
+                    }
+                }
+                else {
+                    assertAttributeEquals(act.getKey(), actualValue, expectedValue);
+                }
+            }
+            else {
+                // it's ok for 'actual' to contain a null-valued binding that is absent from 'expected'
+                Assert.assertNull(actualValue, act.getKey() + " present in one but not in the other");
+            }
+            expectedKeys.remove(act.getKey());
+        }
+
+        // now expectedKeys contains only the keys found in expected but not in actual,
+        // and they must all be null
+        for ( final String missingExpected : expectedKeys ) {
+            final Object value = expected.get(missingExpected);
+            Assert.assertTrue(isMissingAttribute(value), "Attribute " + missingExpected + " missing in one but not in other" );
+        }
+    }
+
+    /**
+     * Asserts that the two provided attribute values are equal. If the values are Doubles, uses a
+     * more lenient comparison with a tolerance of 1e-2.
+     *
+     * @param key key for the attribute values
+     * @param actual actual attribute value
+     * @param expected expected attribute value against which to compare
+     */
+    private static void assertAttributeEquals(final String key, final Object actual, final Object expected) {
+        if ( expected instanceof Double ) {
+            // must be very tolerant because doubles are being rounded to 2 sig figs
+            assertEqualsDoubleSmart(actual, (Double) expected, 1e-2);
+        }
+        else {
+            Assert.assertEquals(actual, expected, "Attribute " + key);
+        }
+    }
+
+    /**
+     * Determines whether the provided attribute value is missing according to the VCF spec.
+     * An attribute value is missing if it's null, is equal to {@link VCFConstants#MISSING_VALUE_v4},
+     * or if it's a List that is either empty or contains only null values.
+     *
+     * @param value attribute value to test
+     * @return true if value is a missing VCF attribute value, otherwise false
+     */
+    private static boolean isMissingAttribute(final Object value) {
+        if ( value == null || value.equals(VCFConstants.MISSING_VALUE_v4) ) {
+            return true;
+        }
+        else if ( value instanceof List ) {
+            // handles the case where all elements are null or the list is empty
+            for ( final Object elt : (List)value) {
+                if (elt != null) {
+                    return false;
+                }
+            }
+            return true;
+        }
+
+        return false;
+    }
+
+}
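The helpers in VariantBaseTest are meant to be inherited by concrete test classes. A minimal usage sketch (the test class and the input file name are hypothetical; any small VCF under the variant test data root would do) that runs each record through the equality assertion defined above:

    import htsjdk.variant.VariantBaseTest;
    import htsjdk.variant.variantcontext.VariantContext;
    import htsjdk.variant.vcf.VCFFileReader;
    import org.testng.annotations.Test;

    import java.io.File;

    public class ExampleVariantRoundTripTest extends VariantBaseTest {
        @Test
        public void testRecordsEqualThemselves() {
            // hypothetical resource name under variantTestDataRoot
            final File vcf = new File(variantTestDataRoot + "ex2.vcf");
            try (final VCFFileReader reader = new VCFFileReader(vcf, false)) {
                for (final VariantContext vc : reader) {
                    // each record trivially equals itself; this only exercises the helper
                    assertVariantContextsAreEqual(vc, vc);
                }
            }
        }
    }

Comparing a record against itself proves nothing about the data; the point is only to show how variantTestDataRoot, VCFFileReader iteration, and assertVariantContextsAreEqual compose in a subclass.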
diff --git a/src/tests/java/htsjdk/variant/bcf2/BCF2EncoderDecoderUnitTest.java b/src/test/java/htsjdk/variant/bcf2/BCF2EncoderDecoderUnitTest.java
similarity index 100%
rename from src/tests/java/htsjdk/variant/bcf2/BCF2EncoderDecoderUnitTest.java
rename to src/test/java/htsjdk/variant/bcf2/BCF2EncoderDecoderUnitTest.java
diff --git a/src/tests/java/htsjdk/variant/bcf2/BCF2UtilsUnitTest.java b/src/test/java/htsjdk/variant/bcf2/BCF2UtilsUnitTest.java
similarity index 100%
rename from src/tests/java/htsjdk/variant/bcf2/BCF2UtilsUnitTest.java
rename to src/test/java/htsjdk/variant/bcf2/BCF2UtilsUnitTest.java
diff --git a/src/test/java/htsjdk/variant/utils/SAMSequenceDictionaryExtractorTest.java b/src/test/java/htsjdk/variant/utils/SAMSequenceDictionaryExtractorTest.java
new file mode 100644
index 0000000..9fb13e8
--- /dev/null
+++ b/src/test/java/htsjdk/variant/utils/SAMSequenceDictionaryExtractorTest.java
@@ -0,0 +1,65 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2014 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.variant.utils;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.util.SequenceUtil;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+import org.testng.Assert;
+
+import java.io.File;
+
+/**
+ * @author farjoun on 4/9/14.
+ */
+public class SAMSequenceDictionaryExtractorTest {
+    String path = "src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/";
+
+    @DataProvider(name = "testExtractDictionaries")
+    public Object[][] dictionaries() {
+        return new Object[][]{
+                new Object[]{"test1_comp.interval_list", "test1.dict"},
+                new Object[]{"test1.vcf", "test1.dict"},
+                new Object[]{"test1.dict", "test1.dict"},
+                new Object[]{"empty.interval_list", "test1.dict"},
+                new Object[]{"Homo_sapiens_assembly18.trimmed.fasta", "Homo_sapiens_assembly18.trimmed.dict"},
+                new Object[]{"test2_comp.interval_list", "Homo_sapiens_assembly18.trimmed.dict"},
+                new Object[]{"ScreenSamReads.100.input.sam", "test3_comp.interval_list"},
+                new Object[]{"ScreenSamReads.100.input.sam", "test4_comp.interval_list"},
+        };
+    }
+
+    @Test(dataProvider = "testExtractDictionaries")
+    public void testExtractDictionary(final String dictSource, final String dictExpected) throws Exception {
+        final File dictSourceFile = new File(path, dictSource);
+        final File dictExpectedFile = new File(path, dictExpected);
+        final SAMSequenceDictionary dict1 = SAMSequenceDictionaryExtractor.extractDictionary(dictSourceFile);
+        final SAMSequenceDictionary dict2 = SAMSequenceDictionaryExtractor.extractDictionary(dictExpectedFile);
+
+        Assert.assertTrue(SequenceUtil.areSequenceDictionariesEqual(dict1,
+                dict2));
+        Assert.assertTrue(dict1.md5().equals(dict2.md5()));
+    }
+}
diff --git a/src/tests/java/htsjdk/variant/variantcontext/AlleleUnitTest.java b/src/test/java/htsjdk/variant/variantcontext/AlleleUnitTest.java
similarity index 100%
rename from src/tests/java/htsjdk/variant/variantcontext/AlleleUnitTest.java
rename to src/test/java/htsjdk/variant/variantcontext/AlleleUnitTest.java
diff --git a/src/test/java/htsjdk/variant/variantcontext/GenotypeLikelihoodsUnitTest.java b/src/test/java/htsjdk/variant/variantcontext/GenotypeLikelihoodsUnitTest.java
new file mode 100644
index 0000000..a7c2bb6
--- /dev/null
+++ b/src/test/java/htsjdk/variant/variantcontext/GenotypeLikelihoodsUnitTest.java
@@ -0,0 +1,337 @@
+/*
+* Copyright (c) 2012 The Broad Institute
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package htsjdk.variant.variantcontext;
+
+
+// the imports for unit testing.
+
+
+import htsjdk.tribble.TribbleException;
+import htsjdk.variant.VariantBaseTest;
+import htsjdk.variant.utils.GeneralUtils;
+import org.testng.Assert;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.Arrays;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+
+/**
+ * Basic unit test for Genotype likelihoods objects
+ */
+public class GenotypeLikelihoodsUnitTest extends VariantBaseTest {
+    double [] v = new double[]{-10.5, -1.25, -5.11};
+    final static String vGLString = "-10.50,-1.25,-5.11";
+    final static String vPLString = "93,0,39";
+    double[] triAllelic = new double[]{-4.2,-2.0,-3.0,-1.6,0.0,-4.0}; //AA,AB,AC,BB,BC,CC
+
+    @BeforeMethod
+    public void initializeAnyploidPLIndexToAlleleIndices() {
+        GenotypeLikelihoods.anyploidPloidyToPLIndexToAlleleIndices.clear();
+        GenotypeLikelihoods.initializeAnyploidPLIndexToAlleleIndices(1, 1);
+        GenotypeLikelihoods.initializeAnyploidPLIndexToAlleleIndices(2, 2);
+        GenotypeLikelihoods.initializeAnyploidPLIndexToAlleleIndices(2, 3);
+    }
+
+    @Test
+    public void testFromVector2() {
+        GenotypeLikelihoods gl = GenotypeLikelihoods.fromLog10Likelihoods(v);
+        assertDoubleArraysAreEqual(gl.getAsVector(), v);
+        Assert.assertEquals(gl.getAsString(), vPLString);
+    }
+
+    @Test
+    public void testFromString1() {
+        GenotypeLikelihoods gl = GenotypeLikelihoods.fromPLField(vPLString);
+        assertDoubleArraysAreEqual(gl.getAsVector(), new double[]{-9.3, 0, -3.9});
+        Assert.assertEquals(gl.getAsString(), vPLString);
+    }
+
+    @Test
+    public void testFromString2() {
+        GenotypeLikelihoods gl = GenotypeLikelihoods.fromGLField(vGLString);
+        assertDoubleArraysAreEqual(gl.getAsVector(), v);
+        Assert.assertEquals(gl.getAsString(), vPLString);
+    }
+
+    @Test (expectedExceptions = TribbleException.class)
+    public void testErrorBadFormat() {
+        GenotypeLikelihoods gl = GenotypeLikelihoods.fromPLField("adf,b,c");
+        gl.getAsVector();
+    }
+
+    @Test
+    public void testGetAsMap(){
+        GenotypeLikelihoods gl = GenotypeLikelihoods.fromLog10Likelihoods(v);
+        //Log scale
+        EnumMap<GenotypeType,Double> glMap = gl.getAsMap(false);
+        Assert.assertEquals(v[GenotypeType.HOM_REF.ordinal()-1],glMap.get(GenotypeType.HOM_REF));
+        Assert.assertEquals(v[GenotypeType.HET.ordinal()-1],glMap.get(GenotypeType.HET));
+        Assert.assertEquals(v[GenotypeType.HOM_VAR.ordinal()-1],glMap.get(GenotypeType.HOM_VAR));
+
+        //Linear scale
+        glMap = gl.getAsMap(true);
+        double [] vl = GeneralUtils.normalizeFromLog10(v);
+        Assert.assertEquals(vl[GenotypeType.HOM_REF.ordinal()-1],glMap.get(GenotypeType.HOM_REF));
+        Assert.assertEquals(vl[GenotypeType.HET.ordinal()-1],glMap.get(GenotypeType.HET));
+        Assert.assertEquals(vl[GenotypeType.HOM_VAR.ordinal()-1],glMap.get(GenotypeType.HOM_VAR));
+
+        //Test missing likelihoods
+        gl = GenotypeLikelihoods.fromPLField(".");
+        glMap = gl.getAsMap(false);
+        Assert.assertNull(glMap);
+
+    }
+
+    @Test
+    public void testCalculateNumLikelihoods() {    
+        
+        for (int nAlleles=2; nAlleles<=5; nAlleles++)
+            // simplest case: diploid
+            Assert.assertEquals(GenotypeLikelihoods.numLikelihoods(nAlleles, 2), nAlleles*(nAlleles+1)/2);
+
+        // some special cases: ploidy = 20, #alleles = 4
+        Assert.assertEquals(GenotypeLikelihoods.numLikelihoods(4, 20), 1771);
+    }
+    
+    @Test
+    public void testGetLog10GQ(){
+        GenotypeLikelihoods gl = GenotypeLikelihoods.fromPLField(vPLString);
+
+        //GQ for the best guess genotype
+        Assert.assertEquals(gl.getLog10GQ(GenotypeType.HET),-3.9);
+
+        double[] test = GeneralUtils.normalizeFromLog10(gl.getAsVector());
+
+        //GQ for the other genotypes
+        Assert.assertEquals(gl.getLog10GQ(GenotypeType.HOM_REF), Math.log10(1.0 - test[GenotypeType.HOM_REF.ordinal()-1]));
+        Assert.assertEquals(gl.getLog10GQ(GenotypeType.HOM_VAR), Math.log10(1.0 - test[GenotypeType.HOM_VAR.ordinal()-1]));
+
+       //Test missing likelihoods
+        gl = GenotypeLikelihoods.fromPLField(".");
+        Assert.assertEquals(gl.getLog10GQ(GenotypeType.HOM_REF),Double.NEGATIVE_INFINITY);
+        Assert.assertEquals(gl.getLog10GQ(GenotypeType.HET),Double.NEGATIVE_INFINITY);
+        Assert.assertEquals(gl.getLog10GQ(GenotypeType.HOM_VAR),Double.NEGATIVE_INFINITY);
+
+    }
+
+    @Test
+    public void testgetQualFromLikelihoods() {
+        double[] likelihoods = new double[]{-1, 0, -2};
+        // qual values we expect for each possible "best" genotype
+        double[] expectedQuals = new double[]{-0.04100161, -1, -0.003930294};
+
+        for ( int i = 0; i < likelihoods.length; i++ ) {
+            Assert.assertEquals(GenotypeLikelihoods.getGQLog10FromLikelihoods(i, likelihoods), expectedQuals[i], 1e-6,
+                    "GQ value for genotype " + i + " was not calculated correctly");
+        }
+    }
+
+    // this test is completely broken, the method is wrong.
+    public void testGetQualFromLikelihoodsMultiAllelicBroken() {
+        GenotypeLikelihoods gl = GenotypeLikelihoods.fromLog10Likelihoods(triAllelic);
+        double actualGQ = gl.getLog10GQ(GenotypeType.HET);
+        double expectedGQ = 1.6;
+        Assert.assertEquals(actualGQ,expectedGQ);
+    }
+
+    public void testGetQualFromLikelihoodsMultiAllelic() {
+        GenotypeLikelihoods gl = GenotypeLikelihoods.fromLog10Likelihoods(triAllelic);
+        Allele ref = Allele.create((byte)'A',true);
+        Allele alt1 = Allele.create((byte)'C');
+        Allele alt2 = Allele.create((byte)'T');
+        List<Allele> allAlleles = Arrays.asList(ref,alt1,alt2);
+        List<Allele> gtAlleles = Arrays.asList(alt1,alt2);
+        GenotypeBuilder gtBuilder = new GenotypeBuilder();
+        gtBuilder.alleles(gtAlleles);
+        double actualGQ = gl.getLog10GQ(gtBuilder.make(),allAlleles);
+        double expectedGQ = 1.6;
+        Assert.assertEquals(actualGQ,expectedGQ);
+    }
+
+    private void assertDoubleArraysAreEqual(double[] v1, double[] v2) {
+        Assert.assertEquals(v1.length, v2.length);
+        for ( int i = 0; i < v1.length; i++ ) {
+            Assert.assertEquals(v1[i], v2[i], 1e-6);
+        }
+    }
+
+    @Test
+    public void testCalculatePLindex(){
+        int counter = 0;
+        for ( int i = 0; i <= 3; i++ ) {
+            for ( int j = i; j <= 3; j++ ) {
+                Assert.assertEquals(GenotypeLikelihoods.calculatePLindex(i, j), GenotypeLikelihoods.PLindexConversion[counter++], "PL index of alleles " + i + "," + j + " was not calculated correctly");
+            }
+        }
+    }
+
+    @DataProvider
+    public Object[][] testGetAllelePairData() {
+        return new Object[][]{
+                {0, 0, 0},
+                {1, 0, 1},
+                {2, 1, 1},
+                {3, 0, 2},
+                {4, 1, 2},
+                {5, 2, 2},
+                {6, 0, 3},
+                {7, 1, 3},
+                {8, 2, 3},
+                {9, 3, 3}
+        };
+    }
+
+    @Test(dataProvider = "testGetAllelePairData")
+    public void testGetAllelePair(final int PLindex, final int allele1, final int allele2) {
+        Assert.assertEquals(GenotypeLikelihoods.getAllelePair(PLindex).alleleIndex1, allele1, "allele index " + allele1 + " from PL index " + PLindex + " was not calculated correctly");
+        Assert.assertEquals(GenotypeLikelihoods.getAllelePair(PLindex).alleleIndex2, allele2, "allele index " + allele2 + " from PL index " + PLindex + " was not calculated correctly");
+    }
+
+    @DataProvider
+    public Object[][] testCalculateAnyploidPLcacheData() {
+        return new Object[][]{
+                {
+                        1, 1,
+                        Arrays.asList(Arrays.asList(0),
+                                Arrays.asList(1)
+                        )
+                },
+                {
+                        2, 2,
+                        Arrays.asList(Arrays.asList(0, 0),
+                                Arrays.asList(0, 1),
+                                Arrays.asList(1, 1),
+                                Arrays.asList(0, 2),
+                                Arrays.asList(1, 2),
+                                Arrays.asList(2, 2)
+                        )
+                },
+                {
+                        2, 3,
+                        Arrays.asList(Arrays.asList(0, 0, 0),
+                                Arrays.asList(0, 0, 1),
+                                Arrays.asList(0, 1, 1),
+                                Arrays.asList(1, 1, 1),
+                                Arrays.asList(0, 0, 2),
+                                Arrays.asList(0, 1, 2),
+                                Arrays.asList(1, 1, 2),
+                                Arrays.asList(0, 2, 2),
+                                Arrays.asList(1, 2, 2),
+                                Arrays.asList(2, 2, 2)
+                        )
+                },
+                {
+                        2, -1,
+                        Arrays.asList(Arrays.asList())
+                },
+                {
+                        -1, 2,
+                        Arrays.asList(Arrays.asList())
+                }
+        };
+    }
+
+    @Test(dataProvider = "testCalculateAnyploidPLcacheData")
+    public void testCalculateAnyploidPLcache(final int altAlleles, final int ploidy, final List<List<Integer>> expected) {
+        List<List<Integer>> anyploidPLIndexToAlleleIndices = GenotypeLikelihoods.calculateAnyploidPLcache(altAlleles, ploidy);
+        for ( int i=0; i < anyploidPLIndexToAlleleIndices.size(); i++ )
+            Assert.assertEquals(anyploidPLIndexToAlleleIndices.get(i), expected.get(i));
+    }
+
+    @Test(dataProvider = "testCalculateAnyploidPLcacheData")
+    public void testInitializeAnyploidPLIndexToAlleleIndices(final int altAlleles, final int ploidy, final List<List<Integer>> expected) {
+        if ( altAlleles >= 1 && ploidy >= 1 ) { // Bypass test with bad data
+            Map<Integer, List<List<Integer>>> expectedMap = new HashMap<Integer, List<List<Integer>>>();
+            expectedMap.put(ploidy, expected);
+            for (Map.Entry<Integer, List<List<Integer>>> entry : GenotypeLikelihoods.anyploidPloidyToPLIndexToAlleleIndices.entrySet()) {
+                if (expectedMap.containsKey(entry.getKey()))
+                    Assert.assertEquals(entry.getValue(), expectedMap.get(entry.getKey()));
+            }
+        }
+    }
+
+    @DataProvider
+    public Object[][] testInitializeAnyploidPLIndexToAlleleIndiceseBadData() {
+        return new Object[][]{
+                { 2, -1 },
+                { -1, 2 }
+        };
+    }
+
+    @Test(dataProvider = "testInitializeAnyploidPLIndexToAlleleIndiceseBadData", expectedExceptions = IllegalArgumentException.class)
+    public void testInitializeAnyploidPLIndexToAlleleIndicesBadData(final int altAlleles, final int ploidy) {
+        GenotypeLikelihoods.initializeAnyploidPLIndexToAlleleIndices(altAlleles, ploidy);
+    }
+
+    @DataProvider
+    public Object[][] testGetAllelesData() {
+        return new Object[][]{
+                {0, 2, 3, Arrays.asList(0,0,0)},
+                {1, 2, 3, Arrays.asList(0,0,1)},
+                {2, 2, 3, Arrays.asList(0,1,1)},
+                {3, 2, 3, Arrays.asList(1,1,1)},
+                {4, 2, 3, Arrays.asList(0,0,2)},
+                {5, 2, 3, Arrays.asList(0,1,2)},
+                {6, 2, 3, Arrays.asList(1,1,2)},
+                {7, 2, 3, Arrays.asList(0,2,2)},
+                {8, 2, 3, Arrays.asList(1,2,2)},
+                {9, 2, 3, Arrays.asList(2,2,2)},
+                {1, 2, 1, Arrays.asList(1)}
+        };
+    }
+
+    @Test(dataProvider = "testGetAllelesData")
+    public void testGetAlleles(final int PLindex, final int altAlleles, final int ploidy, final List<Integer> expected ) {
+        Assert.assertEquals(GenotypeLikelihoods.getAlleles(PLindex, ploidy), expected);
+    }
+
+    @DataProvider
+    public Object[][] testGetAllelesIndexOutOfBoundsData() {
+        return new Object[][]{
+                {-1, 3},  // PL index too small, non-diploid
+                {10, 3},  // PL index too large, non-diploid
+                {-1, 2},  // PL index too small, diploid
+                {GenotypeLikelihoods.numLikelihoods(GenotypeLikelihoods.MAX_DIPLOID_ALT_ALLELES_THAT_CAN_BE_GENOTYPED+1,2), 2} // PL index too large, diploid
+        };
+    }
+
+    @Test(dataProvider = "testGetAllelesIndexOutOfBoundsData", expectedExceptions = IllegalStateException.class)
+    public void testGetAllelesOutOfBounds(final int PLindex, final int ploidy) {
+        final List<Integer> alleles = GenotypeLikelihoods.getAlleles(PLindex, ploidy);
+    }
+
+    @Test(expectedExceptions = IllegalStateException.class)
+    public void testGetAllelesUnitialized() {
+        GenotypeLikelihoods.anyploidPloidyToPLIndexToAlleleIndices.clear();
+        final List<Integer> alleles = GenotypeLikelihoods.getAlleles(0, 3);
+    }
+}
diff --git a/src/tests/java/htsjdk/variant/variantcontext/GenotypeUnitTest.java b/src/test/java/htsjdk/variant/variantcontext/GenotypeUnitTest.java
similarity index 100%
rename from src/tests/java/htsjdk/variant/variantcontext/GenotypeUnitTest.java
rename to src/test/java/htsjdk/variant/variantcontext/GenotypeUnitTest.java
diff --git a/src/tests/java/htsjdk/variant/variantcontext/GenotypesContextUnitTest.java b/src/test/java/htsjdk/variant/variantcontext/GenotypesContextUnitTest.java
similarity index 100%
rename from src/tests/java/htsjdk/variant/variantcontext/GenotypesContextUnitTest.java
rename to src/test/java/htsjdk/variant/variantcontext/GenotypesContextUnitTest.java
diff --git a/src/tests/java/htsjdk/variant/variantcontext/VariantContextBuilderTest.java b/src/test/java/htsjdk/variant/variantcontext/VariantContextBuilderTest.java
similarity index 100%
rename from src/tests/java/htsjdk/variant/variantcontext/VariantContextBuilderTest.java
rename to src/test/java/htsjdk/variant/variantcontext/VariantContextBuilderTest.java
diff --git a/src/test/java/htsjdk/variant/variantcontext/VariantContextTestProvider.java b/src/test/java/htsjdk/variant/variantcontext/VariantContextTestProvider.java
new file mode 100644
index 0000000..868aacc
--- /dev/null
+++ b/src/test/java/htsjdk/variant/variantcontext/VariantContextTestProvider.java
@@ -0,0 +1,1014 @@
+/*
+* Copyright (c) 2012 The Broad Institute
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package htsjdk.variant.variantcontext;
+
+import htsjdk.tribble.FeatureCodec;
+import htsjdk.tribble.FeatureCodecHeader;
+import htsjdk.tribble.Tribble;
+import htsjdk.tribble.readers.*;
+import htsjdk.variant.VariantBaseTest;
+import htsjdk.variant.bcf2.BCF2Codec;
+import htsjdk.variant.utils.GeneralUtils;
+import htsjdk.variant.variantcontext.writer.Options;
+import htsjdk.variant.variantcontext.writer.VariantContextWriter;
+import htsjdk.variant.vcf.VCFCodec;
+import htsjdk.variant.vcf.VCFConstants;
+import htsjdk.variant.vcf.VCFContigHeaderLine;
+import htsjdk.variant.vcf.VCFFilterHeaderLine;
+import htsjdk.variant.vcf.VCFFormatHeaderLine;
+import htsjdk.variant.vcf.VCFHeader;
+import htsjdk.variant.vcf.VCFHeaderLine;
+import htsjdk.variant.vcf.VCFHeaderLineCount;
+import htsjdk.variant.vcf.VCFHeaderLineType;
+import htsjdk.variant.vcf.VCFInfoHeaderLine;
+
+import org.testng.Assert;
+
+import java.io.BufferedInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;
+
+/**
+ * Routines for generating all sorts of VCs for testing
+ */
+public class VariantContextTestProvider {
+    final private static boolean ENABLE_GENOTYPE_TESTS = true;
+    final private static boolean ENABLE_A_AND_G_TESTS = true;
+    final private static boolean ENABLE_VARARRAY_TESTS = true;
+    final private static boolean ENABLE_PLOIDY_TESTS = true;
+    final private static boolean ENABLE_PL_TESTS = true;
+    final private static boolean ENABLE_SYMBOLIC_ALLELE_TESTS = true;
+    final private static boolean ENABLE_SOURCE_VCF_TESTS = true;
+    final private static boolean ENABLE_VARIABLE_LENGTH_GENOTYPE_STRING_TESTS = true;
+    final private static List<Integer> TWENTY_INTS = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20);
+
+    private static VCFHeader syntheticHeader;
+    final static List<VariantContextTestData> TEST_DATAs = new ArrayList<VariantContextTestData>();
+    private static VariantContext ROOT;
+
+    private final static List<File> testSourceVCFs = new ArrayList<File>();
+    static {
+        testSourceVCFs.add(new File(VariantBaseTest.variantTestDataRoot + "ILLUMINA.wex.broad_phase2_baseline.20111114.both.exome.genotypes.1000.vcf"));
+        testSourceVCFs.add(new File(VariantBaseTest.variantTestDataRoot + "ex2.vcf"));
+        testSourceVCFs.add(new File(VariantBaseTest.variantTestDataRoot + "dbsnp_135.b37.1000.vcf"));
+        if ( ENABLE_SYMBOLIC_ALLELE_TESTS ) {
+            testSourceVCFs.add(new File(VariantBaseTest.variantTestDataRoot + "diagnosis_targets_testfile.vcf"));
+            testSourceVCFs.add(new File(VariantBaseTest.variantTestDataRoot + "VQSR.mixedTest.recal"));
+            testSourceVCFs.add(new File(VariantBaseTest.variantTestDataRoot + "breakpoint.vcf"));
+        }
+    }
+
+    public static class VariantContextContainer {
+        private VCFHeader header;
+        private Iterable<VariantContext> vcs;
+
+        public VariantContextContainer( VCFHeader header, Iterable<VariantContext> vcs ) {
+            this.header = header;
+            this.vcs = vcs;
+        }
+
+        public VCFHeader getHeader() {
+            return header;
+        }
+
+        public Iterable<VariantContext> getVCs() {
+            return vcs;
+        }
+    }
+
+    public abstract static class VariantContextIOTest<CODECTYPE> {
+        public String toString() {
+            return "VariantContextIOTest:" + getExtension();
+        }
+        public abstract String getExtension();
+        public abstract CODECTYPE makeCodec();
+        public abstract VariantContextWriter makeWriter(final File outputFile, final EnumSet<Options> baseOptions);
+
+        public abstract VariantContextContainer readAllVCs(final File input) throws IOException;
+        
+        public List<VariantContext> preprocess(final VCFHeader header, List<VariantContext> vcsBeforeIO) {
+            return vcsBeforeIO;
+        }
+
+        public List<VariantContext> postprocess(final VCFHeader header, List<VariantContext> vcsAfterIO) {
+            return vcsAfterIO;
+        }
+    }
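+
+    // A minimal sketch (illustrative only; the class name and the use of
+    // VariantContextWriterBuilder are assumptions, concrete testers live in the
+    // individual reader/writer unit tests) of a VCF-backed implementation:
+    //
+    //   class VCFIOTest extends VariantContextIOTest<VCFCodec> {
+    //       public String getExtension() { return ".vcf"; }
+    //       public VCFCodec makeCodec() { return new VCFCodec(); }
+    //       public VariantContextWriter makeWriter(final File out, final EnumSet<Options> opts) {
+    //           return new VariantContextWriterBuilder().setOutputFile(out).setOptions(opts).build();
+    //       }
+    //       public VariantContextContainer readAllVCs(final File input) throws IOException {
+    //           return VariantContextTestProvider.readAllVCs(input, makeCodec());
+    //       }
+    //   }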
+
+    public static class VariantContextTestData {
+        public final VCFHeader header;
+        public List<VariantContext> vcs;
+
+        public VariantContextTestData(final VCFHeader header, final VariantContextBuilder builder) {
+            this(header, Collections.singletonList(builder.fullyDecoded(true).make()));
+        }
+
+        public VariantContextTestData(final VCFHeader header, final List<VariantContext> vcs) {
+            final Set<String> samples = new HashSet<String>();
+            for ( final VariantContext vc : vcs )
+                if ( vc.hasGenotypes() )
+                    samples.addAll(vc.getSampleNames());
+            this.header = samples.isEmpty() ? header : new VCFHeader(header.getMetaDataInSortedOrder(), samples);
+            this.vcs = vcs;
+        }
+
+        public boolean hasGenotypes() {
+            return vcs.get(0).hasGenotypes();
+        }
+
+        public String toString() {
+            StringBuilder b = new StringBuilder();
+            b.append("VariantContextTestData: [");
+            final VariantContext vc = vcs.get(0);
+            final VariantContextBuilder builder = new VariantContextBuilder(vc);
+            builder.noGenotypes();
+            b.append(builder.make().toString());
+            if ( vc.getNSamples() < 5 ) {
+                for ( final Genotype g : vc.getGenotypes() )
+                    b.append(g.toString());
+            } else {
+                b.append(" nGenotypes = ").append(vc.getNSamples());
+            }
+
+            if ( vcs.size() > 1 ) b.append(" ----- with another ").append(vcs.size() - 1).append(" VariantContext records");
+            b.append("]");
+            return b.toString();
+        }
+    }
+
+    private final static VariantContextBuilder builder() {
+        return new VariantContextBuilder(ROOT);
+    }
+
+    private final static void add(VariantContextBuilder builder) {
+        TEST_DATAs.add(new VariantContextTestData(syntheticHeader, builder));
+    }
+
+    public static void initializeTests() throws IOException {
+        createSyntheticHeader();
+        makeSyntheticTests();
+        makeEmpiricalTests();
+    }
+
+    private static void makeEmpiricalTests() throws IOException {
+        if ( ENABLE_SOURCE_VCF_TESTS ) {
+            for ( final File file : testSourceVCFs ) {
+                VCFCodec codec = new VCFCodec();
+                VariantContextContainer x = readAllVCs( file, codec );
+                List<VariantContext> fullyDecoded = new ArrayList<VariantContext>();
+
+                for ( final VariantContext raw : x.getVCs() ) {
+                    if ( raw != null )
+                        fullyDecoded.add(raw.fullyDecode(x.getHeader(), false));
+                }
+
+                TEST_DATAs.add(new VariantContextTestData(x.getHeader(), fullyDecoded));
+            }
+        }
+    }
+
+    private final static void addHeaderLine(final Set<VCFHeaderLine> metaData, final String id, final int count, final VCFHeaderLineType type) {
+        metaData.add(new VCFInfoHeaderLine(id, count, type, "x"));
+        if ( type != VCFHeaderLineType.Flag )
+            metaData.add(new VCFFormatHeaderLine(id, count, type, "x"));
+    }
+
+    private final static void addHeaderLine(final Set<VCFHeaderLine> metaData, final String id, final VCFHeaderLineCount count, final VCFHeaderLineType type) {
+        metaData.add(new VCFInfoHeaderLine(id, count, type, "x"));
+        if ( type != VCFHeaderLineType.Flag )
+            metaData.add(new VCFFormatHeaderLine(id, count, type, "x"));
+    }
+
+    private static void createSyntheticHeader() {
+        Set<VCFHeaderLine> metaData = new TreeSet<VCFHeaderLine>();
+
+        addHeaderLine(metaData, "STRING1", 1, VCFHeaderLineType.String);
+        addHeaderLine(metaData, "END", 1, VCFHeaderLineType.Integer);
+        addHeaderLine(metaData, "STRING3", 3, VCFHeaderLineType.String);
+        addHeaderLine(metaData, "STRING20", 20, VCFHeaderLineType.String);
+        addHeaderLine(metaData, "VAR.INFO.STRING", VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String);
+
+        addHeaderLine(metaData, "GT", 1, VCFHeaderLineType.String);
+        addHeaderLine(metaData, "GQ", 1, VCFHeaderLineType.Integer);
+        addHeaderLine(metaData, "ADA", VCFHeaderLineCount.A, VCFHeaderLineType.Integer);
+        addHeaderLine(metaData, "PL", VCFHeaderLineCount.G, VCFHeaderLineType.Integer);
+        addHeaderLine(metaData, "GS", 2, VCFHeaderLineType.String);
+        addHeaderLine(metaData, "GV", VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String);
+        addHeaderLine(metaData, "FT", 1, VCFHeaderLineType.String);
+
+        // prep the header
+        metaData.add(new VCFContigHeaderLine(Collections.singletonMap("ID", "1"), 0));
+
+        metaData.add(new VCFFilterHeaderLine("FILTER1"));
+        metaData.add(new VCFFilterHeaderLine("FILTER2"));
+
+        addHeaderLine(metaData, "INT1", 1, VCFHeaderLineType.Integer);
+        addHeaderLine(metaData, "INT3", 3, VCFHeaderLineType.Integer);
+        addHeaderLine(metaData, "INT20", 20, VCFHeaderLineType.Integer);
+        addHeaderLine(metaData, "INT.VAR", VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.Integer);
+        addHeaderLine(metaData, "FLOAT1", 1, VCFHeaderLineType.Float);
+        addHeaderLine(metaData, "FLOAT3", 3, VCFHeaderLineType.Float);
+        addHeaderLine(metaData, "FLAG", 0, VCFHeaderLineType.Flag);
+
+        syntheticHeader = new VCFHeader(metaData);
+    }
+
+
+    private static void makeSyntheticTests() {
+        VariantContextBuilder rootBuilder = new VariantContextBuilder();
+        rootBuilder.source("test");
+        rootBuilder.loc("1", 10, 10);
+        rootBuilder.alleles("A", "C");
+        rootBuilder.unfiltered();
+        ROOT = rootBuilder.make();
+
+        add(builder());
+        add(builder().alleles("A"));
+        add(builder().alleles("A", "C", "T"));
+        add(builder().alleles("A", "AC"));
+        add(builder().alleles("A", "ACAGT"));
+        add(builder().loc("1", 10, 11).alleles("AC", "A"));
+        add(builder().loc("1", 10, 13).alleles("ACGT", "A"));
+
+        // make sure filters work
+        add(builder().unfiltered());
+        add(builder().passFilters());
+        add(builder().filters("FILTER1"));
+        add(builder().filters("FILTER1", "FILTER2"));
+
+        add(builder().log10PError(VariantContext.NO_LOG10_PERROR));
+        add(builder().log10PError(-1));
+        add(builder().log10PError(-1.234e6));
+
+        add(builder().noID());
+        add(builder().id("rsID12345"));
+
+
+        add(builder().attribute("INT1", 1));
+        add(builder().attribute("INT1", 100));
+        add(builder().attribute("INT1", 1000));
+        add(builder().attribute("INT1", 100000));
+        add(builder().attribute("INT1", null));
+        add(builder().attribute("INT3", Arrays.asList(1, 2, 3)));
+        add(builder().attribute("INT3", Arrays.asList(1000, 2000, 3000)));
+        add(builder().attribute("INT3", Arrays.asList(100000, 200000, 300000)));
+        add(builder().attribute("INT3", null));
+        add(builder().attribute("INT20", TWENTY_INTS));
+
+        add(builder().attribute("FLOAT1", 1.0));
+        add(builder().attribute("FLOAT1", 100.0));
+        add(builder().attribute("FLOAT1", 1000.0));
+        add(builder().attribute("FLOAT1", 100000.0));
+        add(builder().attribute("FLOAT1", null));
+        add(builder().attribute("FLOAT3", Arrays.asList(1.0, 2.0, 3.0)));
+        add(builder().attribute("FLOAT3", Arrays.asList(1000.0, 2000.0, 3000.0)));
+        add(builder().attribute("FLOAT3", Arrays.asList(100000.0, 200000.0, 300000.0)));
+        add(builder().attribute("FLOAT3", null));
+
+        add(builder().attribute("FLAG", true));
+        //add(builder().attribute("FLAG", false)); // NOTE -- VCF doesn't allow false flags
+
+        add(builder().attribute("STRING1", "s1"));
+        add(builder().attribute("STRING1", null));
+        add(builder().attribute("STRING3", Arrays.asList("s1", "s2", "s3")));
+        add(builder().attribute("STRING3", null));
+        add(builder().attribute("STRING20", Arrays.asList("s1", "s2", "s3", "s4", "s5", "s6", "s7", "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15", "s16", "s17", "s18", "s19", "s20")));
+
+        add(builder().attribute("VAR.INFO.STRING", "s1"));
+        add(builder().attribute("VAR.INFO.STRING", Arrays.asList("s1", "s2")));
+        add(builder().attribute("VAR.INFO.STRING", Arrays.asList("s1", "s2", "s3")));
+        add(builder().attribute("VAR.INFO.STRING", null));
+
+        if ( ENABLE_GENOTYPE_TESTS ) {
+            addGenotypesToTestData();
+            addComplexGenotypesTest();
+        }
+
+        if ( ENABLE_A_AND_G_TESTS )
+            addGenotypesAndGTests();
+
+        if ( ENABLE_SYMBOLIC_ALLELE_TESTS )
+            addSymbolicAlleleTests();
+    }
+
+    private static void addSymbolicAlleleTests() {
+        // two tests to ensure that the end is computed correctly both when an END attribute is present and when it is not
+        add(builder().alleles("N", "<VQSR>").start(10).stop(11).attribute("END", 11));
+        add(builder().alleles("N", "<VQSR>").start(10).stop(10));
+    }
+
+    private static void addGenotypesToTestData() {
+        final ArrayList<VariantContext> sites = new ArrayList<VariantContext>();
+
+        sites.add(builder().alleles("A").make());
+        sites.add(builder().alleles("A", "C", "T").make());
+        sites.add(builder().alleles("A", "AC").make());
+        sites.add(builder().alleles("A", "ACAGT").make());
+
+        for ( VariantContext site : sites ) {
+            addGenotypes(site);
+        }
+    }
+
+    private static void addGenotypeTests( final VariantContext site, Genotype ... genotypes ) {
+        // for each site's VC we create two root genotypes.
+        // The first is the primary and is added to every new test.
+        // The second is variable: in some tests it is absent (testing a single genotype),
+        // in others it is duplicated 10, 100, or 1000 times to test scaling.
+
+        final VariantContextBuilder builder = new VariantContextBuilder(site);
+
+        // add a single context
+        builder.genotypes(genotypes[0]);
+        add(builder);
+
+        if ( genotypes.length > 1 ) {
+            // add all
+            add(builder.genotypes(Arrays.asList(genotypes)));
+
+            // add all with the last genotype replicated 10x, 100x, and 1000x
+            for ( int nCopiesOfLast : Arrays.asList(10, 100, 1000) ) {
+                final GenotypesContext gc = new GenotypesContext();
+                final Genotype last = genotypes[genotypes.length-1];
+                for ( int i = 0; i < genotypes.length - 1; i++ )
+                    gc.add(genotypes[i]);
+                for ( int i = 0; i < nCopiesOfLast; i++ )
+                    gc.add(new GenotypeBuilder(last).name("copy" + i).make());
+                add(builder.genotypes(gc));
+            }
+        }
+    }
+
+    private static void addGenotypes( final VariantContext site) {
+        // test ref/ref
+        final Allele ref = site.getReference();
+        final Allele alt1 = site.getNAlleles() > 1 ? site.getAlternateAllele(0) : null;
+        final Genotype homRef = GenotypeBuilder.create("homRef", Arrays.asList(ref, ref));
+        addGenotypeTests(site, homRef);
+
+        if ( alt1 != null ) {
+            final Genotype het = GenotypeBuilder.create("het", Arrays.asList(ref, alt1));
+            final Genotype homVar = GenotypeBuilder.create("homVar", Arrays.asList(alt1, alt1));
+            addGenotypeTests(site, homRef, het);
+            addGenotypeTests(site, homRef, het, homVar);
+
+            // test no GT at all
+            addGenotypeTests(site, new GenotypeBuilder("noGT", new ArrayList<Allele>(0)).attribute("INT1", 10).make());
+
+            final List<Allele> noCall = Arrays.asList(Allele.NO_CALL, Allele.NO_CALL);
+
+            // ploidy
+            if ( ENABLE_PLOIDY_TESTS ) {
+                addGenotypeTests(site,
+                        GenotypeBuilder.create("dip", Arrays.asList(ref, alt1)),
+                        GenotypeBuilder.create("hap", Arrays.asList(ref)));
+
+                addGenotypeTests(site,
+                        GenotypeBuilder.create("noCall", noCall),
+                        GenotypeBuilder.create("dip", Arrays.asList(ref, alt1)),
+                        GenotypeBuilder.create("hap", Arrays.asList(ref)));
+
+                addGenotypeTests(site,
+                        GenotypeBuilder.create("noCall",  noCall),
+                        GenotypeBuilder.create("noCall2", noCall),
+                        GenotypeBuilder.create("dip", Arrays.asList(ref, alt1)),
+                        GenotypeBuilder.create("hap", Arrays.asList(ref)));
+
+                addGenotypeTests(site,
+                        GenotypeBuilder.create("dip", Arrays.asList(ref, alt1)),
+                        GenotypeBuilder.create("tet", Arrays.asList(ref, alt1, alt1)));
+
+                addGenotypeTests(site,
+                        GenotypeBuilder.create("noCall", noCall),
+                        GenotypeBuilder.create("dip", Arrays.asList(ref, alt1)),
+                        GenotypeBuilder.create("tet", Arrays.asList(ref, alt1, alt1)));
+
+                addGenotypeTests(site,
+                        GenotypeBuilder.create("noCall", noCall),
+                        GenotypeBuilder.create("noCall2", noCall),
+                        GenotypeBuilder.create("dip", Arrays.asList(ref, alt1)),
+                        GenotypeBuilder.create("tet", Arrays.asList(ref, alt1, alt1)));
+
+                addGenotypeTests(site,
+                        GenotypeBuilder.create("nocall", noCall),
+                        GenotypeBuilder.create("dip", Arrays.asList(ref, alt1)),
+                        GenotypeBuilder.create("tet", Arrays.asList(ref, alt1, alt1)));
+            }
+
+
+            //
+            //
+            // TESTING PHASE
+            //
+            //
+            final Genotype gUnphased = new GenotypeBuilder("gUnphased", Arrays.asList(ref, alt1)).make();
+            final Genotype gPhased = new GenotypeBuilder("gPhased", Arrays.asList(ref, alt1)).phased(true).make();
+            final Genotype gPhased2 = new GenotypeBuilder("gPhased2", Arrays.asList(alt1, alt1)).phased(true).make();
+            final Genotype gPhased3 = new GenotypeBuilder("gPhased3", Arrays.asList(ref, ref)).phased(true).make();
+            final Genotype haploidNoPhase = new GenotypeBuilder("haploidNoPhase", Arrays.asList(ref)).make();
+            addGenotypeTests(site, gUnphased, gPhased);
+            addGenotypeTests(site, gUnphased, gPhased2);
+            addGenotypeTests(site, gUnphased, gPhased3);
+            addGenotypeTests(site, gPhased, gPhased2);
+            addGenotypeTests(site, gPhased, gPhased3);
+            addGenotypeTests(site, gPhased2, gPhased3);
+            addGenotypeTests(site, haploidNoPhase, gPhased);
+            addGenotypeTests(site, haploidNoPhase, gPhased2);
+            addGenotypeTests(site, haploidNoPhase, gPhased3);
+            addGenotypeTests(site, haploidNoPhase, gPhased, gPhased2);
+            addGenotypeTests(site, haploidNoPhase, gPhased, gPhased3);
+            addGenotypeTests(site, haploidNoPhase, gPhased2, gPhased3);
+            addGenotypeTests(site, haploidNoPhase, gPhased, gPhased2, gPhased3);
+
+            final Genotype gUnphasedTet = new GenotypeBuilder("gUnphasedTet", Arrays.asList(ref, alt1, ref, alt1)).make();
+            final Genotype gPhasedTet = new GenotypeBuilder("gPhasedTet", Arrays.asList(ref, alt1, alt1, alt1)).phased(true).make();
+            addGenotypeTests(site, gUnphasedTet, gPhasedTet);
+        }
+
+        if ( ENABLE_PL_TESTS ) {
+            if ( site.getNAlleles() == 2 ) {
+                // testing PLs
+                addGenotypeTests(site,
+                        GenotypeBuilder.create("g1", Arrays.asList(ref, ref), new double[]{0, -1, -2}),
+                        GenotypeBuilder.create("g2", Arrays.asList(ref, ref), new double[]{0, -2, -3}));
+
+                addGenotypeTests(site,
+                        GenotypeBuilder.create("g1", Arrays.asList(ref, ref), new double[]{-1, 0, -2}),
+                        GenotypeBuilder.create("g2", Arrays.asList(ref, ref), new double[]{0, -2, -3}));
+
+                addGenotypeTests(site,
+                        GenotypeBuilder.create("g1", Arrays.asList(ref, ref), new double[]{-1, 0, -2}),
+                        GenotypeBuilder.create("g2", Arrays.asList(ref, ref), new double[]{0, -2000, -1000}));
+
+                addGenotypeTests(site, // missing PLs
+                        GenotypeBuilder.create("g1", Arrays.asList(ref, ref), new double[]{-1, 0, -2}),
+                        GenotypeBuilder.create("g2", Arrays.asList(ref, ref)));
+            }
+            else if ( site.getNAlleles() == 3 ) {
+                // testing PLs
+                addGenotypeTests(site,
+                        GenotypeBuilder.create("g1", Arrays.asList(ref, ref), new double[]{0, -1, -2, -3, -4, -5}),
+                        GenotypeBuilder.create("g2", Arrays.asList(ref, ref), new double[]{0, -2, -3, -4, -5, -6}));
+            }
+        }
+
+        // test attributes
+        addGenotypeTests(site,
+                attr("g1", ref, "INT1", 1),
+                attr("g2", ref, "INT1", 2));
+        addGenotypeTests(site,
+                attr("g1", ref, "INT1", 1),
+                attr("g2", ref, "INT1"));
+        addGenotypeTests(site,
+                attr("g1", ref, "INT3", 1, 2, 3),
+                attr("g2", ref, "INT3", 4, 5, 6));
+        addGenotypeTests(site,
+                attr("g1", ref, "INT3", 1, 2, 3),
+                attr("g2", ref, "INT3"));
+
+        addGenotypeTests(site,
+                attr("g1", ref, "INT20", TWENTY_INTS),
+                attr("g2", ref, "INT20", TWENTY_INTS));
+
+
+        if (ENABLE_VARARRAY_TESTS) {
+            addGenotypeTests(site,
+                    attr("g1", ref, "INT.VAR", 1, 2, 3),
+                    attr("g2", ref, "INT.VAR", 4, 5),
+                    attr("g3", ref, "INT.VAR", 6));
+            addGenotypeTests(site,
+                    attr("g1", ref, "INT.VAR", 1, 2, 3),
+                    attr("g2", ref, "INT.VAR"),
+                    attr("g3", ref, "INT.VAR", 5));
+        }
+
+        addGenotypeTests(site,
+                attr("g1", ref, "FLOAT1", 1.0),
+                attr("g2", ref, "FLOAT1", 2.0));
+        addGenotypeTests(site,
+                attr("g1", ref, "FLOAT1", 1.0),
+                attr("g2", ref, "FLOAT1"));
+        addGenotypeTests(site,
+                attr("g1", ref, "FLOAT3", 1.0, 2.0, 3.0),
+                attr("g2", ref, "FLOAT3", 4.0, 5.0, 6.0));
+        addGenotypeTests(site,
+                attr("g1", ref, "FLOAT3", 1.0, 2.0, 3.0),
+                attr("g2", ref, "FLOAT3"));
+
+        if (ENABLE_VARIABLE_LENGTH_GENOTYPE_STRING_TESTS) {
+            //
+            //
+            // TESTING MULTIPLE SIZED LISTS IN THE GENOTYPE FIELD
+            //
+            //
+            addGenotypeTests(site,
+                    attr("g1", ref, "GS", Arrays.asList("S1", "S2")),
+                    attr("g2", ref, "GS", Arrays.asList("S3", "S4")));
+
+            addGenotypeTests(site, // g1 is missing the string, and g2 is missing FLOAT1
+                    attr("g1", ref, "FLOAT1", 1.0),
+                    attr("g2", ref, "GS", Arrays.asList("S3", "S4")));
+
+            // variable sized lists
+            addGenotypeTests(site,
+                    attr("g1", ref, "GV", "S1"),
+                    attr("g2", ref, "GV", Arrays.asList("S3", "S4")));
+
+            addGenotypeTests(site,
+                    attr("g1", ref, "GV", Arrays.asList("S1", "S2")),
+                    attr("g2", ref, "GV", Arrays.asList("S3", "S4", "S5")));
+
+            addGenotypeTests(site, // missing value in varlist of string
+                    attr("g1", ref, "FLOAT1", 1.0),
+                    attr("g2", ref, "GV", Arrays.asList("S3", "S4", "S5")));
+        }
+
+        //
+        //
+        // TESTING GENOTYPE FILTERS
+        //
+        //
+        addGenotypeTests(site,
+                new GenotypeBuilder("g1-x", Arrays.asList(ref, ref)).filters("X").make(),
+                new GenotypeBuilder("g2-x", Arrays.asList(ref, ref)).filters("X").make());
+        addGenotypeTests(site,
+                new GenotypeBuilder("g1-unft", Arrays.asList(ref, ref)).unfiltered().make(),
+                new GenotypeBuilder("g2-x", Arrays.asList(ref, ref)).filters("X").make());
+        addGenotypeTests(site,
+                new GenotypeBuilder("g1-unft", Arrays.asList(ref, ref)).unfiltered().make(),
+                new GenotypeBuilder("g2-xy", Arrays.asList(ref, ref)).filters("X", "Y").make());
+        addGenotypeTests(site,
+                new GenotypeBuilder("g1-unft", Arrays.asList(ref, ref)).unfiltered().make(),
+                new GenotypeBuilder("g2-x", Arrays.asList(ref, ref)).filters("X").make(),
+                new GenotypeBuilder("g3-xy", Arrays.asList(ref, ref)).filters("X", "Y").make());
+    }
+
+    private static void addGenotypesAndGTests() {
+        for ( final int ploidy : Arrays.asList(1, 2, 3, 4, 5)) {
+            final List<List<String>> alleleCombinations =
+                    Arrays.asList(
+                            Arrays.asList("A"),
+                            Arrays.asList("A", "C"),
+                            Arrays.asList("A", "C", "G"),
+                            Arrays.asList("A", "C", "G", "T"));
+
+            for ( final List<String> alleles : alleleCombinations ) {
+                final VariantContextBuilder vcb = builder().alleles(alleles);
+                final VariantContext site = vcb.make();
+                final int nAlleles = site.getNAlleles();
+                final Allele ref = site.getReference();
+
+                // base genotype is ref/.../ref up to ploidy
+                final List<Allele> baseGenotype = new ArrayList<Allele>(ploidy);
+                for ( int i = 0; i < ploidy; i++) baseGenotype.add(ref);
+                final int nPLs = GenotypeLikelihoods.numLikelihoods(nAlleles, ploidy);
+
+                // ADA gets one value per alt allele: 0, 1, ..., nAlleles - 2
+                final List<Integer> ada = new ArrayList<Integer>(nAlleles);
+                for ( int i = 0; i < nAlleles - 1; i++ ) ada.add(i);
+
+                // pl is 0, 1, ..., nPLs - 1, where nPLs = numLikelihoods(nAlleles, ploidy)
+                final int[] pl = new int[nPLs];
+                for ( int i = 0; i < pl.length; i++ ) pl[i] = i;
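+                // Illustrative check of the count: numLikelihoods follows the usual VCF
+                // genotype count C(nAlleles + ploidy - 1, ploidy), e.g. 3 alleles at ploidy 2
+                // gives C(4, 2) = 6 PL entries and 3 alleles at ploidy 3 gives C(5, 3) = 10.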
+
+                final GenotypeBuilder gb = new GenotypeBuilder("ADA_PL_SAMPLE");
+                gb.alleles(baseGenotype);
+                gb.PL(pl);
+                gb.attribute("ADA", nAlleles == 2 ? ada.get(0) : ada);
+                vcb.genotypes(gb.make());
+
+                add(vcb);
+            }
+        }
+    }
+
+    private static Genotype attr(final String name, final Allele ref, final String key, final Object ... value) {
+        if ( value.length == 0 )
+            return GenotypeBuilder.create(name, Arrays.asList(ref, ref));
+        else {
+            final Object toAdd = value.length == 1 ? value[0] : Arrays.asList(value);
+            return new GenotypeBuilder(name, Arrays.asList(ref, ref)).attribute(key, toAdd).make();
+        }
+    }
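+
+    // Illustrative usage of attr(): attr("g1", ref, "INT1", 1) builds a ref/ref genotype named
+    // g1 carrying INT1=1, attr("g1", ref, "INT3", 1, 2, 3) stores the list [1, 2, 3], and a call
+    // with no values, e.g. attr("g2", ref, "INT1"), leaves the attribute unset so the tests can
+    // model missing FORMAT fields.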
+
+    public static List<VariantContextTestData> generateSiteTests() {
+        return TEST_DATAs;
+    }
+
+    public static void testReaderWriterWithMissingGenotypes(final VariantContextIOTest tester, final VariantContextTestData data) throws IOException {
+        final int nSamples = data.header.getNGenotypeSamples();
+        if ( nSamples > 2 ) {
+            for ( final VariantContext vc : data.vcs )
+                if ( vc.isSymbolic() )
+                    // cannot handle symbolic alleles because they may be weird non-call VCFs
+                    return;
+
+            final File tmpFile = File.createTempFile("testReaderWriter", tester.getExtension());
+            tmpFile.deleteOnExit();
+            Tribble.indexFile(tmpFile).deleteOnExit();
+
+            // write expected to disk
+            final EnumSet<Options> options = EnumSet.of(Options.INDEX_ON_THE_FLY);
+            final VariantContextWriter writer = tester.makeWriter(tmpFile, options);
+
+            final Set<String> samplesInVCF = new HashSet<String>(data.header.getGenotypeSamples());
+            final List<String> missingSamples = Arrays.asList("MISSING1", "MISSING2");
+            final List<String> allSamples = new ArrayList<String>(missingSamples);
+            allSamples.addAll(samplesInVCF);
+
+            final VCFHeader header = new VCFHeader(data.header.getMetaDataInInputOrder(), allSamples);
+            writeVCsToFile(writer, header, data.vcs);
+
+            // ensure writing of expected == actual
+            final VariantContextContainer p = tester.readAllVCs(tmpFile);
+            final Iterable<VariantContext> actual = p.getVCs();
+
+            int i = 0;
+            for ( final VariantContext readVC : actual ) {
+                if ( readVC == null ) continue; // sometimes we read null records...
+                final VariantContext expected = data.vcs.get(i++);
+                for ( final Genotype g : readVC.getGenotypes() ) {
+                    Assert.assertTrue(allSamples.contains(g.getSampleName()));
+                    if ( samplesInVCF.contains(g.getSampleName()) ) {
+                        assertEquals(g, expected.getGenotype(g.getSampleName()));
+                    } else {
+                        // missing
+                        Assert.assertTrue(g.isNoCall());
+                    }
+                }
+            }
+
+        }
+    }
+
+    public static void testReaderWriter(final VariantContextIOTest tester, final VariantContextTestData data) throws IOException {
+        testReaderWriter(tester, data.header, data.vcs, data.vcs, true);
+    }
+
+    public static void testReaderWriter(final VariantContextIOTest tester,
+                                        final VCFHeader header,
+                                        final List<VariantContext> expected,
+                                        final Iterable<VariantContext> vcs,
+                                        final boolean recurse) throws IOException {
+        final File tmpFile = File.createTempFile("testReaderWriter", tester.getExtension());
+        tmpFile.deleteOnExit();
+        Tribble.indexFile(tmpFile).deleteOnExit();
+
+        // write expected to disk
+        final EnumSet<Options> options = EnumSet.of(Options.INDEX_ON_THE_FLY);
+        final VariantContextWriter writer = tester.makeWriter(tmpFile, options);
+        writeVCsToFile(writer, header, vcs);
+
+        // ensure writing of expected == actual
+        final VariantContextContainer p = tester.readAllVCs(tmpFile);
+        final Iterable<VariantContext> actual = p.getVCs();
+        assertEquals(actual, expected);
+
+        if ( recurse ) {
+            // if we are doing a recursive test, grab a fresh iterator over the written values
+            final Iterable<VariantContext> read = tester.readAllVCs(tmpFile).getVCs();
+            testReaderWriter(tester, p.getHeader(), expected, read, false);
+        }
+    }
+
+    private static void writeVCsToFile(final VariantContextWriter writer, final VCFHeader header, final Iterable<VariantContext> vcs) {
+        // write
+        writer.writeHeader(header);
+        for ( VariantContext vc : vcs )
+            if (vc != null)
+                writer.add(vc);
+        writer.close();
+    }
+
+    public static abstract class VCIterable<SOURCE> implements Iterable<VariantContext>, Iterator<VariantContext> {
+        final FeatureCodec<VariantContext, SOURCE> codec;
+        final VCFHeader header;
+
+        public VCIterable(final FeatureCodec<VariantContext, SOURCE> codec, final VCFHeader header) {
+            this.codec = codec;
+            this.header = header;
+        }
+
+        @Override
+        public Iterator<VariantContext> iterator() {
+            return this;
+        }
+
+        @Override
+        public abstract boolean hasNext();
+
+        public abstract SOURCE nextSource();
+        
+        @Override
+        public VariantContext next() {
+            try {
+                final VariantContext vc = codec.decode(nextSource());
+                return vc == null ? null : vc.fullyDecode(header, false);
+            } catch ( IOException e ) {
+                throw new RuntimeException(e);
+            }
+        }
+
+        @Override
+        public void remove() { }
+    }
+
+    public static VariantContextContainer readAllVCs(final File input, final BCF2Codec codec) throws IOException {
+        PositionalBufferedStream headerPbs = new PositionalBufferedStream(new FileInputStream(input));
+        FeatureCodecHeader header = codec.readHeader(headerPbs);
+        headerPbs.close();
+
+        final PositionalBufferedStream pbs = new PositionalBufferedStream(new FileInputStream(input));
+        pbs.skip(header.getHeaderEnd());
+
+        final VCFHeader vcfHeader = (VCFHeader)header.getHeaderValue();
+        return new VariantContextTestProvider.VariantContextContainer(vcfHeader, new VariantContextTestProvider.VCIterable<PositionalBufferedStream>(codec, vcfHeader) {
+            @Override
+            public boolean hasNext() {
+                try {
+                    return !pbs.isDone();
+                } catch (IOException e) {
+                    throw new RuntimeException(e);
+                }
+            }
+
+            @Override
+            public PositionalBufferedStream nextSource() {
+                return pbs;
+            }
+        });
+    }
+
+    public static VariantContextContainer readAllVCs(final File input, final VCFCodec codec) throws FileNotFoundException {
+        final LineIterator lineIterator = new LineIteratorImpl(new SynchronousLineReader(new BufferedInputStream(new FileInputStream(input))));
+        final VCFHeader vcfHeader = (VCFHeader) codec.readActualHeader(lineIterator);
+        return new VariantContextTestProvider.VariantContextContainer(vcfHeader, new VariantContextTestProvider.VCIterable<LineIterator>(codec, vcfHeader) {
+            @Override
+            public boolean hasNext() {
+                return lineIterator.hasNext();
+            }
+
+            @Override
+            public LineIterator nextSource() {
+                return lineIterator;
+            }
+        });
+    }
+    
+    public static void assertVCFandBCFFilesAreTheSame(final File vcfFile, final File bcfFile) throws IOException {
+        final VariantContextContainer vcfData = readAllVCs(vcfFile, new VCFCodec());
+        final VariantContextContainer bcfData = readAllVCs(bcfFile, new BCF2Codec());
+        assertEquals(bcfData.getHeader(), vcfData.getHeader());
+        assertEquals(bcfData.getVCs(), vcfData.getVCs());
+    }
+
+    public static void assertEquals(final Iterable<VariantContext> actual, final Iterable<VariantContext> expected) {
+        final Iterator<VariantContext> actualIT = actual.iterator();
+        final Iterator<VariantContext> expectedIT = expected.iterator();
+
+        while ( expectedIT.hasNext() ) {
+            final VariantContext expectedVC = expectedIT.next();
+            if ( expectedVC == null )
+                continue;
+
+            VariantContext actualVC;
+            do {
+                Assert.assertTrue(actualIT.hasNext(), "Too few records found in actual");
+                actualVC = actualIT.next();
+            } while ( actualIT.hasNext() && actualVC == null );
+
+            if ( actualVC == null )
+                Assert.fail("Too few records in actual");
+
+            assertEquals(actualVC, expectedVC);
+        }
+        Assert.assertTrue(! actualIT.hasNext(), "Too many records found in actual");
+    }
+
+    /**
+     * Assert that two variant contexts are actually equal
+     * @param actual   the observed VariantContext
+     * @param expected the expected VariantContext
+     */
+    public static void assertEquals( final VariantContext actual, final VariantContext expected ) {
+        Assert.assertNotNull(actual, "VariantContext expected not null");
+        Assert.assertEquals(actual.getChr(), expected.getChr(), "chr");
+        Assert.assertEquals(actual.getStart(), expected.getStart(), "start");
+        Assert.assertEquals(actual.getEnd(), expected.getEnd(), "end");
+        Assert.assertEquals(actual.getID(), expected.getID(), "id");
+        Assert.assertEquals(actual.getAlleles(), expected.getAlleles(), "alleles for " + expected + " vs " + actual);
+
+        assertAttributesEquals(actual.getAttributes(), expected.getAttributes());
+        Assert.assertEquals(actual.filtersWereApplied(), expected.filtersWereApplied(), "filtersWereApplied");
+        Assert.assertEquals(actual.isFiltered(), expected.isFiltered(), "isFiltered");
+        VariantBaseTest.assertEqualsSet(actual.getFilters(), expected.getFilters(), "filters");
+        VariantBaseTest.assertEqualsDoubleSmart(actual.getPhredScaledQual(), expected.getPhredScaledQual());
+
+        Assert.assertEquals(actual.hasGenotypes(), expected.hasGenotypes(), "hasGenotypes");
+        if ( expected.hasGenotypes() ) {
+            VariantBaseTest.assertEqualsSet(actual.getSampleNames(), expected.getSampleNames(), "sample names set");
+            Assert.assertEquals(actual.getSampleNamesOrderedByName(), expected.getSampleNamesOrderedByName(), "sample names");
+            final Set<String> samples = expected.getSampleNames();
+            for ( final String sample : samples ) {
+                assertEquals(actual.getGenotype(sample), expected.getGenotype(sample));
+            }
+        }
+    }
+
+    public static void assertEquals(final Genotype actual, final Genotype expected) {
+        Assert.assertEquals(actual.getSampleName(), expected.getSampleName(), "Genotype names");
+        Assert.assertEquals(actual.getAlleles(), expected.getAlleles(), "Genotype alleles");
+        Assert.assertEquals(actual.getGenotypeString(), expected.getGenotypeString(), "Genotype string");
+        Assert.assertEquals(actual.getType(), expected.getType(), "Genotype type");
+
+        // filters are the same
+        Assert.assertEquals(actual.getFilters(), expected.getFilters(), "Genotype fields");
+        Assert.assertEquals(actual.isFiltered(), expected.isFiltered(), "Genotype isFiltered");
+
+        // inline attributes
+        Assert.assertEquals(actual.getDP(), expected.getDP(), "Genotype dp");
+        Assert.assertTrue(Arrays.equals(actual.getAD(), expected.getAD()));
+        Assert.assertEquals(actual.getGQ(), expected.getGQ(), "Genotype gq");
+        Assert.assertEquals(actual.hasPL(), expected.hasPL(), "Genotype hasPL");
+        Assert.assertEquals(actual.hasAD(), expected.hasAD(), "Genotype hasAD");
+        Assert.assertEquals(actual.hasGQ(), expected.hasGQ(), "Genotype hasGQ");
+        Assert.assertEquals(actual.hasDP(), expected.hasDP(), "Genotype hasDP");
+
+        Assert.assertEquals(actual.hasLikelihoods(), expected.hasLikelihoods(), "Genotype hasLikelihoods");
+        Assert.assertEquals(actual.getLikelihoodsString(), expected.getLikelihoodsString(), "Genotype getLikelihoodsString");
+        Assert.assertEquals(actual.getLikelihoods(), expected.getLikelihoods(), "Genotype getLikelihoods");
+        Assert.assertTrue(Arrays.equals(actual.getPL(), expected.getPL()));
+
+        Assert.assertEquals(actual.getGQ(), expected.getGQ(), "Genotype phredScaledQual");
+        assertAttributesEquals(actual.getExtendedAttributes(), expected.getExtendedAttributes());
+        Assert.assertEquals(actual.isPhased(), expected.isPhased(), "Genotype isPhased");
+        Assert.assertEquals(actual.getPloidy(), expected.getPloidy(), "Genotype getPloidy");
+    }
+
+    private static void assertAttributesEquals(final Map<String, Object> actual, Map<String, Object> expected) {
+        final Set<String> expectedKeys = new HashSet<String>(expected.keySet());
+
+        for ( final Map.Entry<String, Object> act : actual.entrySet() ) {
+            final Object actualValue = act.getValue();
+            if ( expected.containsKey(act.getKey()) && expected.get(act.getKey()) != null ) {
+                final Object expectedValue = expected.get(act.getKey());
+                if ( expectedValue instanceof List ) {
+                    final List<Object> expectedList = (List<Object>)expectedValue;
+                    Assert.assertTrue(actualValue instanceof List, act.getKey() + " should be a list but isn't");
+                    final List<Object> actualList = (List<Object>)actualValue;
+                    Assert.assertEquals(actualList.size(), expectedList.size(), act.getKey() + " size");
+                    for ( int i = 0; i < expectedList.size(); i++ )
+                        assertAttributeEquals(act.getKey(), actualList.get(i), expectedList.get(i));
+                } else
+                    assertAttributeEquals(act.getKey(), actualValue, expectedValue);
+            } else {
+                // it's ok to have a binding in x -> null that's absent in y
+                Assert.assertNull(actualValue, act.getKey() + " present in one but not in the other");
+            }
+            expectedKeys.remove(act.getKey());
+        }
+
+        // now expectedKeys contains only the keys found in expected but not in actual,
+        // and they must all be null
+        for ( final String missingExpected : expectedKeys ) {
+            final Object value = expected.get(missingExpected);
+            Assert.assertTrue(isMissing(value), "Attribute " + missingExpected + " missing in one but not in other" );
+        }
+    }
+
+    private static final boolean isMissing(final Object value) {
+        if ( value == null ) return true;
+        else if ( value.equals(VCFConstants.MISSING_VALUE_v4) ) return true;
+        else if ( value instanceof List ) {
+            // handles the case where all elements are null or the list is empty
+            for ( final Object elt : (List)value)
+                if ( elt != null )
+                    return false;
+            return true;
+        } else
+            return false;
+    }
+
+    private static void assertAttributeEquals(final String key, final Object actual, final Object expected) {
+        if ( expected instanceof Double ) {
+            // must be very tolerant because doubles are being rounded to 2 sig figs
+            VariantBaseTest.assertEqualsDoubleSmart(actual, (Double)expected, 1e-2);
+        } else
+            Assert.assertEquals(actual, expected, "Attribute " + key);
+    }
+
+    public static void addComplexGenotypesTest() {
+        final List<Allele> allAlleles = Arrays.asList(
+                Allele.create("A", true),
+                Allele.create("C", false),
+                Allele.create("G", false));
+
+        for ( int nAlleles : Arrays.asList(2, 3) ) {
+            for ( int highestPloidy : Arrays.asList(1, 2, 3) ) {
+                // site alleles
+                final List<Allele> siteAlleles = allAlleles.subList(0, nAlleles);
+
+                // possible alleles for genotypes
+                final List<Allele> possibleGenotypeAlleles = new ArrayList<Allele>(siteAlleles);
+                possibleGenotypeAlleles.add(Allele.NO_CALL);
+
+                // there are n^ploidy possible genotypes
+                final List<List<Allele>> possibleGenotypes = makeAllGenotypes(possibleGenotypeAlleles, highestPloidy);
+                final int nPossibleGenotypes = possibleGenotypes.size();
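+
+                // Illustrative count: with 2 site alleles plus NO_CALL and highestPloidy 2,
+                // permutations with repetition give 3^2 = 9 possible genotypes here.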
+
+                VariantContextBuilder vb = new VariantContextBuilder("unittest", "1", 1, 1, siteAlleles);
+
+                // first test -- create n copies of each genotype
+                for ( int i = 0; i < nPossibleGenotypes; i++ ) {
+                    final List<Genotype> samples = new ArrayList<Genotype>(3);
+                    samples.add(GenotypeBuilder.create("sample" + i, possibleGenotypes.get(i)));
+                    add(vb.genotypes(samples));
+                }
+
+                // second test -- create one sample with each genotype
+                {
+                    final List<Genotype> samples = new ArrayList<Genotype>(nPossibleGenotypes);
+                    for ( int i = 0; i < nPossibleGenotypes; i++ ) {
+                        samples.add(GenotypeBuilder.create("sample" + i, possibleGenotypes.get(i)));
+                    }
+                    add(vb.genotypes(samples));
+                }
+
+                // test mixed ploidy
+                for ( int i = 0; i < nPossibleGenotypes; i++ ) {
+                    for ( int ploidy = 1; ploidy < highestPloidy; ploidy++ ) {
+                        final List<Genotype> samples = new ArrayList<Genotype>(highestPloidy);
+                        final List<Allele> genotype = possibleGenotypes.get(i).subList(0, ploidy);
+                        samples.add(GenotypeBuilder.create("sample" + i, genotype));
+                        add(vb.genotypes(samples));
+                    }
+                }
+            }
+        }
+    }
+
+    private static List<List<Allele>> makeAllGenotypes(final List<Allele> alleles, final int highestPloidy) {
+        return GeneralUtils.makePermutations(alleles, highestPloidy, true);
+    }
+
+    public static void assertEquals(final VCFHeader actual, final VCFHeader expected) {
+        Assert.assertEquals(actual.getMetaDataInSortedOrder().size(), expected.getMetaDataInSortedOrder().size(), "Number of VCF header lines");
+
+        // for some reason set.equals() is returning false but all paired elements are .equals().  Perhaps compareTo is busted?
+        //Assert.assertEquals(actual.getMetaDataInInputOrder(), expected.getMetaDataInInputOrder());
+        final List<VCFHeaderLine> actualLines = new ArrayList<VCFHeaderLine>(actual.getMetaDataInSortedOrder());
+        final List<VCFHeaderLine> expectedLines = new ArrayList<VCFHeaderLine>(expected.getMetaDataInSortedOrder());
+        for ( int i = 0; i < actualLines.size(); i++ ) {
+            Assert.assertEquals(actualLines.get(i), expectedLines.get(i), "VCF header lines");
+        }
+    }
+
+    public static void main( final String[] argv ) {
+        final File variants1 = new File(argv[0]);
+        final File variants2 = new File(argv[1]);
+        try {
+            VariantContextTestProvider.assertVCFandBCFFilesAreTheSame(variants1, variants2);
+        } catch ( IOException e ) {
+            throw new RuntimeException(e);
+        }
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/htsjdk/variant/variantcontext/VariantContextUnitTest.java b/src/test/java/htsjdk/variant/variantcontext/VariantContextUnitTest.java
new file mode 100644
index 0000000..be55f8f
--- /dev/null
+++ b/src/test/java/htsjdk/variant/variantcontext/VariantContextUnitTest.java
@@ -0,0 +1,1450 @@
+/*
+* Copyright (c) 2012 The Broad Institute
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package htsjdk.variant.variantcontext;
+
+
+// the imports for unit testing.
+
+import htsjdk.samtools.util.TestUtil;
+import htsjdk.tribble.AbstractFeatureReader;
+import htsjdk.tribble.FeatureCodec;
+import htsjdk.variant.VariantBaseTest;
+import htsjdk.variant.bcf2.BCF2Codec;
+import htsjdk.variant.vcf.VCFCodec;
+import htsjdk.tribble.TribbleException;
+import htsjdk.variant.vcf.VCFConstants;
+import org.testng.Assert;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.BeforeSuite;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+import java.io.File;
+import java.util.*;
+
+
+public class VariantContextUnitTest extends VariantBaseTest {
+    Allele A, Aref, C, T, Tref;
+    Allele del, delRef, ATC, ATCref;
+
+    // A [ref] / T at 10
+    String snpLoc = "chr1";
+    int snpLocStart = 10;
+    int snpLocStop = 10;
+
+    // - / ATC [ref] from 20-22
+    String delLoc = "chr1";
+    int delLocStart = 20;
+    int delLocStop = 22;
+
+    // - [ref] / ATC from 20-20
+    String insLoc = "chr1";
+    int insLocStart = 20;
+    int insLocStop = 20;
+
+    VariantContextBuilder basicBuilder, snpBuilder, insBuilder;
+
+    @BeforeSuite
+    public void before() {
+        del = Allele.create("A");
+        delRef = Allele.create("A", true);
+
+        A = Allele.create("A");
+        C = Allele.create("C");
+        Aref = Allele.create("A", true);
+        T = Allele.create("T");
+        Tref = Allele.create("T", true);
+
+        ATC = Allele.create("ATC");
+        ATCref = Allele.create("ATC", true);
+    }
+
+    @BeforeMethod
+    public void beforeTest() {
+        basicBuilder = new VariantContextBuilder("test", snpLoc,snpLocStart, snpLocStop, Arrays.asList(Aref, T));
+        snpBuilder = new VariantContextBuilder("test", snpLoc,snpLocStart, snpLocStop, Arrays.asList(Aref, T));
+        insBuilder = new VariantContextBuilder("test", insLoc, insLocStart, insLocStop, Arrays.asList(delRef, ATC));
+    }
+
+    @Test
+    public void testDetermineTypes() {
+        Allele ACref = Allele.create("AC", true);
+        Allele AC = Allele.create("AC");
+        Allele AT = Allele.create("AT");
+        Allele C = Allele.create("C");
+        Allele CAT = Allele.create("CAT");
+        Allele TAref = Allele.create("TA", true);
+        Allele TA = Allele.create("TA");
+        Allele TC = Allele.create("TC");
+        Allele symbolic = Allele.create("<FOO>");
+
+        // test REF
+        List<Allele> alleles = Arrays.asList(Tref);
+        VariantContext vc = snpBuilder.alleles(alleles).stop(snpLocStop).make();
+        Assert.assertEquals(vc.getType(), VariantContext.Type.NO_VARIATION);
+
+        // test SNPs
+        alleles = Arrays.asList(Tref, A);
+        vc = snpBuilder.alleles(alleles).stop(snpLocStop).make();
+        Assert.assertEquals(vc.getType(), VariantContext.Type.SNP);
+
+        alleles = Arrays.asList(Tref, A, C);
+        vc = snpBuilder.alleles(alleles).stop(snpLocStop).make();
+        Assert.assertEquals(vc.getType(), VariantContext.Type.SNP);
+
+        // test MNPs
+        alleles = Arrays.asList(ACref, TA);
+        vc = snpBuilder.alleles(alleles).stop(snpLocStop+1).make();
+        Assert.assertEquals(vc.getType(), VariantContext.Type.MNP);
+
+        alleles = Arrays.asList(ATCref, CAT, Allele.create("GGG"));
+        vc = basicBuilder.alleles(alleles).stop(snpLocStop+2).make();
+        Assert.assertEquals(vc.getType(), VariantContext.Type.MNP);
+
+        // test INDELs
+        alleles = Arrays.asList(Aref, ATC);
+        vc = basicBuilder.alleles(alleles).stop(snpLocStop).make();
+        Assert.assertEquals(vc.getType(), VariantContext.Type.INDEL);
+
+        alleles = Arrays.asList(ATCref, A);
+        vc = basicBuilder.alleles(alleles).stop(snpLocStop+2).make();
+        Assert.assertEquals(vc.getType(), VariantContext.Type.INDEL);
+
+        alleles = Arrays.asList(Tref, TA, TC);
+        vc = basicBuilder.alleles(alleles).stop(snpLocStop).make();
+        Assert.assertEquals(vc.getType(), VariantContext.Type.INDEL);
+
+        alleles = Arrays.asList(ATCref, A, AC);
+        vc = basicBuilder.alleles(alleles).stop(snpLocStop+2).make();
+        Assert.assertEquals(vc.getType(), VariantContext.Type.INDEL);
+
+        alleles = Arrays.asList(ATCref, A, Allele.create("ATCTC"));
+        vc = basicBuilder.alleles(alleles).stop(snpLocStop+2).make();
+        Assert.assertEquals(vc.getType(), VariantContext.Type.INDEL);
+
+        // test MIXED
+        alleles = Arrays.asList(TAref, T, TC);
+        vc = basicBuilder.alleles(alleles).stop(snpLocStop+1).make();
+        Assert.assertEquals(vc.getType(), VariantContext.Type.MIXED);
+
+        alleles = Arrays.asList(TAref, T, AC);
+        vc = basicBuilder.alleles(alleles).stop(snpLocStop+1).make();
+        Assert.assertEquals(vc.getType(), VariantContext.Type.MIXED);
+
+        alleles = Arrays.asList(ACref, ATC, AT);
+        vc = basicBuilder.alleles(alleles).stop(snpLocStop+1).make();
+        Assert.assertEquals(vc.getType(), VariantContext.Type.MIXED);
+
+        alleles = Arrays.asList(Aref, T, symbolic);
+        vc = basicBuilder.alleles(alleles).stop(snpLocStop).make();
+        Assert.assertEquals(vc.getType(), VariantContext.Type.MIXED);
+
+        // test SYMBOLIC
+        alleles = Arrays.asList(Tref, symbolic);
+        vc = basicBuilder.alleles(alleles).stop(snpLocStop).make();
+        Assert.assertEquals(vc.getType(), VariantContext.Type.SYMBOLIC);
+    }
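+
+    // Summary of the classification exercised above, as the assertions imply: all alleles of
+    // length 1 give SNP, equal lengths greater than 1 give MNP, differing lengths give INDEL,
+    // a mixture of categories (or a symbolic allele alongside other alternates) gives MIXED,
+    // a reference allele with only a symbolic alternate gives SYMBOLIC, and a reference-only
+    // site gives NO_VARIATION.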
+
+    @Test
+    public void testMultipleSNPAlleleOrdering() {
+        final List<Allele> allelesNaturalOrder = Arrays.asList(Aref, C, T);
+        final List<Allele> allelesUnnaturalOrder = Arrays.asList(Aref, T, C);
+        VariantContext naturalVC = snpBuilder.alleles(allelesNaturalOrder).make();
+        VariantContext unnaturalVC = snpBuilder.alleles(allelesUnnaturalOrder).make();
+        Assert.assertEquals(new ArrayList<Allele>(naturalVC.getAlleles()), allelesNaturalOrder);
+        Assert.assertEquals(new ArrayList<Allele>(unnaturalVC.getAlleles()), allelesUnnaturalOrder);
+    }
+
+    @Test
+    public void testCreatingSNPVariantContext() {
+
+        List<Allele> alleles = Arrays.asList(Aref, T);
+        VariantContext vc = snpBuilder.alleles(alleles).make();
+
+        Assert.assertEquals(vc.getChr(), snpLoc);
+        Assert.assertEquals(vc.getStart(), snpLocStart);
+        Assert.assertEquals(vc.getEnd(), snpLocStop);
+        Assert.assertEquals(vc.getType(), VariantContext.Type.SNP);
+        Assert.assertTrue(vc.isSNP());
+        Assert.assertFalse(vc.isIndel());
+        Assert.assertFalse(vc.isSimpleInsertion());
+        Assert.assertFalse(vc.isSimpleDeletion());
+        Assert.assertFalse(vc.isSimpleIndel());
+        Assert.assertFalse(vc.isMixed());
+        Assert.assertTrue(vc.isBiallelic());
+        Assert.assertEquals(vc.getNAlleles(), 2);
+
+        Assert.assertEquals(vc.getReference(), Aref);
+        Assert.assertEquals(vc.getAlleles().size(), 2);
+        Assert.assertEquals(vc.getAlternateAlleles().size(), 1);
+        Assert.assertEquals(vc.getAlternateAllele(0), T);
+
+        Assert.assertFalse(vc.hasGenotypes());
+
+        Assert.assertEquals(vc.getSampleNames().size(), 0);
+    }
+
+    @Test
+    public void testCreatingRefVariantContext() {
+        List<Allele> alleles = Arrays.asList(Aref);
+        VariantContext vc = snpBuilder.alleles(alleles).make();
+
+        Assert.assertEquals(vc.getChr(), snpLoc);
+        Assert.assertEquals(vc.getStart(), snpLocStart);
+        Assert.assertEquals(vc.getEnd(), snpLocStop);
+        Assert.assertEquals(vc.getType(), VariantContext.Type.NO_VARIATION);
+        Assert.assertFalse(vc.isSNP());
+        Assert.assertFalse(vc.isIndel());
+        Assert.assertFalse(vc.isSimpleInsertion());
+        Assert.assertFalse(vc.isSimpleDeletion());
+        Assert.assertFalse(vc.isSimpleIndel());
+        Assert.assertFalse(vc.isMixed());
+        Assert.assertFalse(vc.isBiallelic());
+        Assert.assertEquals(vc.getNAlleles(), 1);
+
+        Assert.assertEquals(vc.getReference(), Aref);
+        Assert.assertEquals(vc.getAlleles().size(), 1);
+        Assert.assertEquals(vc.getAlternateAlleles().size(), 0);
+        // no alternate alleles to assert against for a ref-only site
+
+        Assert.assertFalse(vc.hasGenotypes());
+        Assert.assertEquals(vc.getSampleNames().size(), 0);
+    }
+
+    @Test
+    public void testCreatingDeletionVariantContext() {
+        List<Allele> alleles = Arrays.asList(ATCref, del);
+        VariantContext vc = new VariantContextBuilder("test", delLoc, delLocStart, delLocStop, alleles).make();
+
+        Assert.assertEquals(vc.getChr(), delLoc);
+        Assert.assertEquals(vc.getStart(), delLocStart);
+        Assert.assertEquals(vc.getEnd(), delLocStop);
+        Assert.assertEquals(vc.getType(), VariantContext.Type.INDEL);
+        Assert.assertFalse(vc.isSNP());
+        Assert.assertTrue(vc.isIndel());
+        Assert.assertFalse(vc.isSimpleInsertion());
+        Assert.assertTrue(vc.isSimpleDeletion());
+        Assert.assertTrue(vc.isSimpleIndel());
+        Assert.assertFalse(vc.isMixed());
+        Assert.assertTrue(vc.isBiallelic());
+        Assert.assertEquals(vc.getNAlleles(), 2);
+
+        Assert.assertEquals(vc.getReference(), ATCref);
+        Assert.assertEquals(vc.getAlleles().size(), 2);
+        Assert.assertEquals(vc.getAlternateAlleles().size(), 1);
+        Assert.assertEquals(vc.getAlternateAllele(0), del);
+
+        Assert.assertFalse(vc.hasGenotypes());
+
+        Assert.assertEquals(vc.getSampleNames().size(), 0);
+    }
+
+    @Test
+    public void testCreatingComplexSubstitutionVariantContext() {
+        List<Allele> alleles = Arrays.asList(Tref, ATC);
+        VariantContext vc = new VariantContextBuilder("test", insLoc, insLocStart, insLocStop, alleles).make();
+
+        Assert.assertEquals(vc.getChr(), insLoc);
+        Assert.assertEquals(vc.getStart(), insLocStart);
+        Assert.assertEquals(vc.getEnd(), insLocStop);
+        Assert.assertEquals(vc.getType(), VariantContext.Type.INDEL);
+        Assert.assertFalse(vc.isSNP());
+        Assert.assertTrue(vc.isIndel());
+        Assert.assertFalse(vc.isSimpleInsertion());
+        Assert.assertFalse(vc.isSimpleDeletion());
+        Assert.assertFalse(vc.isSimpleIndel());
+        Assert.assertFalse(vc.isMixed());
+        Assert.assertTrue(vc.isBiallelic());
+        Assert.assertEquals(vc.getNAlleles(), 2);
+
+        Assert.assertEquals(vc.getReference(), Tref);
+        Assert.assertEquals(vc.getAlleles().size(), 2);
+        Assert.assertEquals(vc.getAlternateAlleles().size(), 1);
+        Assert.assertEquals(vc.getAlternateAllele(0), ATC);
+
+        Assert.assertFalse(vc.hasGenotypes());
+
+        Assert.assertEquals(vc.getSampleNames().size(), 0);
+    }
+
+    @Test
+    public void testMatchingAlleles() {
+        List<Allele> alleles = Arrays.asList(ATCref, del);
+        VariantContext vc = new VariantContextBuilder("test", delLoc, delLocStart, delLocStop, alleles).make();
+        VariantContext vc2 = new VariantContextBuilder("test2", delLoc, delLocStart+12, delLocStop+12, alleles).make();
+
+        Assert.assertTrue(vc.hasSameAllelesAs(vc2));
+        Assert.assertTrue(vc.hasSameAlternateAllelesAs(vc2));
+    }
+
+    @Test
+    public void testCreatingInsertionVariantContext() {
+        List<Allele> alleles = Arrays.asList(delRef, ATC);
+        VariantContext vc = insBuilder.alleles(alleles).make();
+
+        Assert.assertEquals(vc.getChr(), insLoc);
+        Assert.assertEquals(vc.getStart(), insLocStart);
+        Assert.assertEquals(vc.getEnd(), insLocStop);
+        Assert.assertEquals(vc.getType(), VariantContext.Type.INDEL);
+        Assert.assertFalse(vc.isSNP());
+        Assert.assertTrue(vc.isIndel());
+        Assert.assertTrue(vc.isSimpleInsertion());
+        Assert.assertFalse(vc.isSimpleDeletion());
+        Assert.assertTrue(vc.isSimpleIndel());
+        Assert.assertFalse(vc.isMixed());
+        Assert.assertTrue(vc.isBiallelic());
+        Assert.assertEquals(vc.getNAlleles(), 2);
+
+        Assert.assertEquals(vc.getReference(), delRef);
+        Assert.assertEquals(vc.getAlleles().size(), 2);
+        Assert.assertEquals(vc.getAlternateAlleles().size(), 1);
+        Assert.assertEquals(vc.getAlternateAllele(0), ATC);
+        Assert.assertFalse(vc.hasGenotypes());
+
+        Assert.assertEquals(vc.getSampleNames().size(), 0);
+    }
+
+    @Test
+    public void testCreatingPartiallyCalledGenotype() {
+        List<Allele> alleles = Arrays.asList(Aref, C);
+        Genotype g = GenotypeBuilder.create("foo", Arrays.asList(C, Allele.NO_CALL));
+        VariantContext vc = new VariantContextBuilder("test", snpLoc, snpLocStart, snpLocStop, alleles).genotypes(g).make();
+
+        Assert.assertTrue(vc.isSNP());
+        Assert.assertEquals(vc.getNAlleles(), 2);
+        Assert.assertTrue(vc.hasGenotypes());
+        Assert.assertFalse(vc.isMonomorphicInSamples());
+        Assert.assertTrue(vc.isPolymorphicInSamples());
+        Assert.assertEquals(vc.getGenotype("foo"), g);
+        Assert.assertEquals(vc.getCalledChrCount(), 1); // only 1 called chromosome; the NO_CALL allele is excluded from the count
+        Assert.assertEquals(vc.getCalledChrCount(Aref), 0);
+        Assert.assertEquals(vc.getCalledChrCount(C), 1);
+        Assert.assertFalse(vc.getGenotype("foo").isHet());
+        Assert.assertFalse(vc.getGenotype("foo").isHom());
+        Assert.assertFalse(vc.getGenotype("foo").isNoCall());
+        Assert.assertFalse(vc.getGenotype("foo").isHom());
+        Assert.assertTrue(vc.getGenotype("foo").isMixed());
+        Assert.assertEquals(vc.getGenotype("foo").getType(), GenotypeType.MIXED);
+    }
+
+    @Test (expectedExceptions = Exception.class)
+    public void testBadConstructorArgs1() {
+        new VariantContextBuilder("test", insLoc, insLocStart, insLocStop, Arrays.asList(delRef, ATCref)).make();
+    }
+
+    @Test (expectedExceptions = Exception.class)
+    public void testBadConstructorArgs2() {
+        new VariantContextBuilder("test", insLoc, insLocStart, insLocStop, Arrays.asList(delRef, del)).make();
+    }
+
+    @Test (expectedExceptions = Exception.class)
+    public void testBadConstructorArgs3() {
+        new VariantContextBuilder("test", insLoc, insLocStart, insLocStop, Arrays.asList(del)).make();
+    }
+
+    @Test (expectedExceptions = Throwable.class)
+    public void testBadConstructorArgs4() {
+        new VariantContextBuilder("test", insLoc, insLocStart, insLocStop, Collections.<Allele>emptyList()).make();
+    }
+
+    @Test (expectedExceptions = Exception.class)
+    public void testBadConstructorArgsDuplicateAlleles1() {
+        new VariantContextBuilder("test", insLoc, insLocStart, insLocStop, Arrays.asList(Aref, T, T)).make();
+    }
+
+    @Test (expectedExceptions = Exception.class)
+    public void testBadConstructorArgsDuplicateAlleles2() {
+        new VariantContextBuilder("test", insLoc, insLocStart, insLocStop, Arrays.asList(Aref, A)).make();
+    }
+
+    @Test (expectedExceptions = Throwable.class)
+    public void testBadLoc1() {
+        List<Allele> alleles = Arrays.asList(Aref, T, del);
+        new VariantContextBuilder("test", delLoc, delLocStart, delLocStop, alleles).make();
+    }
+
+    @Test (expectedExceptions = Throwable.class)
+    public void testBadID1() {
+        new VariantContextBuilder("test", delLoc, delLocStart, delLocStop, Arrays.asList(Aref, T)).id(null).make();
+    }
+
+    @Test (expectedExceptions = Exception.class)
+    public void testBadID2() {
+        new VariantContextBuilder("test", delLoc, delLocStart, delLocStop, Arrays.asList(Aref, T)).id("").make();
+    }
+
+    @Test (expectedExceptions = Throwable.class)
+    public void testBadPError() {
+        new VariantContextBuilder("test", insLoc, insLocStart, insLocStop, Arrays.asList(delRef, ATCref)).log10PError(0.5).make();
+    }
+
+    @Test
+    public void testAccessingSimpleSNPGenotypes() {
+        List<Allele> alleles = Arrays.asList(Aref, T);
+
+        Genotype g1 = GenotypeBuilder.create("AA", Arrays.asList(Aref, Aref));
+        Genotype g2 = GenotypeBuilder.create("AT", Arrays.asList(Aref, T));
+        Genotype g3 = GenotypeBuilder.create("TT", Arrays.asList(T, T));
+
+        VariantContext vc = new VariantContextBuilder("test", snpLoc, snpLocStart, snpLocStop, alleles)
+                .genotypes(g1, g2, g3).make();
+
+        Assert.assertTrue(vc.hasGenotypes());
+        Assert.assertFalse(vc.isMonomorphicInSamples());
+        Assert.assertTrue(vc.isPolymorphicInSamples());
+        Assert.assertEquals(vc.getSampleNames().size(), 3);
+
+        Assert.assertEquals(vc.getGenotypes().size(), 3);
+        Assert.assertEquals(vc.getGenotypes().get("AA"), g1);
+        Assert.assertEquals(vc.getGenotype("AA"), g1);
+        Assert.assertEquals(vc.getGenotypes().get("AT"), g2);
+        Assert.assertEquals(vc.getGenotype("AT"), g2);
+        Assert.assertEquals(vc.getGenotypes().get("TT"), g3);
+        Assert.assertEquals(vc.getGenotype("TT"), g3);
+
+        Assert.assertTrue(vc.hasGenotype("AA"));
+        Assert.assertTrue(vc.hasGenotype("AT"));
+        Assert.assertTrue(vc.hasGenotype("TT"));
+        Assert.assertFalse(vc.hasGenotype("foo"));
+        Assert.assertFalse(vc.hasGenotype("TTT"));
+        Assert.assertFalse(vc.hasGenotype("at"));
+        Assert.assertFalse(vc.hasGenotype("tt"));
+
+        Assert.assertEquals(vc.getCalledChrCount(), 6);
+        Assert.assertEquals(vc.getCalledChrCount(Aref), 3);
+        Assert.assertEquals(vc.getCalledChrCount(T), 3);
+    }
+
+    @Test
+    public void testAccessingCompleteGenotypes() {
+        List<Allele> alleles = Arrays.asList(Aref, T, ATC);
+
+        Genotype g1 = GenotypeBuilder.create("AA", Arrays.asList(Aref, Aref));
+        Genotype g2 = GenotypeBuilder.create("AT", Arrays.asList(Aref, T));
+        Genotype g3 = GenotypeBuilder.create("TT", Arrays.asList(T, T));
+        Genotype g4 = GenotypeBuilder.create("Td", Arrays.asList(T, ATC));
+        Genotype g5 = GenotypeBuilder.create("dd", Arrays.asList(ATC, ATC));
+        Genotype g6 = GenotypeBuilder.create("..", Arrays.asList(Allele.NO_CALL, Allele.NO_CALL));
+
+        VariantContext vc = new VariantContextBuilder("test", snpLoc, snpLocStart, snpLocStop, alleles)
+                .genotypes(g1, g2, g3, g4, g5, g6).make();
+
+        Assert.assertTrue(vc.hasGenotypes());
+        Assert.assertFalse(vc.isMonomorphicInSamples());
+        Assert.assertTrue(vc.isPolymorphicInSamples());
+        Assert.assertEquals(vc.getGenotypes().size(), 6);
+
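+        // requesting genotypes by sample name returns only the matching samples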
+        Assert.assertEquals(3, vc.getGenotypes(Arrays.asList("AA", "Td", "dd")).size());
+
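+        // 6 diploid genotypes give 12 chromosomes; the 2 NO_CALL alleles are excluded, leaving 10 called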
+        Assert.assertEquals(10, vc.getCalledChrCount());
+        Assert.assertEquals(3, vc.getCalledChrCount(Aref));
+        Assert.assertEquals(4, vc.getCalledChrCount(T));
+        Assert.assertEquals(3, vc.getCalledChrCount(ATC));
+        Assert.assertEquals(2, vc.getCalledChrCount(Allele.NO_CALL));
+    }
+
+    @Test
+    public void testAccessingRefGenotypes() {
+        List<Allele> alleles1 = Arrays.asList(Aref, T);
+        List<Allele> alleles2 = Arrays.asList(Aref);
+        List<Allele> alleles3 = Arrays.asList(Aref, T);
+        for ( List<Allele> alleles : Arrays.asList(alleles1, alleles2, alleles3)) {
+            Genotype g1 = GenotypeBuilder.create("AA1", Arrays.asList(Aref, Aref));
+            Genotype g2 = GenotypeBuilder.create("AA2", Arrays.asList(Aref, Aref));
+            Genotype g3 = GenotypeBuilder.create("..", Arrays.asList(Allele.NO_CALL, Allele.NO_CALL));
+            VariantContext vc = new VariantContextBuilder("test", snpLoc, snpLocStart, snpLocStop, alleles)
+                    .genotypes(g1, g2, g3).make();
+
+            Assert.assertTrue(vc.hasGenotypes());
+            Assert.assertTrue(vc.isMonomorphicInSamples());
+            Assert.assertFalse(vc.isPolymorphicInSamples());
+            Assert.assertEquals(vc.getGenotypes().size(), 3);
+
+            Assert.assertEquals(4, vc.getCalledChrCount());
+            Assert.assertEquals(4, vc.getCalledChrCount(Aref));
+            Assert.assertEquals(0, vc.getCalledChrCount(T));
+            Assert.assertEquals(2, vc.getCalledChrCount(Allele.NO_CALL));
+        }
+    }
+
+    @Test
+    public void testFilters() {
+        List<Allele> alleles = Arrays.asList(Aref, T);
+        Genotype g1 = GenotypeBuilder.create("AA", Arrays.asList(Aref, Aref));
+        Genotype g2 = GenotypeBuilder.create("AT", Arrays.asList(Aref, T));
+
+        VariantContext vc = new VariantContextBuilder("test", snpLoc, snpLocStart, snpLocStop, alleles).genotypes(g1, g2).make();
+
+        Assert.assertTrue(vc.isNotFiltered());
+        Assert.assertFalse(vc.isFiltered());
+        Assert.assertEquals(0, vc.getFilters().size());
+        Assert.assertFalse(vc.filtersWereApplied());
+        Assert.assertNull(vc.getFiltersMaybeNull());
+
+        vc = new VariantContextBuilder(vc).filters("BAD_SNP_BAD!").make();
+
+        Assert.assertFalse(vc.isNotFiltered());
+        Assert.assertTrue(vc.isFiltered());
+        Assert.assertEquals(1, vc.getFilters().size());
+        Assert.assertTrue(vc.filtersWereApplied());
+        Assert.assertNotNull(vc.getFiltersMaybeNull());
+
+        Set<String> filters = new HashSet<String>(Arrays.asList("BAD_SNP_BAD!", "REALLY_BAD_SNP", "CHRIST_THIS_IS_TERRIBLE"));
+        vc = new VariantContextBuilder(vc).filters(filters).make();
+
+        Assert.assertFalse(vc.isNotFiltered());
+        Assert.assertTrue(vc.isFiltered());
+        Assert.assertEquals(3, vc.getFilters().size());
+        Assert.assertTrue(vc.filtersWereApplied());
+        Assert.assertNotNull(vc.getFiltersMaybeNull());
+    }
+
+    @Test
+    public void testGetGenotypeCounts() {
+        List<Allele> alleles = Arrays.asList(Aref, T);
+        Genotype g1 = GenotypeBuilder.create("AA", Arrays.asList(Aref, Aref));
+        Genotype g2 = GenotypeBuilder.create("AT", Arrays.asList(Aref, T));
+        Genotype g3 = GenotypeBuilder.create("TT", Arrays.asList(T, T));
+        Genotype g4 = GenotypeBuilder.create("A.", Arrays.asList(Aref, Allele.NO_CALL));
+        Genotype g5 = GenotypeBuilder.create("..", Arrays.asList(Allele.NO_CALL, Allele.NO_CALL));
+
+        // we need to create a new VariantContext each time
+        VariantContext vc = new VariantContextBuilder("foo", snpLoc, snpLocStart, snpLocStop, alleles).genotypes(g1,g2,g3,g4,g5).make();
+        Assert.assertEquals(1, vc.getHetCount());
+        vc = new VariantContextBuilder("foo", snpLoc, snpLocStart, snpLocStop, alleles).genotypes(g1,g2,g3,g4,g5).make();
+        Assert.assertEquals(1, vc.getHomRefCount());
+        vc = new VariantContextBuilder("foo", snpLoc, snpLocStart, snpLocStop, alleles).genotypes(g1,g2,g3,g4,g5).make();
+        Assert.assertEquals(1, vc.getHomVarCount());
+        vc = new VariantContextBuilder("foo", snpLoc, snpLocStart, snpLocStop, alleles).genotypes(g1,g2,g3,g4,g5).make();
+        Assert.assertEquals(1, vc.getMixedCount());
+        vc = new VariantContextBuilder("foo", snpLoc, snpLocStart, snpLocStop, alleles).genotypes(g1,g2,g3,g4,g5).make();
+        Assert.assertEquals(1, vc.getNoCallCount());
+    }
+
+    @Test
+    public void testVCFfromGenotypes() {
+        List<Allele> alleles = Arrays.asList(Aref, C, T);
+        Genotype g1 = GenotypeBuilder.create("AA", Arrays.asList(Aref, Aref));
+        Genotype g2 = GenotypeBuilder.create("AT", Arrays.asList(Aref, T));
+        Genotype g3 = GenotypeBuilder.create("TT", Arrays.asList(T, T));
+        Genotype g4 = GenotypeBuilder.create("..", Arrays.asList(Allele.NO_CALL, Allele.NO_CALL));
+        Genotype g5 = GenotypeBuilder.create("AC", Arrays.asList(Aref, C));
+        VariantContext vc = new VariantContextBuilder("genotypes", snpLoc, snpLocStart, snpLocStop, alleles).genotypes(g1,g2,g3,g4,g5).make();
+
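+        // the boolean argument to subContextFromSamples rederives the allele set from the retained genotypes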
+        VariantContext vc12 = vc.subContextFromSamples(new HashSet<String>(Arrays.asList(g1.getSampleName(), g2.getSampleName())), true);
+        VariantContext vc1 = vc.subContextFromSamples(new HashSet<String>(Arrays.asList(g1.getSampleName())), true);
+        VariantContext vc23 = vc.subContextFromSamples(new HashSet<String>(Arrays.asList(g2.getSampleName(), g3.getSampleName())), true);
+        VariantContext vc4 = vc.subContextFromSamples(new HashSet<String>(Arrays.asList(g4.getSampleName())), true);
+        VariantContext vc14 = vc.subContextFromSamples(new HashSet<String>(Arrays.asList(g1.getSampleName(), g4.getSampleName())), true);
+        VariantContext vc125 = vc.subContextFromSamples(new HashSet<String>(Arrays.asList(g1.getSampleName(), g2.getSampleName(), g5.getSampleName())), true);
+
+        Assert.assertTrue(vc12.isPolymorphicInSamples());
+        Assert.assertTrue(vc23.isPolymorphicInSamples());
+        Assert.assertTrue(vc1.isMonomorphicInSamples());
+        Assert.assertTrue(vc4.isMonomorphicInSamples());
+        Assert.assertTrue(vc14.isMonomorphicInSamples());
+        Assert.assertTrue(vc125.isPolymorphicInSamples());
+
+        Assert.assertTrue(vc12.isSNP());
+        Assert.assertTrue(vc12.isVariant());
+        Assert.assertTrue(vc12.isBiallelic());
+
+        Assert.assertFalse(vc1.isSNP());
+        Assert.assertFalse(vc1.isVariant());
+        Assert.assertFalse(vc1.isBiallelic());
+
+        Assert.assertTrue(vc23.isSNP());
+        Assert.assertTrue(vc23.isVariant());
+        Assert.assertTrue(vc23.isBiallelic());
+
+        Assert.assertFalse(vc4.isSNP());
+        Assert.assertFalse(vc4.isVariant());
+        Assert.assertFalse(vc4.isBiallelic());
+
+        Assert.assertFalse(vc14.isSNP());
+        Assert.assertFalse(vc14.isVariant());
+        Assert.assertFalse(vc14.isBiallelic());
+
+        Assert.assertTrue(vc125.isSNP());
+        Assert.assertTrue(vc125.isVariant());
+        Assert.assertFalse(vc125.isBiallelic());
+
+        Assert.assertEquals(3, vc12.getCalledChrCount(Aref));
+        Assert.assertEquals(1, vc23.getCalledChrCount(Aref));
+        Assert.assertEquals(2, vc1.getCalledChrCount(Aref));
+        Assert.assertEquals(0, vc4.getCalledChrCount(Aref));
+        Assert.assertEquals(2, vc14.getCalledChrCount(Aref));
+        Assert.assertEquals(4, vc125.getCalledChrCount(Aref));
+    }
+
+    @Test
+    public void testGetGenotypeMethods() {
+        Genotype g1 = GenotypeBuilder.create("AA", Arrays.asList(Aref, Aref));
+        Genotype g2 = GenotypeBuilder.create("AT", Arrays.asList(Aref, T));
+        Genotype g3 = GenotypeBuilder.create("TT", Arrays.asList(T, T));
+        GenotypesContext gc = GenotypesContext.create(g1, g2, g3);
+        VariantContext vc = new VariantContextBuilder("genotypes", snpLoc, snpLocStart, snpLocStop, Arrays.asList(Aref, T)).genotypes(gc).make();
+
+        Assert.assertEquals(vc.getGenotype("AA"), g1);
+        Assert.assertEquals(vc.getGenotype("AT"), g2);
+        Assert.assertEquals(vc.getGenotype("TT"), g3);
+        Assert.assertEquals(vc.getGenotype("CC"), null);
+
+        Assert.assertEquals(vc.getGenotypes(), gc);
+        Assert.assertEquals(vc.getGenotypes(Arrays.asList("AA", "AT")), Arrays.asList(g1, g2));
+        Assert.assertEquals(vc.getGenotypes(Arrays.asList("AA", "TT")), Arrays.asList(g1, g3));
+        Assert.assertEquals(vc.getGenotypes(Arrays.asList("AA", "AT", "TT")), Arrays.asList(g1, g2, g3));
+        Assert.assertEquals(vc.getGenotypes(Arrays.asList("AA", "AT", "CC")), Arrays.asList(g1, g2));
+
+        Assert.assertEquals(vc.getGenotype(0), g1);
+        Assert.assertEquals(vc.getGenotype(1), g2);
+        Assert.assertEquals(vc.getGenotype(2), g3);
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Test allele merging
+    //
+    // --------------------------------------------------------------------------------
+
+    private class GetAllelesTest {
+        List<Allele> alleles;
+        String name;
+
+        private GetAllelesTest(String name, Allele... arg) {
+            this.name = name;
+            this.alleles = Arrays.asList(arg);
+        }
+
+        public String toString() {
+            return String.format("%s input=%s", name, alleles);
+        }
+    }
+
+    @DataProvider(name = "getAlleles")
+    public Object[][] mergeAllelesData() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        tests.add(new Object[]{new GetAllelesTest("A*",   Aref)});
+        tests.add(new Object[]{new GetAllelesTest("A*/C", Aref, C)});
+        tests.add(new Object[]{new GetAllelesTest("A*/C/T", Aref, C, T)});
+        tests.add(new Object[]{new GetAllelesTest("A*/T/C", Aref, T, C)});
+        tests.add(new Object[]{new GetAllelesTest("A*/C/T/ATC", Aref, C, T, ATC)});
+        tests.add(new Object[]{new GetAllelesTest("A*/T/C/ATC", Aref, T, C, ATC)});
+        tests.add(new Object[]{new GetAllelesTest("A*/ATC/T/C", Aref, ATC, T, C)});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test(dataProvider = "getAlleles")
+    public void testMergeAlleles(GetAllelesTest cfg) {
+        final List<Allele> altAlleles = cfg.alleles.subList(1, cfg.alleles.size());
+        final VariantContext vc = new VariantContextBuilder("test", snpLoc, snpLocStart, snpLocStop, cfg.alleles).make();
+
+        Assert.assertEquals(vc.getAlleles(), cfg.alleles, "VC alleles not the same as input alleles");
+        Assert.assertEquals(vc.getNAlleles(), cfg.alleles.size(), "VC getNAlleles not the same as input alleles size");
+        Assert.assertEquals(vc.getAlternateAlleles(), altAlleles, "VC alt alleles not the same as input alt alleles");
+
+
+        for ( int i = 0; i < cfg.alleles.size(); i++ ) {
+            final Allele inputAllele = cfg.alleles.get(i);
+
+            Assert.assertTrue(vc.hasAllele(inputAllele));
+            if ( inputAllele.isReference() ) {
+                final Allele nonRefVersion = Allele.create(inputAllele.getBases(), false);
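+                // the boolean argument to hasAllele controls whether ref/alt status is ignored in the comparison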
+                Assert.assertTrue(vc.hasAllele(nonRefVersion, true));
+                Assert.assertFalse(vc.hasAllele(nonRefVersion, false));
+            }
+
+            Assert.assertEquals(inputAllele, vc.getAllele(inputAllele.getBaseString()));
+            Assert.assertEquals(inputAllele, vc.getAllele(inputAllele.getBases()));
+
+            if ( i > 0 ) { // it's an alt allele
+                Assert.assertEquals(inputAllele, vc.getAlternateAllele(i-1));
+            }
+        }
+
+        final Allele missingAllele = Allele.create("AACCGGTT"); // does not exist
+        Assert.assertNull(vc.getAllele(missingAllele.getBases()));
+        Assert.assertFalse(vc.hasAllele(missingAllele));
+        Assert.assertFalse(vc.hasAllele(missingAllele, true));
+    }
+
+    private class SitesAndGenotypesVC {
+        VariantContext vc, copy;
+        String name;
+
+        private SitesAndGenotypesVC(String name, VariantContext original) {
+            this.name = name;
+            this.vc = original;
+            this.copy = new VariantContextBuilder(original).make();
+        }
+
+        public String toString() {
+            return String.format("%s input=%s", name, vc);
+        }
+    }
+
+    @DataProvider(name = "SitesAndGenotypesVC")
+    public Object[][] MakeSitesAndGenotypesVCs() {
+        Genotype g1 = GenotypeBuilder.create("AA", Arrays.asList(Aref, Aref));
+        Genotype g2 = GenotypeBuilder.create("AT", Arrays.asList(Aref, T));
+        Genotype g3 = GenotypeBuilder.create("TT", Arrays.asList(T, T));
+
+        VariantContext sites = new VariantContextBuilder("sites", snpLoc, snpLocStart, snpLocStop, Arrays.asList(Aref, T)).make();
+        VariantContext genotypes = new VariantContextBuilder(sites).source("genotypes").genotypes(g1, g2, g3).make();
+
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        tests.add(new Object[]{new SitesAndGenotypesVC("sites", sites)});
+        tests.add(new Object[]{new SitesAndGenotypesVC("genotypes", genotypes)});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Test modifying routines
+    //
+    // --------------------------------------------------------------------------------
+    @Test(dataProvider = "SitesAndGenotypesVC")
+    public void runModifyVCTests(SitesAndGenotypesVC cfg) {
+        VariantContext modified = new VariantContextBuilder(cfg.vc).loc("chr2", 123, 123).make();
+        Assert.assertEquals(modified.getChr(), "chr2");
+        Assert.assertEquals(modified.getStart(), 123);
+        Assert.assertEquals(modified.getEnd(), 123);
+
+        modified = new VariantContextBuilder(cfg.vc).id("newID").make();
+        Assert.assertEquals(modified.getID(), "newID");
+
+        Set<String> newFilters = Collections.singleton("newFilter");
+        modified = new VariantContextBuilder(cfg.vc).filters(newFilters).make();
+        Assert.assertEquals(modified.getFilters(), newFilters);
+
+        // test the behavior when the builder's attribute object is null
+        modified = new VariantContextBuilder(modified).attributes(null).make();
+        Assert.assertTrue(modified.getAttributes().isEmpty());
+        modified = new VariantContextBuilder(modified).attributes(null).rmAttribute("AC").make();
+        Assert.assertTrue(modified.getAttributes().isEmpty());
+        modified = new VariantContextBuilder(modified).attributes(null).attribute("AC", 1).make();
+        Assert.assertEquals(modified.getAttribute("AC"), 1);
+
+        // test the behavior when the builder's attribute object is not initialized
+        modified = new VariantContextBuilder(modified.getSource(), modified.getChr(), modified.getStart(), modified.getEnd(), modified.getAlleles()).attribute("AC", 1).make();
+
+        // test normal attribute modification
+        modified = new VariantContextBuilder(cfg.vc).attribute("AC", 1).make();
+        Assert.assertEquals(modified.getAttribute("AC"), 1);
+        modified = new VariantContextBuilder(modified).attribute("AC", 2).make();
+        Assert.assertEquals(modified.getAttribute("AC"), 2);
+
+        Genotype g1 = GenotypeBuilder.create("AA2", Arrays.asList(Aref, Aref));
+        Genotype g2 = GenotypeBuilder.create("AT2", Arrays.asList(Aref, T));
+        Genotype g3 = GenotypeBuilder.create("TT2", Arrays.asList(T, T));
+        GenotypesContext gc = GenotypesContext.create(g1,g2,g3);
+        modified = new VariantContextBuilder(cfg.vc).genotypes(gc).make();
+        Assert.assertEquals(modified.getGenotypes(), gc);
+        modified = new VariantContextBuilder(cfg.vc).noGenotypes().make();
+        Assert.assertTrue(modified.getGenotypes().isEmpty());
+
+        // test that original hasn't changed
+        Assert.assertEquals(cfg.vc.getChr(), cfg.copy.getChr());
+        Assert.assertEquals(cfg.vc.getStart(), cfg.copy.getStart());
+        Assert.assertEquals(cfg.vc.getEnd(), cfg.copy.getEnd());
+        Assert.assertEquals(cfg.vc.getAlleles(), cfg.copy.getAlleles());
+        Assert.assertEquals(cfg.vc.getAttributes(), cfg.copy.getAttributes());
+        Assert.assertEquals(cfg.vc.getID(), cfg.copy.getID());
+        Assert.assertEquals(cfg.vc.getGenotypes(), cfg.copy.getGenotypes());
+        Assert.assertEquals(cfg.vc.getLog10PError(), cfg.copy.getLog10PError());
+        Assert.assertEquals(cfg.vc.getFilters(), cfg.copy.getFilters());
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Test subcontext
+    //
+    // --------------------------------------------------------------------------------
+    private class SubContextTest {
+        Set<String> samples;
+        boolean updateAlleles;
+
+        private SubContextTest(Collection<String> samples, boolean updateAlleles) {
+            this.samples = new HashSet<String>(samples);
+            this.updateAlleles = updateAlleles;
+        }
+
+        public String toString() {
+            return String.format("%s samples=%s updateAlleles=%b", "SubContextTest", samples, updateAlleles);
+        }
+    }
+
+    @DataProvider(name = "SubContextTest")
+    public Object[][] MakeSubContextTest() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        for ( boolean updateAlleles : Arrays.asList(true, false)) {
+            tests.add(new Object[]{new SubContextTest(Collections.<String>emptySet(), updateAlleles)});
+            tests.add(new Object[]{new SubContextTest(Collections.singleton("MISSING"), updateAlleles)});
+            tests.add(new Object[]{new SubContextTest(Collections.singleton("AA"), updateAlleles)});
+            tests.add(new Object[]{new SubContextTest(Collections.singleton("AT"), updateAlleles)});
+            tests.add(new Object[]{new SubContextTest(Collections.singleton("TT"), updateAlleles)});
+            tests.add(new Object[]{new SubContextTest(Arrays.asList("AA", "AT"), updateAlleles)});
+            tests.add(new Object[]{new SubContextTest(Arrays.asList("AA", "AT", "TT"), updateAlleles)});
+            tests.add(new Object[]{new SubContextTest(Arrays.asList("AA", "AT", "MISSING"), updateAlleles)});
+            tests.add(new Object[]{new SubContextTest(Arrays.asList("AA", "AT", "TT", "MISSING"), updateAlleles)});
+            tests.add(new Object[]{new SubContextTest(Arrays.asList("AA", "AT", "AC"), updateAlleles)});
+        }
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    @Test(dataProvider = "SubContextTest")
+    public void runSubContextTest(SubContextTest cfg) {
+        Genotype g1 = GenotypeBuilder.create("AA", Arrays.asList(Aref, Aref));
+        Genotype g2 = GenotypeBuilder.create("AT", Arrays.asList(Aref, T));
+        Genotype g3 = GenotypeBuilder.create("TT", Arrays.asList(T, T));
+        Genotype g4 = GenotypeBuilder.create("AC", Arrays.asList(Aref, C));
+
+        GenotypesContext gc = GenotypesContext.create(g1, g2, g3, g4);
+        VariantContext vc = new VariantContextBuilder("genotypes", snpLoc, snpLocStart, snpLocStop, Arrays.asList(Aref, C, T)).genotypes(gc).make();
+        VariantContext sub = vc.subContextFromSamples(cfg.samples, cfg.updateAlleles);
+
+        // unchanged attributes should be the same
+        Assert.assertEquals(sub.getChr(), vc.getChr());
+        Assert.assertEquals(sub.getStart(), vc.getStart());
+        Assert.assertEquals(sub.getEnd(), vc.getEnd());
+        Assert.assertEquals(sub.getLog10PError(), vc.getLog10PError());
+        Assert.assertEquals(sub.getFilters(), vc.getFilters());
+        Assert.assertEquals(sub.getID(), vc.getID());
+        Assert.assertEquals(sub.getAttributes(), vc.getAttributes());
+
+        Set<Genotype> expectedGenotypes = new HashSet<Genotype>();
+        if ( cfg.samples.contains(g1.getSampleName()) ) expectedGenotypes.add(g1);
+        if ( cfg.samples.contains(g2.getSampleName()) ) expectedGenotypes.add(g2);
+        if ( cfg.samples.contains(g3.getSampleName()) ) expectedGenotypes.add(g3);
+        if ( cfg.samples.contains(g4.getSampleName()) ) expectedGenotypes.add(g4);
+        GenotypesContext expectedGC = GenotypesContext.copy(expectedGenotypes);
+
+        // these values depend on the results of sub
+        if ( cfg.updateAlleles ) {
+            // do the work to see what alleles should be here, and which not
+            List<Allele> expectedAlleles = new ArrayList<Allele>();
+            expectedAlleles.add(Aref);
+
+            Set<Allele> genotypeAlleles = new HashSet<Allele>();
+            for ( final Genotype g : expectedGC )
+                genotypeAlleles.addAll(g.getAlleles());
+            genotypeAlleles.remove(Aref);
+
+            // ensure original allele order
+            for (Allele allele: vc.getAlleles())
+                if (genotypeAlleles.contains(allele))
+                    expectedAlleles.add(allele);
+
+            Assert.assertEquals(sub.getAlleles(), expectedAlleles);
+        } else {
+            // not updating alleles -- should be the same
+            Assert.assertEquals(sub.getAlleles(), vc.getAlleles());
+        }
+
+        // same sample names => success
+        Assert.assertTrue(sub.getGenotypes().getSampleNames().equals(expectedGC.getSampleNames()));
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Test sample name functions
+    //
+    // --------------------------------------------------------------------------------
+    private class SampleNamesTest {
+        List<String> sampleNames;
+        List<String> sampleNamesInOrder;
+
+        private SampleNamesTest(List<String> sampleNames, List<String> sampleNamesInOrder) {
+            this.sampleNamesInOrder = sampleNamesInOrder;
+            this.sampleNames = sampleNames;
+        }
+
+        public String toString() {
+            return String.format("%s samples=%s order=%s", "SampleNamesTest", sampleNames, sampleNamesInOrder);
+        }
+    }
+
+    @DataProvider(name = "SampleNamesTest")
+    public Object[][] MakeSampleNamesTest() {
+        List<Object[]> tests = new ArrayList<Object[]>();
+
+        tests.add(new Object[]{new SampleNamesTest(Arrays.asList("1"), Arrays.asList("1"))});
+        tests.add(new Object[]{new SampleNamesTest(Arrays.asList("2", "1"), Arrays.asList("1", "2"))});
+        tests.add(new Object[]{new SampleNamesTest(Arrays.asList("1", "2"), Arrays.asList("1", "2"))});
+        tests.add(new Object[]{new SampleNamesTest(Arrays.asList("1", "2", "3"), Arrays.asList("1", "2", "3"))});
+        tests.add(new Object[]{new SampleNamesTest(Arrays.asList("2", "1", "3"), Arrays.asList("1", "2", "3"))});
+        tests.add(new Object[]{new SampleNamesTest(Arrays.asList("2", "3", "1"), Arrays.asList("1", "2", "3"))});
+        tests.add(new Object[]{new SampleNamesTest(Arrays.asList("3", "1", "2"), Arrays.asList("1", "2", "3"))});
+        tests.add(new Object[]{new SampleNamesTest(Arrays.asList("3", "2", "1"), Arrays.asList("1", "2", "3"))});
+        tests.add(new Object[]{new SampleNamesTest(Arrays.asList("NA2", "NA1"), Arrays.asList("NA1", "NA2"))});
+
+        return tests.toArray(new Object[][]{});
+    }
+
+    private final static void assertGenotypesAreInOrder(Iterable<Genotype> gIt, List<String> names) {
+        int i = 0;
+        for ( final Genotype g : gIt ) {
+            Assert.assertEquals(g.getSampleName(), names.get(i), "Unexpected genotype ordering");
+            i++;
+        }
+    }
+
+
+    @Test(dataProvider = "SampleNamesTest")
+    public void runSampleNamesTest(SampleNamesTest cfg) {
+        GenotypesContext gc = GenotypesContext.create(cfg.sampleNames.size());
+        for ( final String name : cfg.sampleNames ) {
+            gc.add(GenotypeBuilder.create(name, Arrays.asList(Aref, T)));
+        }
+
+        VariantContext vc = new VariantContextBuilder("genotypes", snpLoc, snpLocStart, snpLocStop, Arrays.asList(Aref, T)).genotypes(gc).make();
+
+        // same sample names => success
+        Assert.assertTrue(vc.getSampleNames().equals(new HashSet<String>(cfg.sampleNames)), "vc.getSampleNames() = " + vc.getSampleNames());
+        Assert.assertEquals(vc.getSampleNamesOrderedByName(), cfg.sampleNamesInOrder, "vc.getSampleNamesOrderedByName() = " + vc.getSampleNamesOrderedByName());
+
+        assertGenotypesAreInOrder(vc.getGenotypesOrderedByName(), cfg.sampleNamesInOrder);
+        assertGenotypesAreInOrder(vc.getGenotypesOrderedBy(cfg.sampleNames), cfg.sampleNames);
+    }
+
+    @Test
+    public void testGenotypeCounting() {
+        Genotype noCall = GenotypeBuilder.create("nocall", Arrays.asList(Allele.NO_CALL));
+        Genotype mixed  = GenotypeBuilder.create("mixed", Arrays.asList(Aref, Allele.NO_CALL));
+        Genotype homRef = GenotypeBuilder.create("homRef", Arrays.asList(Aref, Aref));
+        Genotype het    = GenotypeBuilder.create("het", Arrays.asList(Aref, T));
+        Genotype homVar = GenotypeBuilder.create("homVar", Arrays.asList(T, T));
+
+        List<Genotype> allGenotypes = Arrays.asList(noCall, mixed, homRef, het, homVar);
+        final int nCycles = allGenotypes.size() * 10;
+
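+        // build progressively larger GenotypesContexts by cycling through the five genotype kinds,
+        // tallying the expected allele and genotype-type counts as each genotype is added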
+        for ( int i = 0; i < nCycles; i++ ) {
+            int nNoCall = 0, nNoCallAlleles = 0, nA = 0, nT = 0, nMixed = 0, nHomRef = 0, nHet = 0, nHomVar = 0;
+            int nSamples = 0;
+            GenotypesContext gc = GenotypesContext.create();
+            for ( int j = 0; j < i; j++ ) {
+                nSamples++;
+                Genotype g = allGenotypes.get(j % allGenotypes.size());
+                final String name = String.format("%s_%d%d", g.getSampleName(), i, j);
+                gc.add(GenotypeBuilder.create(name, g.getAlleles()));
+                switch ( g.getType() ) {
+                    case NO_CALL: nNoCall++; nNoCallAlleles++; break;
+                    case HOM_REF: nA += 2; nHomRef++; break;
+                    case HET: nA++; nT++; nHet++; break;
+                    case HOM_VAR: nT += 2; nHomVar++; break;
+                    case MIXED: nA++; nNoCallAlleles++; nMixed++; break;
+                    default: throw new RuntimeException("Unexpected genotype type " + g.getType());
+                }
+
+            }
+
+            VariantContext vc = new VariantContextBuilder("genotypes", snpLoc, snpLocStart, snpLocStop, Arrays.asList(Aref, T)).genotypes(gc).make();
+            Assert.assertEquals(vc.getNSamples(), nSamples);
+            if ( nSamples > 0 ) {
+                Assert.assertEquals(vc.isPolymorphicInSamples(), nT > 0);
+                Assert.assertEquals(vc.isMonomorphicInSamples(), nT == 0);
+            }
+            Assert.assertEquals(vc.getCalledChrCount(), nA + nT);
+
+            Assert.assertEquals(vc.getCalledChrCount(Allele.NO_CALL), nNoCallAlleles);
+            Assert.assertEquals(vc.getCalledChrCount(Aref), nA);
+            Assert.assertEquals(vc.getCalledChrCount(T), nT);
+
+            Assert.assertEquals(vc.getNoCallCount(), nNoCall);
+            Assert.assertEquals(vc.getHomRefCount(), nHomRef);
+            Assert.assertEquals(vc.getHetCount(), nHet);
+            Assert.assertEquals(vc.getHomVarCount(), nHomVar);
+            Assert.assertEquals(vc.getMixedCount(), nMixed);
+        }
+    }
+
+    @Test
+    public void testSetAttribute() {
+        // simply verifies that setting an attribute on a fresh builder does not throw
+        VariantContextBuilder builder = new VariantContextBuilder();
+        builder.attribute("Test", "value");
+    }
+
+    // --------------------------------------------------------------------------------
+    //
+    // Test validation methods
+    //
+    // --------------------------------------------------------------------------------
+
+    // create a VariantContext object for various alleles and genotypes to test against
+    private VariantContext createTestVariantContext(final List<Allele> alleles, final Map<String, Object> attributes, final Genotype... genotypes) {
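+        // an empty validation set means the constructor performs no validation checks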
+        final EnumSet<VariantContext.Validation> toValidate = EnumSet.noneOf(VariantContext.Validation.class);
+        final Set<String> filters = null;
+        final boolean fullyDecoded = false;
+
+        // no genotypes needs to use GenotypesContext.NO_GENOTYPES,
+        // otherwise we build up a GenotypesContext from the passed genotypes
+        final GenotypesContext gc;
+        if (genotypes == null || genotypes.length == 0) {
+            gc = GenotypesContext.NO_GENOTYPES;
+        } else {
+            gc = new GenotypesContext();
+            for (final Genotype genotype : genotypes) {
+                gc.add(genotype);
+            }
+        }
+        // most of the fields are not important to the tests, we just need alleles and gc set properly
+        return new VariantContext("genotypes", VCFConstants.EMPTY_ID_FIELD, snpLoc, snpLocStart, snpLocStop, alleles,
+                gc, VariantContext.NO_LOG10_PERROR, filters, attributes,
+                fullyDecoded, toValidate);
+    }
+
+    // validateReferenceBases: PASS conditions
+    @DataProvider
+    public Object[][] testValidateReferencesBasesDataProvider() {
+        final VariantContext vc = createValidateReferencesContext(Arrays.asList(Aref, T));
+        return new Object[][]{
+                // null ref will pass validation
+                {vc, null, A},
+                // A vs A-ref will pass validation
+                {vc, Aref, A}
+        };
+    }
+    @Test(dataProvider = "testValidateReferencesBasesDataProvider")
+    public void testValidateReferenceBases(final VariantContext vc, final Allele allele1, final Allele allele2) {
+        // validateReferenceBases throws exceptions if it fails, so no Asserts here...
+        vc.validateReferenceBases(allele1, allele2);
+    }
+    // validateReferenceBases: FAIL conditions
+    @DataProvider
+    public Object[][] testValidateReferencesBasesFailureDataProvider() {
+        final VariantContext vc = createValidateReferencesContext(Arrays.asList(Aref, T));
+
+        final Allele symbolicAllele = Allele.create("<A>");
+
+        return new Object[][]{
+                // T vs A-ref will NOT pass validation
+                {vc, Aref, T},
+                // symbolic alleles will NOT pass validation
+                {vc, Aref, symbolicAllele}
+        };
+    }
+    @Test(dataProvider = "testValidateReferencesBasesFailureDataProvider", expectedExceptions = TribbleException.class)
+    public void testValidateReferenceBasesFailure(final VariantContext vc, final Allele allele1, final Allele allele2) {
+        // validateReferenceBases throws exceptions if it fails, so no Asserts here...
+        vc.validateReferenceBases(allele1, allele2);
+    }
+    private VariantContext createValidateReferencesContext(final List<Allele> alleles) {
+        return createTestVariantContext(alleles, null);
+    }
+
+
+    // validateRSIDs: PASS conditions
+    @DataProvider
+    public Object[][] testValidateRSIDsDataProvider() {
+        final VariantContext vcNoId = createTestVariantContextRsIds(VCFConstants.EMPTY_ID_FIELD);
+        final VariantContext vcNonRs = createTestVariantContextRsIds("abc456");
+        final VariantContext vc = createTestVariantContextRsIds("rs123");
+        final VariantContext vcMultipleRs = createTestVariantContextRsIds("rs123;rs456;rs789");
+
+        return new Object[][]{
+                // no ID will pass validation
+                {vcNoId, makeRsIDsSet("rs123")},
+                // non-rs ID will pass validation
+                {vcNonRs, makeRsIDsSet("rs123")},
+                // matching ID will pass validation
+                {vc, makeRsIDsSet("rs123")},
+                // null rsIDs to check will pass validation
+                {vc, null},
+                // context with multiple rsIDs that are contained within the rsID list will pass
+                {vcMultipleRs, makeRsIDsSet("rs123", "rs321", "rs456", "rs654", "rs789")}
+        };
+    }
+    @Test(dataProvider = "testValidateRSIDsDataProvider")
+    public void testValidateRSIDs(final VariantContext vc, final Set<String> rsIDs) {
+        // validateRSIDs throws exceptions if it fails, so no Asserts here...
+        vc.validateRSIDs(rsIDs);
+    }
+    // validateRSIDs: FAIL conditions
+    @DataProvider
+    public Object[][] testValidateRSIDsFailureDataProvider() {
+        final VariantContext vc = createTestVariantContextRsIds("rs123");
+        final VariantContext vcMultipleRs = createTestVariantContextRsIds("rs123;rs456;rs789");
+
+        return new Object[][]{
+                // mismatching ID will fail validation
+                {vc, makeRsIDsSet("rs123456")},
+                // a context with multiple rsIDs not all contained in the query set will fail validation
+                {vcMultipleRs, makeRsIDsSet("rs456")}
+        };
+    }
+    @Test(dataProvider = "testValidateRSIDsFailureDataProvider", expectedExceptions = TribbleException.class)
+    public void testValidateRSIDsFailure(final VariantContext vc, final Set<String> rsIDs) {
+        // validateRSIDs throws exceptions if it fails, so no Asserts here...
+        vc.validateRSIDs(rsIDs);
+    }
+    // create a VariantContext appropriate for testing rsIDs
+    private VariantContext createTestVariantContextRsIds(final String rsId) {
+        final EnumSet<VariantContext.Validation> toValidate = EnumSet.noneOf(VariantContext.Validation.class);
+        final Set<String> filters = null;
+        final Map<String, Object> attributes = null;
+        final boolean fullyDecoded = false;
+
+        return new VariantContext("genotypes", rsId, snpLoc, snpLocStart, snpLocStop, Arrays.asList(Aref, T),
+                GenotypesContext.NO_GENOTYPES, VariantContext.NO_LOG10_PERROR, filters, attributes,
+                fullyDecoded, toValidate);
+    }
+    private Set<String> makeRsIDsSet(final String... rsIds) {
+        return new HashSet<String>(Arrays.asList(rsIds));
+    }
+
+
+    // validateAlternateAlleles: PASS conditions
+    @DataProvider
+    public Object[][] testValidateAlternateAllelesDataProvider() {
+        final Genotype homVarT = GenotypeBuilder.create("homVarT", Arrays.asList(T, T));
+
+        // no genotypes passes validateAlternateAlleles
+        final VariantContext vcNoGenotypes =
+                // A-ref/T with no GT
+                createValidateAlternateAllelesContext(Arrays.asList(Aref, T));
+
+        // genotypes that match ALTs will pass
+        final VariantContext vcHasGenotypes =
+                // A-ref/T vs T/T
+                createValidateAlternateAllelesContext(Arrays.asList(Aref, T), homVarT);
+
+        return new Object[][]{
+                {vcNoGenotypes},
+                {vcHasGenotypes}
+        };
+    }
+    @Test(dataProvider = "testValidateAlternateAllelesDataProvider")
+    public void testValidateAlternateAlleles(final VariantContext vc) {
+        // validateAlternateAlleles throws exceptions if it fails, so no Asserts here...
+        vc.validateAlternateAlleles();
+    }
+    // validateAlternateAlleles: FAIL conditions
+    @DataProvider
+    public Object[][] testValidateAlternateAllelesFailureDataProvider() {
+        final Genotype homRef = GenotypeBuilder.create("homRef", Arrays.asList(Aref, Aref));
+        final Genotype homVarA = GenotypeBuilder.create("homVarA", Arrays.asList(A, A));
+
+        // alts not observed in the genotypes will fail validation
+        // this is the throw in VariantContext from: if ( reportedAlleles.size() != observedAlleles.size() )
+        final VariantContext vcHasAltNotObservedInGT =
+                // A-ref/T vs A-ref/A-ref
+                createValidateAlternateAllelesContext(Arrays.asList(Aref, T), homRef);
+
+        // alts not observed in the genotypes will fail validation
+        // but this time it is the second throw in VariantContext after: observedAlleles.retainAll(reportedAlleles);
+        final VariantContext vcHasAltNotObservedInGTIntersection =
+                // A-ref/T vs A/A
+                createValidateAlternateAllelesContext(Arrays.asList(Aref, T), homVarA);
+
+        return new Object[][]{
+                {vcHasAltNotObservedInGT},
+                {vcHasAltNotObservedInGTIntersection}
+        };
+    }
+    @Test(dataProvider = "testValidateAlternateAllelesFailureDataProvider", expectedExceptions = TribbleException.class)
+    public void testValidateAlternateAllelesFailure(final VariantContext vc) {
+        // validateAlternateAlleles throws exceptions if it fails, so no Asserts here...
+        vc.validateAlternateAlleles();
+    }
+    private VariantContext createValidateAlternateAllelesContext(final List<Allele> alleles, final Genotype... genotypes) {
+        return createTestVariantContext(alleles, null, genotypes);
+    }
+
+
+
+    // validateChromosomeCounts: PASS conditions
+    @DataProvider
+    public Object[][] testValidateChromosomeCountsDataProvider() {
+        final Genotype homRef = GenotypeBuilder.create("homRef", Arrays.asList(Aref, Aref));
+        final Genotype homVarT = GenotypeBuilder.create("homVarT", Arrays.asList(T, T));
+        final Genotype hetVarTC = GenotypeBuilder.create("hetVarTC", Arrays.asList(T, C));
+        final Genotype homRefNoCall = GenotypeBuilder.create("homRefNoCall", Arrays.asList(Aref, Allele.NO_CALL));
+
+
+        // no genotypes passes validateChromosomeCounts
+        final VariantContext vcNoGenotypes =
+                // A-ref/T with no GT
+                createValidateChromosomeCountsContext(Arrays.asList(Aref, T), null);
+
+        /** AN : total number of alleles in called genotypes **/
+        // with AN set and hom-ref, we expect AN to be 2 for Aref/Aref
+        final Map<String, Object> attributesAN = new HashMap<String, Object>();
+        attributesAN.put(VCFConstants.ALLELE_NUMBER_KEY, "2");
+        final VariantContext vcANSet =
+                createValidateChromosomeCountsContext(Arrays.asList(Aref), attributesAN, homRef);
+
+        // with AN set, one no-call (no-calls get ignored by getCalledChrCount() in VariantContext)
+        // we expect AN to be 1 for Aref/no-call
+        final Map<String, Object> attributesANNoCall = new HashMap<String, Object>();
+        attributesANNoCall.put(VCFConstants.ALLELE_NUMBER_KEY, "1");
+        final VariantContext vcANSetNoCall =
+                createValidateChromosomeCountsContext(Arrays.asList(Aref), attributesANNoCall, homRefNoCall);
+
+
+        /** AC : allele count in genotypes, for each ALT allele, in the same order as listed **/
+        // with AC set, and T/T, we expect AC to be 2 (for 2 counts of ALT T)
+        final Map<String, Object> attributesAC = new HashMap<String, Object>();
+        attributesAC.put(VCFConstants.ALLELE_COUNT_KEY, "2");
+        final VariantContext vcACSet =
+                createValidateChromosomeCountsContext(Arrays.asList(Aref, T), attributesAC, homVarT);
+
+        // with AC set and no ALT (GT is 0/0), we expect AC count to be 0
+        final Map<String, Object> attributesACNoAlts = new HashMap<String, Object>();
+        attributesACNoAlts.put(VCFConstants.ALLELE_COUNT_KEY, "0");
+        final VariantContext vcACSetNoAlts =
+                createValidateChromosomeCountsContext(Arrays.asList(Aref), attributesACNoAlts, homRef);
+
+        // with AC set, and two different ALTs (T and C), with GT of 1/2, we expect a count of 1 for each.
+        // With two ALTs, a list is expected, so we set the attribute as a list of 1,1
+        final Map<String, Object> attributesACTwoAlts = new HashMap<String, Object>();
+        attributesACTwoAlts.put(VCFConstants.ALLELE_COUNT_KEY, Arrays.asList("1", "1"));
+        final VariantContext vcACSetTwoAlts =
+                createValidateChromosomeCountsContext(Arrays.asList(Aref, T, C), attributesACTwoAlts, hetVarTC);
+
+        return new Object[][]{
+                {vcNoGenotypes},
+                {vcANSet},
+                {vcANSetNoCall},
+                {vcACSet},
+                {vcACSetNoAlts},
+                {vcACSetTwoAlts}
+        };
+    }
+    @Test(dataProvider = "testValidateChromosomeCountsDataProvider")
+    public void testValidateChromosomeCounts(final VariantContext vc) {
+        // validateChromosomeCounts throws exceptions if it fails, so no Asserts here...
+        vc.validateChromosomeCounts();
+    }
+    // validateChromosomeCounts: FAIL conditions
+    @DataProvider
+    public Object[][] testValidateChromosomeCountsFailureDataProvider() {
+        final Genotype homRef = GenotypeBuilder.create("homRef", Arrays.asList(Aref, Aref));
+        final Genotype hetVarTC = GenotypeBuilder.create("hetVarTC", Arrays.asList(T, C));
+        final Genotype homRefNoCall = GenotypeBuilder.create("homRefNoCall", Arrays.asList(Aref, Allele.NO_CALL));
+
+        /** AN : total number of alleles in called genotypes **/
+        // with AN set and hom-ref, we expect AN to be 2 for Aref/Aref, so 3 will fail
+        final Map<String, Object> attributesAN = new HashMap<String, Object>();
+        attributesAN.put(VCFConstants.ALLELE_NUMBER_KEY, "3");
+        final VariantContext vcANSet =
+                createValidateChromosomeCountsContext(Arrays.asList(Aref), attributesAN, homRef);
+
+        // with AN set, one no-call (no-calls get ignored by getCalledChrCount() in VariantContext)
+        // we expect AN to be 1 for Aref/no-call, so 2 will fail
+        final Map<String, Object> attributesANNoCall = new HashMap<String, Object>();
+        attributesANNoCall.put(VCFConstants.ALLELE_NUMBER_KEY, "2");
+        final VariantContext vcANSetNoCall =
+                createValidateChromosomeCountsContext(Arrays.asList(Aref), attributesANNoCall, homRefNoCall);
+
+        /** AC : allele count in genotypes, for each ALT allele, in the same order as listed **/
+        // with AC set but no ALTs, we expect a count of 0, so the wrong count will fail here
+        final Map<String, Object> attributesACWrongCount = new HashMap<String, Object>();
+        attributesACWrongCount.put(VCFConstants.ALLELE_COUNT_KEY, "2");
+        final VariantContext vcACWrongCount =
+                createValidateChromosomeCountsContext(Arrays.asList(Aref), attributesACWrongCount, homRef);
+
+        // with AC set, two ALTs, but AC is not a list with count for each ALT
+        final Map<String, Object> attributesACTwoAlts = new HashMap<String, Object>();
+        attributesACTwoAlts.put(VCFConstants.ALLELE_COUNT_KEY, "1");
+        final VariantContext vcACSetTwoAlts =
+                createValidateChromosomeCountsContext(Arrays.asList(Aref, T, C), attributesACTwoAlts, hetVarTC);
+
+        // with AC set, two ALTs, and a list is correctly used, but wrong counts (we expect counts to be 1,1)
+        final Map<String, Object> attributesACTwoAltsWrongCount = new HashMap<String, Object>();
+        attributesACTwoAltsWrongCount.put(VCFConstants.ALLELE_COUNT_KEY, Arrays.asList("1", "2"));
+        final VariantContext vcACSetTwoAltsWrongCount =
+                createValidateChromosomeCountsContext(Arrays.asList(Aref, T, C), attributesACTwoAltsWrongCount, hetVarTC);
+
+        // with AC set, two ALTs, but only count for one ALT (we expect two items in the list: 1,1)
+        final Map<String, Object> attributesACTwoAltsOneAltCount = new HashMap<String, Object>();
+        attributesACTwoAltsOneAltCount.put(VCFConstants.ALLELE_COUNT_KEY, Arrays.asList("1"));
+        final VariantContext vcACSetTwoAltsOneAltCount =
+                createValidateChromosomeCountsContext(Arrays.asList(Aref, T, C), attributesACTwoAltsOneAltCount, hetVarTC);
+
+        return new Object[][]{
+                {vcANSet},
+                {vcANSetNoCall},
+                {vcACWrongCount},
+                {vcACSetTwoAlts},
+                {vcACSetTwoAltsWrongCount},
+                {vcACSetTwoAltsOneAltCount}
+        };
+    }
+    @Test(dataProvider = "testValidateChromosomeCountsFailureDataProvider", expectedExceptions = TribbleException.class)
+    public void testValidateChromosomeCountsFailure(final VariantContext vc) {
+        // validateChromosomeCounts throws exceptions if it fails, so no Asserts here...
+        vc.validateChromosomeCounts();
+    }
+    private VariantContext createValidateChromosomeCountsContext(final List<Allele> alleles, final Map<String, Object> attributes, final Genotype... genotypes) {
+        return createTestVariantContext(alleles, attributes, genotypes);
+    }
+
+
+    // the extraStrictValidation method calls the other validation methods
+    @DataProvider
+    public Object[][] testExtraStrictValidationDataProvider() {
+        // get the data providers for each of the passing tests of the individual methods
+        final Object[][] passingValidateReferenceBasesData = testValidateReferencesBasesDataProvider();
+        final Object[][] passingValidateRSIDsData = testValidateRSIDsDataProvider();
+        final Object[][] passingValidateAlternateAllelesData = testValidateAlternateAllelesDataProvider();
+        final Object[][] passingValidateChromosomeCountsData = testValidateChromosomeCountsDataProvider();
+
+        // the total number of tests we will run here is the sum of each of the test cases
+        final int numDataPoints =
+                passingValidateReferenceBasesData.length +
+                        passingValidateRSIDsData.length +
+                        passingValidateAlternateAllelesData.length +
+                        passingValidateChromosomeCountsData.length;
+
+        // create the data provider structure for this extra strict test
+        final Object[][] extraStrictData = new Object[numDataPoints][];
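+        // each row has the shape {VariantContext, reportedReference allele, observedReference allele, rsIDs}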
+
+        int testNum = 0;
+        for (final Object[] testRefBases : passingValidateReferenceBasesData) {
+            final VariantContext vc = (VariantContext) testRefBases[0];
+            final Allele refAllele = (Allele) testRefBases[1];
+            final Allele allele = (Allele) testRefBases[2];
+
+            // for this test, rsIds does not matter, so we hold it constant
+            extraStrictData[testNum++] = new Object[]{vc, refAllele, allele, null};
+        }
+
+        for (final Object[] testRsIDs : passingValidateRSIDsData) {
+            final VariantContext vc = (VariantContext) testRsIDs[0];
+            final Set<String> rsIDs = (Set<String>) testRsIDs[1];
+
+            // for this test, reportedReference and observedReference do not matter,
+            // so we hold them constant
+            extraStrictData[testNum++] = new Object[]{vc, Tref, T, rsIDs};
+        }
+
+        for (final Object[] testAlternateAlleles : passingValidateAlternateAllelesData) {
+            final VariantContext vc = (VariantContext) testAlternateAlleles[0];
+
+            // for this test, only VariantContext is used, so we hold
+            // reportedReference, observedReference and rsIds constant
+            extraStrictData[testNum++] = new Object[]{vc, Tref, T, null};
+        }
+
+        for (final Object[] testChromosomeCounts : passingValidateChromosomeCountsData) {
+            final VariantContext vc = (VariantContext) testChromosomeCounts[0];
+
+            // for this test, only VariantContext is used, so we hold
+            // reportedReference, observedReference and rsIds constant
+            extraStrictData[testNum++] = new Object[]{vc, Tref, T, null};
+        }
+
+        return extraStrictData;
+    }
+
+    @DataProvider(name = "serializationTestData")
+    public Object[][] getSerializationTestData() {
+        return new Object[][] {
+                { new File("src/test/resources/htsjdk/variant/HiSeq.10000.vcf"), new VCFCodec() },
+                { new File("src/test/resources/htsjdk/variant/serialization_test.bcf"), new BCF2Codec() }
+        };
+    }
+
+    @Test(dataProvider = "serializationTestData")
+    public void testSerialization( final File testFile, final FeatureCodec<VariantContext, ?> codec ) throws Exception {
+        final AbstractFeatureReader<VariantContext, ?> featureReader = AbstractFeatureReader.getFeatureReader(testFile.getAbsolutePath(), codec, false);
+        final VariantContext initialVC = featureReader.iterator().next();
+
+        final VariantContext vcDeserialized = TestUtil.serializeAndDeserialize(initialVC);
+
+        assertVariantContextsAreEqual(vcDeserialized, initialVC);
+    }
+
+    @Test(dataProvider = "testExtraStrictValidationDataProvider")
+    public void testExtraStrictValidation(final VariantContext vc, final Allele reportedReference, final Allele observedReference, final Set<String> rsIDs) {
+        // extraStrictValidation throws exceptions if it fails, so no Asserts here...
+        vc.extraStrictValidation(reportedReference, observedReference, rsIDs);
+    }
+
+    @DataProvider
+    public Object[][] testExtraStrictValidationFailureDataProvider() {
+        // get the data providers for each of the failure tests of the individual methods
+        final Object[][] failingValidateReferenceBasesData = testValidateReferencesBasesFailureDataProvider();
+        final Object[][] failingValidateRSIDsData = testValidateRSIDsFailureDataProvider();
+        final Object[][] failingValidateAlternateAllelesData = testValidateAlternateAllelesFailureDataProvider();
+        final Object[][] failingValidateChromosomeCountsData = testValidateChromosomeCountsFailureDataProvider();
+
+        // the total number of tests we will run here is the sum of each of the test cases
+        final int numDataPoints =
+                failingValidateReferenceBasesData.length +
+                        failingValidateRSIDsData.length +
+                        failingValidateAlternateAllelesData.length +
+                        failingValidateChromosomeCountsData.length;
+
+        // create the data provider structure for this extra strict test
+        final Object[][] extraStrictData = new Object[numDataPoints][];
+
+        int testNum = 0;
+        for (final Object[] testRefBases : failingValidateReferenceBasesData) {
+            final VariantContext vc = (VariantContext) testRefBases[0];
+            final Allele refAllele = (Allele) testRefBases[1];
+            final Allele allele = (Allele) testRefBases[2];
+
+            // for this test, rsIds does not matter, so we hold it constant
+            extraStrictData[testNum++] = new Object[]{vc, refAllele, allele, null};
+        }
+
+        for (final Object[] testRsIDs : failingValidateRSIDsData) {
+            final VariantContext vc = (VariantContext) testRsIDs[0];
+            final Set<String> rsIDs = (Set<String>) testRsIDs[1];
+
+            // for this test, reportedReference and observedReference do not matter,
+            // so we hold them constant
+            extraStrictData[testNum++] = new Object[]{vc, Tref, T, rsIDs};
+        }
+
+        for (final Object[] testAlternateAlleles : failingValidateAlternateAllelesData) {
+            final VariantContext vc = (VariantContext) testAlternateAlleles[0];
+
+            // for this test, only VariantContext is used, so we hold
+            // reportedReference, observedReference and rsIds constant
+            extraStrictData[testNum++] = new Object[]{vc, Tref, T, null};
+        }
+
+        for (final Object[] testChromosomeCounts : failingValidateChromosomeCountsData) {
+            final VariantContext vc = (VariantContext) testChromosomeCounts[0];
+
+            // for this test, only VariantContext is used, so we hold
+            // reportedReference, observedReference and rsIds constant
+            extraStrictData[testNum++] = new Object[]{vc, Tref, T, null};
+        }
+
+        return extraStrictData;
+    }
+    @Test(dataProvider = "testExtraStrictValidationFailureDataProvider", expectedExceptions = TribbleException.class)
+    public void testExtraStrictValidationFailure(final VariantContext vc, final Allele reportedReference, final Allele observedReference, final Set<String> rsIDs) {
+        // extraStrictValidation throws exceptions if it fails, so no Asserts here...
+        vc.extraStrictValidation(reportedReference, observedReference, rsIDs);
+    }
+}
diff --git a/src/tests/java/htsjdk/variant/variantcontext/VariantJEXLContextUnitTest.java b/src/test/java/htsjdk/variant/variantcontext/VariantJEXLContextUnitTest.java
similarity index 100%
rename from src/tests/java/htsjdk/variant/variantcontext/VariantJEXLContextUnitTest.java
rename to src/test/java/htsjdk/variant/variantcontext/VariantJEXLContextUnitTest.java
diff --git a/src/tests/java/htsjdk/variant/variantcontext/filter/AllFailFilter.java b/src/test/java/htsjdk/variant/variantcontext/filter/AllFailFilter.java
similarity index 100%
rename from src/tests/java/htsjdk/variant/variantcontext/filter/AllFailFilter.java
rename to src/test/java/htsjdk/variant/variantcontext/filter/AllFailFilter.java
diff --git a/src/tests/java/htsjdk/variant/variantcontext/filter/AllPassFilter.java b/src/test/java/htsjdk/variant/variantcontext/filter/AllPassFilter.java
similarity index 100%
rename from src/tests/java/htsjdk/variant/variantcontext/filter/AllPassFilter.java
rename to src/test/java/htsjdk/variant/variantcontext/filter/AllPassFilter.java
diff --git a/src/tests/java/htsjdk/variant/variantcontext/filter/CompoundFilterTest.java b/src/test/java/htsjdk/variant/variantcontext/filter/CompoundFilterTest.java
similarity index 100%
rename from src/tests/java/htsjdk/variant/variantcontext/filter/CompoundFilterTest.java
rename to src/test/java/htsjdk/variant/variantcontext/filter/CompoundFilterTest.java
diff --git a/src/test/java/htsjdk/variant/variantcontext/filter/FilteringVariantContextIteratorTest.java b/src/test/java/htsjdk/variant/variantcontext/filter/FilteringVariantContextIteratorTest.java
new file mode 100644
index 0000000..d8decfd
--- /dev/null
+++ b/src/test/java/htsjdk/variant/variantcontext/filter/FilteringVariantContextIteratorTest.java
@@ -0,0 +1,88 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2015 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+
+package htsjdk.variant.variantcontext.filter;
+
+import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.vcf.VCFFileReader;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+
+/**
+ * Tests for testing the (VariantContext)FilteringVariantContextIterator, and the HeterozygosityFilter
+ */
+
+public class FilteringVariantContextIteratorTest {
+    final File testDir = new File("src/test/resources/htsjdk/variant");
+
+    @DataProvider
+    public Object [][] filteringIteratorData() {
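+        // each row pairs a VariantContextFilter with the number of records in ex2.vcf expected to pass it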
+        return new Object[][] {
+                {new HeterozygosityFilter(true, "NA00001"), 2},
+                {new HeterozygosityFilter(false, "NA00001"), 3},
+                {new HeterozygosityFilter(true, null), 2},
+                {new HeterozygosityFilter(false, null), 3},
+                {new AllPassFilter(), 5},
+                {new HeterozygosityFilter(true, "NA00002"), 4},
+                {new HeterozygosityFilter(false, "NA00002"), 1},
+        };
+    }
+
+    @Test(dataProvider = "filteringIteratorData")
+    public void testFilteringIterator(final VariantContextFilter filter, final int expectedCount) {
+
+        final File vcf = new File(testDir,"ex2.vcf");
+        final VCFFileReader vcfReader = new VCFFileReader(vcf, false);
+        final FilteringVariantContextIterator filteringIterator = new FilteringVariantContextIterator(vcfReader.iterator(), filter);
+        int count = 0;
+
+        for(final VariantContext vc : filteringIterator) {
+            count++;
+        }
+
+        Assert.assertEquals(count, expectedCount);
+    }
+
+    @DataProvider
+    public Object [][] badSampleData() {
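+        // each row: {VCF file, sample name} expected to make testMissingSample throw an IllegalArgumentException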
+        return new Object[][] {
+                {"ex2.vcf", "DOES_NOT_EXIST"},
+                {"breakpoint.vcf", null},
+        };
+    }
+
+    @Test(dataProvider = "badSampleData", expectedExceptions = IllegalArgumentException.class)
+    public void testMissingSample(final String file, final String sample) {
+
+        final File vcf = new File(testDir, file);
+        final VCFFileReader vcfReader = new VCFFileReader(vcf, false);
+        final HeterozygosityFilter heterozygosityFilter = new HeterozygosityFilter(true, sample);
+
+        new FilteringVariantContextIterator(vcfReader.iterator(), heterozygosityFilter).next();
+    }
+}
+
diff --git a/src/tests/java/htsjdk/variant/variantcontext/filter/GenotypeQualityFilterTest.java b/src/test/java/htsjdk/variant/variantcontext/filter/GenotypeQualityFilterTest.java
similarity index 100%
rename from src/tests/java/htsjdk/variant/variantcontext/filter/GenotypeQualityFilterTest.java
rename to src/test/java/htsjdk/variant/variantcontext/filter/GenotypeQualityFilterTest.java
diff --git a/src/tests/java/htsjdk/variant/variantcontext/filter/HeterozygosityFilterTest.java b/src/test/java/htsjdk/variant/variantcontext/filter/HeterozygosityFilterTest.java
similarity index 100%
rename from src/tests/java/htsjdk/variant/variantcontext/filter/HeterozygosityFilterTest.java
rename to src/test/java/htsjdk/variant/variantcontext/filter/HeterozygosityFilterTest.java
diff --git a/src/test/java/htsjdk/variant/variantcontext/filter/JavascriptVariantFilterTest.java b/src/test/java/htsjdk/variant/variantcontext/filter/JavascriptVariantFilterTest.java
new file mode 100644
index 0000000..3993b79
--- /dev/null
+++ b/src/test/java/htsjdk/variant/variantcontext/filter/JavascriptVariantFilterTest.java
@@ -0,0 +1,72 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2015 Pierre Lindenbaum @yokofakun Institut du Thorax - Nantes - France
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.variant.variantcontext.filter;
+
+import htsjdk.variant.vcf.VCFFileReader;
+
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * @author Pierre Lindenbaum PhD Institut du Thorax - INSERM - Nantes - France
+ */
+
+public class JavascriptVariantFilterTest {
+    final File testDir = new File("src/test/resources/htsjdk/variant");
+
+    @DataProvider
+    public Object[][] jsData() {
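+        // each row: {input VCF, javascript filter script, expected number of variants accepted by the filter}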
+        return new Object[][] {
+                { "ILLUMINA.wex.broad_phase2_baseline.20111114.both.exome.genotypes.1000.vcf", "variantFilter01.js",61 },
+                { "ILLUMINA.wex.broad_phase2_baseline.20111114.both.exome.genotypes.1000.vcf", "variantFilter02.js",38 }, };
+    }
+
+    @Test(dataProvider = "jsData")
+    public void testJavascriptFilters(final String vcfFile, final String javascriptFile, final int expectCount) {
+        final File vcfInput = new File(testDir, vcfFile);
+        final File jsInput = new File(testDir, javascriptFile);
+        final VCFFileReader vcfReader = new VCFFileReader(vcfInput, false);
+        final JavascriptVariantFilter filter;
+        try {
+            filter = new JavascriptVariantFilter(jsInput, vcfReader.getFileHeader());
+        } catch (IOException err) {
+            Assert.fail("cannot read script "+jsInput, err);
+            vcfReader.close();
+            return;
+        }
+        final FilteringVariantContextIterator iter = new FilteringVariantContextIterator(vcfReader.iterator(), filter);
+        int count = 0;
+        while (iter.hasNext()) {
+            iter.next();
+            ++count;
+        }
+        iter.close();
+        vcfReader.close();
+        Assert.assertEquals(count, expectCount, "Expected number of variants " + expectCount + " but got " + count);
+    }
+}
diff --git a/src/tests/java/htsjdk/variant/variantcontext/filter/PassingVariantFilterTest.java b/src/test/java/htsjdk/variant/variantcontext/filter/PassingVariantFilterTest.java
similarity index 100%
rename from src/tests/java/htsjdk/variant/variantcontext/filter/PassingVariantFilterTest.java
rename to src/test/java/htsjdk/variant/variantcontext/filter/PassingVariantFilterTest.java
diff --git a/src/tests/java/htsjdk/variant/variantcontext/filter/SnpFilterTest.java b/src/test/java/htsjdk/variant/variantcontext/filter/SnpFilterTest.java
similarity index 100%
rename from src/tests/java/htsjdk/variant/variantcontext/filter/SnpFilterTest.java
rename to src/test/java/htsjdk/variant/variantcontext/filter/SnpFilterTest.java
diff --git a/src/test/java/htsjdk/variant/variantcontext/writer/TabixOnTheFlyIndexCreationTest.java b/src/test/java/htsjdk/variant/variantcontext/writer/TabixOnTheFlyIndexCreationTest.java
new file mode 100644
index 0000000..2fd1520
--- /dev/null
+++ b/src/test/java/htsjdk/variant/variantcontext/writer/TabixOnTheFlyIndexCreationTest.java
@@ -0,0 +1,66 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2014 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.variant.variantcontext.writer;
+
+import htsjdk.tribble.AbstractFeatureReader;
+import htsjdk.tribble.CloseableTribbleIterator;
+import htsjdk.tribble.FeatureReader;
+import htsjdk.tribble.index.tabix.TabixIndex;
+import htsjdk.tribble.util.TabixUtils;
+import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.vcf.VCF3Codec;
+import htsjdk.variant.vcf.VCFHeader;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.util.EnumSet;
+
+public class TabixOnTheFlyIndexCreationTest {
+    private static final File SMALL_VCF = new File("src/test/resources/htsjdk/tribble/tabix/trioDup.vcf.gz");
+    @Test
+    public void simpleTest() throws Exception {
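+        // round-trip a small VCF through a writer with INDEX_ON_THE_FLY and check that the resulting tabix index can be read back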
+        final VCF3Codec codec = new VCF3Codec();
+        final FeatureReader<VariantContext> reader = AbstractFeatureReader.getFeatureReader(SMALL_VCF.getAbsolutePath(), codec, false);
+        final VCFHeader headerFromFile = (VCFHeader)reader.getHeader();
+        final File vcf = File.createTempFile("TabixOnTheFlyIndexCreationTest.", ".vcf.gz");
+        final File tabix = new File(vcf.getAbsolutePath() + TabixUtils.STANDARD_INDEX_EXTENSION);
+        vcf.deleteOnExit();
+        tabix.deleteOnExit();
+        final VariantContextWriter vcfWriter = new VariantContextWriterBuilder()
+                .setOutputFile(vcf)
+                .setReferenceDictionary(headerFromFile.getSequenceDictionary())
+                .setOptions(EnumSet.of(Options.INDEX_ON_THE_FLY, Options.ALLOW_MISSING_FIELDS_IN_HEADER))
+                .build();
+        vcfWriter.writeHeader(headerFromFile);
+        final CloseableTribbleIterator<VariantContext> it = reader.iterator();
+        while (it.hasNext()) {
+            vcfWriter.add(it.next());
+        }
+        it.close();
+        vcfWriter.close();
+
+        // Hard to validate, so just confirm that index can be read.
+        new TabixIndex(tabix);
+    }
+}
diff --git a/src/test/java/htsjdk/variant/variantcontext/writer/VCFWriterUnitTest.java b/src/test/java/htsjdk/variant/variantcontext/writer/VCFWriterUnitTest.java
new file mode 100644
index 0000000..eaf2f95
--- /dev/null
+++ b/src/test/java/htsjdk/variant/variantcontext/writer/VCFWriterUnitTest.java
@@ -0,0 +1,280 @@
+/*
+* Copyright (c) 2012 The Broad Institute
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package htsjdk.variant.variantcontext.writer;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.util.TestUtil;
+import htsjdk.tribble.AbstractFeatureReader;
+import htsjdk.tribble.FeatureReader;
+import htsjdk.tribble.Tribble;
+import htsjdk.tribble.util.TabixUtils;
+import htsjdk.variant.VariantBaseTest;
+import htsjdk.variant.variantcontext.Allele;
+import htsjdk.variant.variantcontext.Genotype;
+import htsjdk.variant.variantcontext.GenotypeBuilder;
+import htsjdk.variant.variantcontext.GenotypesContext;
+import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.variantcontext.VariantContextBuilder;
+import htsjdk.variant.vcf.VCFCodec;
+import htsjdk.variant.vcf.VCFFileReader;
+import htsjdk.variant.vcf.VCFHeader;
+import htsjdk.variant.vcf.VCFHeaderLine;
+import htsjdk.variant.vcf.VCFHeaderVersion;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.testng.Assert;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+/**
+ * @author aaron
+ *         <p/>
+ *         Class VCFWriterUnitTest
+ *         <p/>
+ *         This class tests out the ability of the VCF writer to correctly write VCF files
+ */
+public class VCFWriterUnitTest extends VariantBaseTest {
+    private Set<VCFHeaderLine> metaData;
+    private Set<String> additionalColumns;
+    private File tempDir;
+
+    @BeforeClass
+    private void createTemporaryDirectory() {
+        tempDir = TestUtil.getTempDirectory("VCFWriter", "StaleIndex");
+    }
+
+    @AfterClass
+    private void deleteTemporaryDirectory() {
+        for (File f : tempDir.listFiles()) {
+            f.delete();
+        }
+        tempDir.delete();
+    }
+
+    /** test, using the writer and reader, that we can output and input a VCF file without problems */
+    @Test(dataProvider = "vcfExtensionsDataProvider")
+    public void testBasicWriteAndRead(final String extension) throws IOException {
+        final File fakeVCFFile = File.createTempFile("testBasicWriteAndRead.", extension);
+        fakeVCFFile.deleteOnExit();
+        if (".vcf.gz".equals(extension)) {
+            new File(fakeVCFFile.getAbsolutePath() + ".tbi").deleteOnExit();
+        } else {
+            Tribble.indexFile(fakeVCFFile).deleteOnExit();
+        }
+        metaData = new HashSet<VCFHeaderLine>();
+        additionalColumns = new HashSet<String>();
+        final SAMSequenceDictionary sequenceDict = createArtificialSequenceDictionary();
+        final VCFHeader header = createFakeHeader(metaData, additionalColumns, sequenceDict);
+        final VariantContextWriter writer = new VariantContextWriterBuilder()
+                .setOutputFile(fakeVCFFile)
+                .setReferenceDictionary(sequenceDict)
+                .setOptions(EnumSet.of(Options.ALLOW_MISSING_FIELDS_IN_HEADER, Options.INDEX_ON_THE_FLY))
+                .build();
+        writer.writeHeader(header);
+        writer.add(createVC(header));
+        writer.add(createVC(header));
+        writer.close();
+        final VCFCodec codec = new VCFCodec();
+        final FeatureReader<VariantContext> reader = AbstractFeatureReader.getFeatureReader(fakeVCFFile.getAbsolutePath(), codec, false);
+        final VCFHeader headerFromFile = (VCFHeader)reader.getHeader();
+
+        int counter = 0;
+
+        // validate what we're reading in
+        validateHeader(headerFromFile, sequenceDict);
+        
+        try {
+            final Iterator<VariantContext> it = reader.iterator();
+            while(it.hasNext()) {
+                it.next();
+                counter++;
+            }
+            Assert.assertEquals(counter, 2);
+        }
+        catch (final IOException e) {
+            throw new RuntimeException(e); // preserve the underlying cause, not just its message
+        }
+
+    }
+
+    /**
+     * create a fake header of known quantity
+     * @param metaData           the header lines
+     * @param additionalColumns  the additional column names
+     * @return a fake VCF header
+     */
+    public static VCFHeader createFakeHeader(final Set<VCFHeaderLine> metaData, final Set<String> additionalColumns,
+                                             final SAMSequenceDictionary sequenceDict) {
+        metaData.add(new VCFHeaderLine(VCFHeaderVersion.VCF4_0.getFormatString(), VCFHeaderVersion.VCF4_0.getVersionString()));
+        metaData.add(new VCFHeaderLine("two", "2"));
+        additionalColumns.add("extra1");
+        additionalColumns.add("extra2");
+        final VCFHeader ret = new VCFHeader(metaData, additionalColumns);
+        ret.setSequenceDictionary(sequenceDict);
+        return ret;
+    }
+
+    /**
+     * create a fake VCF record
+     * @param header the VCF header
+     * @return a VCFRecord
+     */
+    private VariantContext createVC(final VCFHeader header) {
+
+       return createVCGeneral(header,"1",1);
+    }
+
+    private VariantContext createVCGeneral(final VCFHeader header, final String chrom, final int position) {
+        final List<Allele> alleles = new ArrayList<Allele>();
+        final Map<String, Object> attributes = new HashMap<String,Object>();
+        final GenotypesContext genotypes = GenotypesContext.create(header.getGenotypeSamples().size());
+
+        alleles.add(Allele.create("A",true));
+        alleles.add(Allele.create("ACC",false));
+
+        attributes.put("DP","50");
+        for (final String name : header.getGenotypeSamples()) {
+            final Genotype gt = new GenotypeBuilder(name,alleles.subList(1,2)).GQ(0).attribute("BB", "1").phased(true).make();
+            genotypes.add(gt);
+        }
+        return new VariantContextBuilder("RANDOM", chrom, position, position, alleles)
+                .genotypes(genotypes).attributes(attributes).make();
+    }
+
+
+    /**
+     * validate a VCF header
+     * @param header the header to validate
+     */
+    public void validateHeader(final VCFHeader header, final SAMSequenceDictionary sequenceDictionary) {
+        // check the fields
+        int index = 0;
+        for (final VCFHeader.HEADER_FIELDS field : header.getHeaderFields()) {
+            Assert.assertEquals(VCFHeader.HEADER_FIELDS.values()[index], field);
+            index++;
+        }
+        Assert.assertEquals(header.getMetaDataInSortedOrder().size(), metaData.size() + sequenceDictionary.size());
+        index = 0;
+        for (final String key : header.getGenotypeSamples()) {
+            Assert.assertTrue(additionalColumns.contains(key));
+            index++;
+        }
+        Assert.assertEquals(index, additionalColumns.size());
+    }
+
+    @Test(dataProvider = "vcfExtensionsDataProvider")
+    public void TestWritingLargeVCF(final String extension) throws FileNotFoundException, InterruptedException {
+
+        final Set<VCFHeaderLine> metaData = new HashSet<VCFHeaderLine>();
+        final Set<String> Columns = new HashSet<String>();
+        for (int i = 0; i < 123; i++) {
+
+            Columns.add(String.format("SAMPLE_%d", i));
+        }
+
+        final SAMSequenceDictionary dict = createArtificialSequenceDictionary();
+        final VCFHeader header = createFakeHeader(metaData,Columns, dict);
+
+        final File vcf = new File(tempDir, "test" + extension);
+        final String indexExtension;
+        if (extension.equals(".vcf.gz")) {
+            indexExtension = TabixUtils.STANDARD_INDEX_EXTENSION;
+        } else {
+            indexExtension = Tribble.STANDARD_INDEX_EXTENSION;
+        }
+        final File vcfIndex = new File(vcf.getAbsolutePath() + indexExtension);
+        vcfIndex.deleteOnExit();
+
+        for(int count=1;count<2; count++){
+            final VariantContextWriter writer =  new VariantContextWriterBuilder()
+                    .setOutputFile(vcf)
+                    .setReferenceDictionary(dict)
+                    .setOptions(EnumSet.of(Options.ALLOW_MISSING_FIELDS_IN_HEADER, Options.INDEX_ON_THE_FLY))
+                    .build();
+            writer.writeHeader(header);
+
+            for (int i = 1; i < 17 ; i++) { // write 17 chromosomes
+                for (int j = 1; j < 10; j++) { //10 records each
+                    writer.add(createVCGeneral(header, String.format("%d", i), j * 100));
+                }
+            }
+            writer.close();
+
+            Assert.assertTrue(vcf.lastModified() <= vcfIndex.lastModified());
+        }
+    }
+
+    @DataProvider(name = "vcfExtensionsDataProvider")
+    public Object[][]vcfExtensionsDataProvider() {
+        return new Object[][] {
+                // TODO: BCF doesn't work because header is not properly constructed.
+                // {".bcf"},
+                {".vcf"},
+                {".vcf.gz"}
+        };
+    }
+
+
+    /**
+     * A test to ensure that if we add a line to a VCFHeader it will persist through
+     * a round-trip write/read cycle via VariantContextWriter/VCFFileReader
+     */
+    @Test
+    public void testModifyHeader() {
+        final File originalVCF = new File("src/test/resources/htsjdk/variant/HiSeq.10000.vcf");
+        final VCFFileReader reader = new VCFFileReader(originalVCF, false);
+        final VCFHeader header = reader.getFileHeader();
+        reader.close();
+
+        header.addMetaDataLine(new VCFHeaderLine("FOOBAR", "foovalue"));
+
+        final File outputVCF = createTempFile("testModifyHeader", ".vcf");
+        final VariantContextWriter writer = new VariantContextWriterBuilder().setOutputFile(outputVCF).setOptions(EnumSet.of(Options.ALLOW_MISSING_FIELDS_IN_HEADER)).build();
+        writer.writeHeader(header);
+        writer.close();
+
+        final VCFFileReader roundtripReader = new VCFFileReader(outputVCF, false);
+        final VCFHeader roundtripHeader = roundtripReader.getFileHeader();
+        roundtripReader.close();
+
+        Assert.assertNotNull(roundtripHeader.getOtherHeaderLine("FOOBAR"), "Could not find FOOBAR header line after a write/read cycle");
+        Assert.assertEquals(roundtripHeader.getOtherHeaderLine("FOOBAR").getValue(), "foovalue", "Wrong value for FOOBAR header line after a write/read cycle");
+    }
+}
+
diff --git a/src/test/java/htsjdk/variant/variantcontext/writer/VariantContextWriterBuilderUnitTest.java b/src/test/java/htsjdk/variant/variantcontext/writer/VariantContextWriterBuilderUnitTest.java
new file mode 100644
index 0000000..179c4cb
--- /dev/null
+++ b/src/test/java/htsjdk/variant/variantcontext/writer/VariantContextWriterBuilderUnitTest.java
@@ -0,0 +1,399 @@
+/*
+* Copyright (c) 2014 The Broad Institute
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package htsjdk.variant.variantcontext.writer;
+
+import htsjdk.samtools.Defaults;
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.util.BlockCompressedOutputStream;
+import htsjdk.samtools.util.RuntimeIOException;
+import htsjdk.tribble.AbstractFeatureReader;
+import htsjdk.tribble.Tribble;
+import htsjdk.tribble.util.TabixUtils;
+import htsjdk.variant.VariantBaseTest;
+import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder.OutputType;
+import org.testng.Assert;
+import org.testng.annotations.BeforeSuite;
+import org.testng.annotations.Test;
+
+import java.io.*;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.List;
+
+public class VariantContextWriterBuilderUnitTest extends VariantBaseTest {
+    private static final String TEST_BASENAME = "htsjdk-test.VariantContextWriterBuilderUnitTest";
+    private SAMSequenceDictionary dictionary;
+
+    private File vcf;
+    private File vcfIdx;
+    private File vcfMD5;
+    private File bcf;
+    private File bcfIdx;
+    private File unknown;
+
+    private List<File> blockCompressedVCFs;
+    private List<File> blockCompressedIndices;
+
+    @BeforeSuite
+    public void before() throws IOException {
+        dictionary = createArtificialSequenceDictionary();
+        vcf = File.createTempFile(TEST_BASENAME, ".vcf");
+        vcf.deleteOnExit();
+        vcfIdx = Tribble.indexFile(vcf);
+        vcfIdx.deleteOnExit();
+        vcfMD5 = new File(vcf.getAbsolutePath() + ".md5");
+        vcfMD5.deleteOnExit();
+        bcf = File.createTempFile(TEST_BASENAME, ".bcf");
+        bcf.deleteOnExit();
+        bcfIdx = Tribble.indexFile(bcf);
+        bcfIdx.deleteOnExit();
+        unknown = File.createTempFile(TEST_BASENAME, ".unknown");
+        unknown.deleteOnExit();
+
+        blockCompressedVCFs = new ArrayList<File>();
+        blockCompressedIndices = new ArrayList<File>();
+        for (final String extension : AbstractFeatureReader.BLOCK_COMPRESSED_EXTENSIONS) {
+            final File blockCompressed = File.createTempFile(TEST_BASENAME, ".vcf" + extension);
+            blockCompressed.deleteOnExit();
+            blockCompressedVCFs.add(blockCompressed);
+
+            final File index = new File(blockCompressed.getAbsolutePath() + TabixUtils.STANDARD_INDEX_EXTENSION);
+            index.deleteOnExit();
+            blockCompressedIndices.add(index);
+        }
+    }
+
+    @Test
+    public void testSetOutputFile() throws IOException {
+        final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
+                .setReferenceDictionary(dictionary);
+
+        VariantContextWriter writer = builder.setOutputFile(vcf.getAbsolutePath()).build();
+        Assert.assertTrue(writer instanceof VCFWriter, "testSetOutputFile VCF String");
+        Assert.assertFalse(((VCFWriter) writer).getOutputStream() instanceof BlockCompressedOutputStream, "testSetOutputFile VCF String was compressed");
+
+        writer = builder.setOutputFile(vcf).build();
+        Assert.assertTrue(writer instanceof VCFWriter, "testSetOutputFile VCF File");
+        Assert.assertFalse(((VCFWriter)writer).getOutputStream() instanceof BlockCompressedOutputStream, "testSetOutputFile VCF File was compressed");
+
+        for (final String extension : AbstractFeatureReader.BLOCK_COMPRESSED_EXTENSIONS) {
+            final File file = File.createTempFile(TEST_BASENAME + ".setoutput", extension);
+            file.deleteOnExit();
+            final String filename = file.getAbsolutePath();
+
+            writer = builder.setOutputFile(filename).build();
+            Assert.assertTrue(writer instanceof VCFWriter, "testSetOutputFile " + extension + " String");
+            Assert.assertTrue(((VCFWriter) writer).getOutputStream() instanceof BlockCompressedOutputStream, "testSetOutputFile " + extension + " String was not compressed");
+
+            writer = builder.setOutputFile(file).build();
+            Assert.assertTrue(writer instanceof VCFWriter, "testSetOutputFile " + extension + " File");
+            Assert.assertTrue(((VCFWriter) writer).getOutputStream() instanceof BlockCompressedOutputStream, "testSetOutputFile " + extension + " File was not compressed");
+        }
+
+        writer = builder.setOutputFile(bcf).build();
+        Assert.assertTrue(writer instanceof BCF2Writer, "testSetOutputFile BCF String");
+
+        writer = builder.setOutputFile(bcf.getAbsolutePath()).build();
+        Assert.assertTrue(writer instanceof BCF2Writer, "testSetOutputFile BCF File");
+    }
+
+    @Test
+    public void testDetermineOutputType() {
+        Assert.assertEquals(OutputType.VCF, VariantContextWriterBuilder.determineOutputTypeFromFile(this.vcf));
+        Assert.assertEquals(OutputType.BCF, VariantContextWriterBuilder.determineOutputTypeFromFile(this.bcf));
+        Assert.assertEquals(OutputType.VCF_STREAM, VariantContextWriterBuilder.determineOutputTypeFromFile(new File("/dev/stdout")));
+        for (final File f: this.blockCompressedVCFs) {
+            Assert.assertEquals(OutputType.BLOCK_COMPRESSED_VCF, VariantContextWriterBuilder.determineOutputTypeFromFile(f));
+        }
+
+        // Test symlinking
+        try {
+            final Path link = Files.createTempFile("foo.", ".tmp");
+            Files.deleteIfExists(link);
+            Files.createSymbolicLink(link, this.vcf.toPath());
+            link.toFile().deleteOnExit();
+            Assert.assertEquals(OutputType.VCF, VariantContextWriterBuilder.determineOutputTypeFromFile(link.toFile()));
+            link.toFile().delete();
+        }
+        catch (final IOException ioe) {
+            throw new RuntimeIOException(ioe);
+        }
+    }
+
+    @Test
+    public void testSetOutputFileType() {
+        final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
+                .setReferenceDictionary(dictionary)
+                .setOutputFile(unknown);
+
+        VariantContextWriter writer = builder.setOutputFileType(VariantContextWriterBuilder.OutputType.VCF).build();
+        Assert.assertTrue(writer instanceof VCFWriter, "testSetOutputFileType VCF");
+        Assert.assertFalse(((VCFWriter) writer).getOutputStream() instanceof BlockCompressedOutputStream, "testSetOutputFileType VCF was compressed");
+
+        writer = builder.setOption(Options.FORCE_BCF).build();
+        Assert.assertTrue(writer instanceof BCF2Writer, "testSetOutputFileType FORCE_BCF set -> expected BCF, was VCF");
+
+        // test that FORCE_BCF remains in effect, overriding the explicit setting of VCF
+        writer = builder.setOutputFileType(VariantContextWriterBuilder.OutputType.VCF).build();
+        Assert.assertTrue(writer instanceof BCF2Writer, "testSetOutputFileType FORCE_BCF set 2 -> expected BCF, was VCF");
+
+        writer = builder.unsetOption(Options.FORCE_BCF).build();
+        Assert.assertTrue(writer instanceof VCFWriter, "testSetOutputFileType FORCE_BCF unset -> expected VCF, was BCF");
+        Assert.assertFalse(((VCFWriter) writer).getOutputStream() instanceof BlockCompressedOutputStream, "testSetOutputFileType FORCE_BCF unset was compressed");
+
+        writer = builder.setOutputFileType(VariantContextWriterBuilder.OutputType.BLOCK_COMPRESSED_VCF).build();
+        Assert.assertTrue(writer instanceof VCFWriter, "testSetOutputFile BLOCK_COMPRESSED_VCF");
+        Assert.assertTrue(((VCFWriter) writer).getOutputStream() instanceof BlockCompressedOutputStream, "testSetOutputFileType BLOCK_COMPRESSED_VCF was not compressed");
+
+        writer = builder.setOutputFileType(VariantContextWriterBuilder.OutputType.BCF).build();
+        Assert.assertTrue(writer instanceof BCF2Writer, "testSetOutputFileType BCF");
+    }
+
+    @Test
+    public void testSetOutputStream() {
+        final OutputStream stream = new ByteArrayOutputStream();
+
+        final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
+                .setReferenceDictionary(dictionary)
+                .unsetOption(Options.INDEX_ON_THE_FLY)
+                .setOutputStream(stream);
+
+        VariantContextWriter writer = builder.build();
+        Assert.assertTrue(writer instanceof VCFWriter, "testSetOutputStream default");
+
+        writer = builder.setOption(Options.FORCE_BCF).build();
+        Assert.assertTrue(writer instanceof BCF2Writer, "testSetOutputStream FORCE_BCF set -> expected BCF stream, was VCF stream");
+
+        // test that FORCE_BCF remains in effect, overriding the explicit setting of VCF
+        writer = builder.setOutputVCFStream(stream).build();
+        Assert.assertTrue(writer instanceof BCF2Writer, "testSetOutputStream FORCE_BCF set 2 -> expected BCF stream, was VCF stream");
+
+        writer = builder.unsetOption(Options.FORCE_BCF).build();
+        Assert.assertTrue(writer instanceof VCFWriter, "testSetOutputStream FORCE_BCF unset -> expected VCF stream, was BCF stream");
+
+        writer = builder.setOutputBCFStream(stream).build();
+        Assert.assertTrue(writer instanceof BCF2Writer, "testSetOutputStream BCF");
+
+        writer = builder.setOutputVCFStream(stream).build();
+        Assert.assertTrue(writer instanceof VCFWriter, "testSetOutputStream VCF");
+    }
+
+    @Test
+    public void testAsync() {
+        final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
+                .setReferenceDictionary(dictionary)
+                .setOutputFile(vcf);
+
+        VariantContextWriter writer = builder.build();
+        Assert.assertEquals(writer instanceof AsyncVariantContextWriter, Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE, "testAsync default");
+
+        writer = builder.setOption(Options.USE_ASYNC_IO).build();
+        Assert.assertTrue(writer instanceof AsyncVariantContextWriter, "testAsync option=set");
+
+        writer = builder.unsetOption(Options.USE_ASYNC_IO).build();
+        Assert.assertFalse(writer instanceof AsyncVariantContextWriter, "testAsync option=unset");
+    }
+
+    @Test
+    public void testBuffering() {
+        final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
+                .setReferenceDictionary(dictionary)
+                .setOutputFile(vcf)
+                .unsetOption(Options.INDEX_ON_THE_FLY);     // so the potential BufferedOutputStream is not wrapped in a PositionalOutputStream
+
+        VariantContextWriter writer = builder.build();
+        Assert.assertTrue(((VCFWriter) writer).getOutputStream() instanceof BufferedOutputStream, "testBuffering was not buffered by default");
+
+        writer = builder.unsetBuffering().build();
+        Assert.assertFalse(((VCFWriter) writer).getOutputStream() instanceof BufferedOutputStream, "testBuffering was buffered when unset");
+
+        writer = builder.setBuffer(8192).build();
+        Assert.assertTrue(((VCFWriter) writer).getOutputStream() instanceof BufferedOutputStream, "testBuffering was not buffered when set");
+    }
+
+    @Test
+    public void testMD5() {
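+        // the .md5 side-car file should exist only when MD5 creation is enabled; the default follows Defaults.CREATE_MD5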
+        final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
+                .setReferenceDictionary(dictionary)
+                .setOutputFile(vcf);
+
+        VariantContextWriter writer = builder.build();
+        writer.close();
+        Assert.assertEquals(vcfMD5.exists(), Defaults.CREATE_MD5, "MD5 default setting not respected");
+
+        if (vcfMD5.exists())
+            vcfMD5.delete();
+
+        writer = builder.setCreateMD5().build();
+        writer.close();
+        Assert.assertTrue(vcfMD5.exists(), "MD5 not created when requested");
+        vcfMD5.delete();
+
+        writer = builder.unsetCreateMD5().build();
+        writer.close();
+        Assert.assertFalse(vcfMD5.exists(), "MD5 created when not requested");
+
+        writer = builder.setCreateMD5(false).build();
+        writer.close();
+        Assert.assertFalse(vcfMD5.exists(), "MD5 created when not requested via boolean parameter");
+
+        writer = builder.setCreateMD5(true).build();
+        writer.close();
+        Assert.assertTrue(vcfMD5.exists(), "MD5 not created when requested via boolean parameter");
+        vcfMD5.delete();
+
+        for (final File blockCompressed : blockCompressedVCFs) {
+            final File md5 = new File(blockCompressed + ".md5");
+            if (md5.exists())
+                md5.delete();
+            md5.deleteOnExit();
+            writer = builder.setOutputFile(blockCompressed).build();
+            writer.close();
+            Assert.assertTrue(md5.exists(), "MD5 digest not created for " + blockCompressed);
+        }
+    }
+
+    @Test
+    public void testIndexingOnTheFly() {
+        final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
+                .setReferenceDictionary(dictionary)
+                .setOption(Options.INDEX_ON_THE_FLY);
+
+        if (vcfIdx.exists())
+            vcfIdx.delete();
+        VariantContextWriter writer = builder.setOutputFile(vcf).build();
+        writer.close();
+        Assert.assertTrue(vcfIdx.exists(), String.format("VCF index not created for %s / %s", vcf, vcfIdx));
+
+        if (bcfIdx.exists())
+            bcfIdx.delete();
+        writer = builder.setOutputFile(bcf).build();
+        writer.close();
+        Assert.assertTrue(bcfIdx.exists(), String.format("BCF index not created for %s / %s", bcf, bcfIdx));
+
+        for (int i = 0; i < blockCompressedVCFs.size(); i++) {
+            final File blockCompressed = blockCompressedVCFs.get(i);
+            final File index = blockCompressedIndices.get(i);
+            if (index.exists())
+                index.delete();
+            writer = builder.setOutputFile(blockCompressed).setReferenceDictionary(dictionary).build();
+            writer.close();
+            Assert.assertTrue(index.exists(), String.format("Block-compressed index not created for %s / %s", blockCompressed, index));
+
+            // Tabix does not require a reference dictionary.
+            // Tribble does: see tests testRefDictRequiredForVCFIndexOnTheFly / testRefDictRequiredForBCFIndexOnTheFly
+
+            index.delete();
+            writer = builder.setReferenceDictionary(null).build();
+            writer.close();
+            Assert.assertTrue(index.exists(), String.format("Block-compressed index not created for %s / %s", blockCompressed, index));
+        }
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testInvalidImplicitFileType() {
+        new VariantContextWriterBuilder()
+                .setReferenceDictionary(dictionary)
+                .setOutputFile("test.bam")
+                .build();
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testSetInvalidFileType() {
+        new VariantContextWriterBuilder()
+                .setReferenceDictionary(dictionary)
+                .setOutputFile("test.bam")
+                .setOutputFileType(VariantContextWriterBuilder.OutputType.VCF_STREAM)
+                .build();
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testInvalidSetFileTypeForStream() {
+        new VariantContextWriterBuilder()
+                .setReferenceDictionary(dictionary)
+                .setOutputStream(new ByteArrayOutputStream())
+                .setOutputFileType(VariantContextWriterBuilder.OutputType.VCF)
+                .build();
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testUnsupportedIndexOnTheFlyForStreaming() {
+        new VariantContextWriterBuilder()
+                .setReferenceDictionary(dictionary)
+                .setOutputStream(new ByteArrayOutputStream())
+                .setOption(Options.INDEX_ON_THE_FLY)
+                .build();
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testUnsupportedDefaultIndexOnTheFlyForStreaming() {
+        new VariantContextWriterBuilder()
+                .setReferenceDictionary(dictionary)
+                .setOutputStream(new ByteArrayOutputStream())
+                .build();
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testRefDictRequiredForVCFIndexOnTheFly() {
+        new VariantContextWriterBuilder()
+                .setOutputFile(vcf)
+                .setOption(Options.INDEX_ON_THE_FLY)
+                .build();
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testRefDictRequiredForBCFIndexOnTheFly() {
+        new VariantContextWriterBuilder()
+                .setOutputFile(bcf)
+                .setOption(Options.INDEX_ON_THE_FLY)
+                .build();
+    }
+
+    @Test
+    public void testClearOptions() {
+        // Verify that clearOptions doesn't have a side effect of carrying previously set options
+        // forward to subsequent builders
+        VariantContextWriterBuilder vcwb = new VariantContextWriterBuilder();
+        vcwb.clearOptions().setOption(Options.INDEX_ON_THE_FLY);
+        final VariantContextWriterBuilder builder = new VariantContextWriterBuilder().clearOptions();
+        Assert.assertTrue(builder.options.isEmpty());
+    }
+
+    @Test
+    public void testModifyOption() {
+        final VariantContextWriterBuilder builder = new VariantContextWriterBuilder().clearOptions();
+        for (final Options option : Options.values()) {
+            Assert.assertFalse(builder.isOptionSet(option)); // shouldn't be set
+            builder.modifyOption(option, false);
+            Assert.assertFalse(builder.isOptionSet(option)); // still shouldn't be set
+            builder.modifyOption(option, true);
+            Assert.assertTrue(builder.isOptionSet(option)); // now is set
+            builder.modifyOption(option, false);
+            Assert.assertFalse(builder.isOptionSet(option)); // has been unset
+        }
+    }
+}
diff --git a/src/tests/java/htsjdk/variant/variantcontext/writer/VariantContextWritersUnitTest.java b/src/test/java/htsjdk/variant/variantcontext/writer/VariantContextWritersUnitTest.java
similarity index 100%
rename from src/tests/java/htsjdk/variant/variantcontext/writer/VariantContextWritersUnitTest.java
rename to src/test/java/htsjdk/variant/variantcontext/writer/VariantContextWritersUnitTest.java
diff --git a/src/test/java/htsjdk/variant/vcf/AbstractVCFCodecTest.java b/src/test/java/htsjdk/variant/vcf/AbstractVCFCodecTest.java
new file mode 100644
index 0000000..051c19b
--- /dev/null
+++ b/src/test/java/htsjdk/variant/vcf/AbstractVCFCodecTest.java
@@ -0,0 +1,53 @@
+package htsjdk.variant.vcf;
+
+import htsjdk.tribble.TribbleException;
+import htsjdk.variant.VariantBaseTest;
+import htsjdk.variant.variantcontext.Allele;
+import htsjdk.variant.variantcontext.VariantContext;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.util.List;
+
+
+
+public class AbstractVCFCodecTest extends VariantBaseTest {
+	@Test
+	public void shouldPreserveSymbolicAlleleCase() {
+		VCFFileReader reader = new VCFFileReader(new File(VariantBaseTest.variantTestDataRoot + "breakpoint.vcf"), false);
+		VariantContext variant = reader.iterator().next();
+		reader.close();
+		
+		// VCF v4.1 s1.4.5
+		// Tools processing VCF files are not required to preserve case in the allele String, except for IDs, which are case sensitive.
+		Assert.assertTrue(variant.getAlternateAllele(0).getDisplayString().contains("chr12"));
+	}
+
+	@Test
+	public void TestSpanDelParseAlleles(){
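+		// a spanning deletion (Allele.SPAN_DEL_STRING) in the ALT position should parse without throwing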
+		List<Allele> list = VCF3Codec.parseAlleles("A", Allele.SPAN_DEL_STRING, 0);
+	}
+
+	@Test(expectedExceptions = TribbleException.class)
+	public void TestSpanDelParseAllelesException(){
+		List<Allele> list1 = VCF3Codec.parseAlleles(Allele.SPAN_DEL_STRING, "A", 0);
+	}
+
+	@DataProvider(name="thingsToTryToDecode")
+	public Object[][] getThingsToTryToDecode(){
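+		// each row: {path, whether canDecodeFile is expected to recognize it as a decodable VCF}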
+		return new Object[][] {
+				{"src/test/resources/htsjdk/tribble/tabix/testTabixIndex.vcf", true},
+				{"src/test/resources/htsjdk/tribble/tabix/testTabixIndex.vcf.gz", true},
+				{"src/test/resources/htsjdk/tribble/nonexistant.garbage", false},
+				{"src/test/resources/htsjdk/tribble/testIntervalList.list", false}
+		};
+	}
+
+	@Test(dataProvider = "thingsToTryToDecode")
+	public void testCanDecodeFile(String potentialInput, boolean canDecode) {
+		Assert.assertEquals(AbstractVCFCodec.canDecodeFile(potentialInput, VCFCodec.VCF4_MAGIC_HEADER), canDecode);
+	}
+
+}
diff --git a/src/tests/java/htsjdk/variant/vcf/IndexFactoryUnitTest.java b/src/test/java/htsjdk/variant/vcf/IndexFactoryUnitTest.java
similarity index 100%
rename from src/tests/java/htsjdk/variant/vcf/IndexFactoryUnitTest.java
rename to src/test/java/htsjdk/variant/vcf/IndexFactoryUnitTest.java
diff --git a/src/tests/java/htsjdk/variant/vcf/VCFCompoundHeaderLineUnitTest.java b/src/test/java/htsjdk/variant/vcf/VCFCompoundHeaderLineUnitTest.java
similarity index 100%
rename from src/tests/java/htsjdk/variant/vcf/VCFCompoundHeaderLineUnitTest.java
rename to src/test/java/htsjdk/variant/vcf/VCFCompoundHeaderLineUnitTest.java
diff --git a/src/tests/java/htsjdk/variant/vcf/VCFEncoderTest.java b/src/test/java/htsjdk/variant/vcf/VCFEncoderTest.java
similarity index 100%
rename from src/tests/java/htsjdk/variant/vcf/VCFEncoderTest.java
rename to src/test/java/htsjdk/variant/vcf/VCFEncoderTest.java
diff --git a/src/test/java/htsjdk/variant/vcf/VCFHeaderLineTranslatorUnitTest.java b/src/test/java/htsjdk/variant/vcf/VCFHeaderLineTranslatorUnitTest.java
new file mode 100644
index 0000000..88a8cce
--- /dev/null
+++ b/src/test/java/htsjdk/variant/vcf/VCFHeaderLineTranslatorUnitTest.java
@@ -0,0 +1,62 @@
+package htsjdk.variant.vcf;
+
+import htsjdk.tribble.TribbleException;
+import htsjdk.variant.VariantBaseTest;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.util.Map;
+
+public class VCFHeaderLineTranslatorUnitTest extends VariantBaseTest {
+
+    @Test
+    public void testParseVCF4HeaderLine() {
+        // the following tests exercise the escaping of quotes and backslashes in VCF header lines
+
+        // test a case with no escapes
+        final Map<String,String> values = VCFHeaderLineTranslator.parseLine(VCFHeaderVersion.VCF4_2, "<ID=SnpCluster,Description=\"SNPs found in clusters\">", null);
+        Assert.assertEquals(values.size(), 2);
+        Assert.assertEquals(values.get("ID"), "SnpCluster");
+        Assert.assertEquals(values.get("Description"), "SNPs found in clusters");
+
+        // test escaped quotes
+        final Map<String,String> values2 = VCFHeaderLineTranslator.parseLine(VCFHeaderVersion.VCF4_2, "<ID=ANNOTATION,Description=\"ANNOTATION != \\\"NA\\\" || ANNOTATION <= 0.01\">", null);
+        Assert.assertEquals(values2.size(), 2);
+        Assert.assertEquals(values2.get("ID"), "ANNOTATION");
+        Assert.assertEquals(values2.get("Description"), "ANNOTATION != \"NA\" || ANNOTATION <= 0.01");
+
+        // test escaped quotes and an escaped backslash
+        final Map<String,String> values3 = VCFHeaderLineTranslator.parseLine(VCFHeaderVersion.VCF4_2, "<ID=ANNOTATION,Description=\"ANNOTATION \\\\= \\\"NA\\\" || ANNOTATION <= 0.01\">", null);
+        Assert.assertEquals(values3.size(), 2);
+        Assert.assertEquals(values3.get("ID"), "ANNOTATION");
+        Assert.assertEquals(values3.get("Description"), "ANNOTATION \\= \"NA\" || ANNOTATION <= 0.01");
+
+        // test a header line with two value tags, one with an escaped backslash and two escaped quotes, one with an escaped quote
+        final Map<String,String> values4 = VCFHeaderLineTranslator.parseLine(VCFHeaderVersion.VCF4_2, "<ID=ANNOTATION,Description=\"ANNOTATION \\\\= \\\"NA\\\" || ANNOTATION <= 0.01\", Description2=\"foo\\\"bar\">", null);
+        Assert.assertEquals(values4.size(), 3);
+        Assert.assertEquals(values4.get("ID"), "ANNOTATION");
+        Assert.assertEquals(values4.get("Description"), "ANNOTATION \\= \"NA\" || ANNOTATION <= 0.01");
+        Assert.assertEquals(values4.get("Description2"), "foo\"bar");
+
+        // test a line with a backslash that appears before something other than a quote or backslash
+        final Map<String,String> values5 = VCFHeaderLineTranslator.parseLine(VCFHeaderVersion.VCF4_2, "<ID=ANNOTATION,Description=\"ANNOTATION \\n with a newline in it\">", null);
+        Assert.assertEquals(values5.size(), 2);
+        Assert.assertEquals(values5.get("ID"), "ANNOTATION");
+        Assert.assertEquals(values5.get("Description"), "ANNOTATION \\n with a newline in it");
+
+        // test with an unclosed quote
+        try {
+            final Map<String, String> values6 = VCFHeaderLineTranslator.parseLine(VCFHeaderVersion.VCF4_2, "<ID=ANNOTATION,Description=\"ANNOTATION \\n with a newline in it>", null);
+            Assert.fail("Should have thrown a TribbleException for having an unclosed quote in the description line");
+        } catch (TribbleException.InvalidHeader e) {
+            // expected: the unterminated quote is reported as an invalid header
+        }
+
+        // test with an escaped quote at the end
+        try {
+            final Map<String, String> values7 = VCFHeaderLineTranslator.parseLine(VCFHeaderVersion.VCF4_2, "<ID=ANNOTATION,Description=\"ANNOTATION \\n with a newline in it\\\">", null);
+            Assert.fail("Should have thrown a TribbleException for having an unclosed quote in the description line");
+        } catch (TribbleException.InvalidHeader e) {
+            // expected: the trailing escaped quote leaves the description unterminated
+        }
+
+    }
+}
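
To make the escaping rules above easier to see at a glance, here is a minimal sketch of the same parseLine call used standalone. The class name is hypothetical; it is placed in the htsjdk.variant.vcf package, like the test itself, so the parseLine call resolves exactly as it does above, and the null third argument simply mirrors how the test invokes it.

    package htsjdk.variant.vcf;

    import java.util.Map;

    public class HeaderLineParseSketch {
        public static void main(String[] args) {
            // a quoted Description containing an escaped quote, as in the tests above
            final String line = "<ID=FOO,Description=\"value with an escaped \\\" quote\">";

            final Map<String, String> fields =
                    VCFHeaderLineTranslator.parseLine(VCFHeaderVersion.VCF4_2, line, null);

            // the translator strips the surrounding quotes and un-escapes the embedded one
            System.out.println(fields.get("ID"));          // FOO
            System.out.println(fields.get("Description")); // value with an escaped " quote
        }
    }
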
diff --git a/src/tests/java/htsjdk/variant/vcf/VCFHeaderLineUnitTest.java b/src/test/java/htsjdk/variant/vcf/VCFHeaderLineUnitTest.java
similarity index 100%
rename from src/tests/java/htsjdk/variant/vcf/VCFHeaderLineUnitTest.java
rename to src/test/java/htsjdk/variant/vcf/VCFHeaderLineUnitTest.java
diff --git a/src/test/java/htsjdk/variant/vcf/VCFHeaderUnitTest.java b/src/test/java/htsjdk/variant/vcf/VCFHeaderUnitTest.java
new file mode 100644
index 0000000..af875fc
--- /dev/null
+++ b/src/test/java/htsjdk/variant/vcf/VCFHeaderUnitTest.java
@@ -0,0 +1,561 @@
+/*
+* Copyright (c) 2012 The Broad Institute
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package htsjdk.variant.vcf;
+
+import htsjdk.samtools.util.CloseableIterator;
+import htsjdk.samtools.util.TestUtil;
+import htsjdk.tribble.TribbleException;
+import htsjdk.tribble.readers.*;
+import htsjdk.variant.VariantBaseTest;
+import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.variantcontext.writer.Options;
+import htsjdk.variant.variantcontext.writer.VariantContextWriter;
+import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder;
+import org.testng.Assert;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.PrintWriter;
+import java.io.StringReader;
+import java.math.BigInteger;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.EnumSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Unit tests for {@link VCFHeader}.
+ * Created by aaron on Jun 30, 2010.
+ */
+public class VCFHeaderUnitTest extends VariantBaseTest {
+
+    private File tempDir;
+
+    private VCFHeader createHeader(String headerStr) {
+        VCFCodec codec = new VCFCodec();
+        VCFHeader header = (VCFHeader) codec.readActualHeader(new LineIteratorImpl(new SynchronousLineReader(
+                new StringReader(headerStr))));
+        Assert.assertEquals(header.getMetaDataInInputOrder().size(), VCF4headerStringCount);
+        return header;
+    }
+
+    @BeforeClass
+    private void createTemporaryDirectory() {
+        tempDir = TestUtil.getTempDirectory("VCFHeader", "VCFHeaderTest");
+    }
+
+    @AfterClass
+    private void deleteTemporaryDirectory() {
+        for (File f : tempDir.listFiles()) {
+            f.delete();
+        }
+        tempDir.delete();
+    }
+
+    @Test
+    public void testVCF4ToVCF4() {
+        VCFHeader header = createHeader(VCF4headerStrings);
+        checkMD5ofHeaderFile(header, "91c33dadb92e01ea349bd4bcdd02d6be");
+    }
+
+    @Test
+    public void testVCF4ToVCF4_alternate() {
+        VCFHeader header = createHeader(VCF4headerStrings_with_negativeOne);
+        checkMD5ofHeaderFile(header, "39318d9713897d55be5ee32a2119853f");
+    }
+
+    @Test
+    public void testVCFHeaderSampleRenamingSingleSampleVCF() throws Exception {
+        final VCFCodec codec = new VCFCodec();
+        codec.setRemappedSampleName("FOOSAMPLE");
+        final AsciiLineReaderIterator vcfIterator = new AsciiLineReaderIterator(new AsciiLineReader(new FileInputStream(variantTestDataRoot + "HiSeq.10000.vcf")));
+        final VCFHeader header = (VCFHeader) codec.readHeader(vcfIterator).getHeaderValue();
+
+        Assert.assertEquals(header.getNGenotypeSamples(), 1, "Wrong number of samples in remapped header");
+        Assert.assertEquals(header.getGenotypeSamples().get(0), "FOOSAMPLE", "Sample name in remapped header has incorrect value");
+
+        int recordCount = 0;
+        while (vcfIterator.hasNext() && recordCount < 10) {
+            recordCount++;
+            final VariantContext vcfRecord = codec.decode(vcfIterator.next());
+
+            Assert.assertEquals(vcfRecord.getSampleNames().size(), 1, "Wrong number of samples in vcf record after remapping");
+            Assert.assertEquals(vcfRecord.getSampleNames().iterator().next(), "FOOSAMPLE", "Wrong sample in vcf record after remapping");
+        }
+    }
+
+    @Test
+    public void testVCFHeaderDictionaryMerging() {
+        VCFHeader headerOne = new VCFFileReader(new File(variantTestDataRoot + "dbsnp_135.b37.1000.vcf"), false).getFileHeader();
+        VCFHeader headerTwo = new VCFHeader(headerOne); // deep copy
+        final List<String> sampleList = new ArrayList<String>();
+        sampleList.addAll(headerOne.getSampleNamesInOrder());
+
+        // Check that the two dictionaries start out the same
+        headerOne.getSequenceDictionary().assertSameDictionary(headerTwo.getSequenceDictionary());
+
+        // Run the merge command
+        final VCFHeader mergedHeader = new VCFHeader(VCFUtils.smartMergeHeaders(Arrays.asList(headerOne, headerTwo), false), sampleList);
+
+        // Check that the mergedHeader's sequence dictionary matches the first two
+        mergedHeader.getSequenceDictionary().assertSameDictionary(headerOne.getSequenceDictionary());
+    }
+
+    @Test(expectedExceptions = TribbleException.class)
+    public void testVCFHeaderSampleRenamingMultiSampleVCF() throws Exception {
+        final VCFCodec codec = new VCFCodec();
+        codec.setRemappedSampleName("FOOSAMPLE");
+        final AsciiLineReaderIterator vcfIterator = new AsciiLineReaderIterator(new AsciiLineReader(new FileInputStream(variantTestDataRoot + "ex2.vcf")));
+        final VCFHeader header = (VCFHeader) codec.readHeader(vcfIterator).getHeaderValue();
+    }
+
+    @Test(expectedExceptions = TribbleException.class)
+    public void testVCFHeaderSampleRenamingSitesOnlyVCF() throws Exception {
+        final VCFCodec codec = new VCFCodec();
+        codec.setRemappedSampleName("FOOSAMPLE");
+        final AsciiLineReaderIterator vcfIterator = new AsciiLineReaderIterator(new AsciiLineReader(new FileInputStream(variantTestDataRoot + "dbsnp_135.b37.1000.vcf")));
+        final VCFHeader header = (VCFHeader) codec.readHeader(vcfIterator).getHeaderValue();
+    }
+
+    private VCFHeader getHiSeqVCFHeader() {
+        final File vcf = new File("src/test/resources/htsjdk/variant/HiSeq.10000.vcf");
+        final VCFFileReader reader = new VCFFileReader(vcf, false);
+        final VCFHeader header = reader.getFileHeader();
+        reader.close();
+        return header;
+    }
+
+    @Test
+    public void testVCFHeaderAddInfoLine() {
+        final VCFHeader header = getHiSeqVCFHeader();
+        final VCFInfoHeaderLine infoLine = new VCFInfoHeaderLine("TestInfoLine", VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String, "test info line");
+        header.addMetaDataLine(infoLine);
+
+        Assert.assertTrue(header.getInfoHeaderLines().contains(infoLine), "TestInfoLine not found in info header lines");
+        Assert.assertTrue(header.getMetaDataInInputOrder().contains(infoLine), "TestInfoLine not found in set of all header lines");
+        Assert.assertNotNull(header.getInfoHeaderLine("TestInfoLine"), "Lookup for TestInfoLine by key failed");
+
+        Assert.assertFalse(header.getFormatHeaderLines().contains(infoLine), "TestInfoLine present in format header lines");
+        Assert.assertFalse(header.getFilterLines().contains(infoLine), "TestInfoLine present in filter header lines");
+        Assert.assertFalse(header.getContigLines().contains(infoLine), "TestInfoLine present in contig header lines");
+        Assert.assertFalse(header.getOtherHeaderLines().contains(infoLine), "TestInfoLine present in other header lines");
+    }
+
+    @Test
+    public void testVCFHeaderAddFormatLine() {
+        final VCFHeader header = getHiSeqVCFHeader();
+        final VCFFormatHeaderLine formatLine = new VCFFormatHeaderLine("TestFormatLine", VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String, "test format line");
+        header.addMetaDataLine(formatLine);
+
+        Assert.assertTrue(header.getFormatHeaderLines().contains(formatLine), "TestFormatLine not found in format header lines");
+        Assert.assertTrue(header.getMetaDataInInputOrder().contains(formatLine), "TestFormatLine not found in set of all header lines");
+        Assert.assertNotNull(header.getFormatHeaderLine("TestFormatLine"), "Lookup for TestFormatLine by key failed");
+
+        Assert.assertFalse(header.getInfoHeaderLines().contains(formatLine), "TestFormatLine present in info header lines");
+        Assert.assertFalse(header.getFilterLines().contains(formatLine), "TestFormatLine present in filter header lines");
+        Assert.assertFalse(header.getContigLines().contains(formatLine), "TestFormatLine present in contig header lines");
+        Assert.assertFalse(header.getOtherHeaderLines().contains(formatLine), "TestFormatLine present in other header lines");
+    }
+
+    @Test
+    public void testVCFHeaderAddFilterLine() {
+        final VCFHeader header = getHiSeqVCFHeader();
+        final VCFFilterHeaderLine filterLine = new VCFFilterHeaderLine("TestFilterLine");
+        header.addMetaDataLine(filterLine);
+
+        Assert.assertTrue(header.getFilterLines().contains(filterLine), "TestFilterLine not found in filter header lines");
+        Assert.assertTrue(header.getMetaDataInInputOrder().contains(filterLine), "TestFilterLine not found in set of all header lines");
+        Assert.assertNotNull(header.getFilterHeaderLine("TestFilterLine"), "Lookup for TestFilterLine by key failed");
+
+        Assert.assertFalse(header.getInfoHeaderLines().contains(filterLine), "TestFilterLine present in info header lines");
+        Assert.assertFalse(header.getFormatHeaderLines().contains(filterLine), "TestFilterLine present in format header lines");
+        Assert.assertFalse(header.getContigLines().contains(filterLine), "TestFilterLine present in contig header lines");
+        Assert.assertFalse(header.getOtherHeaderLines().contains(filterLine), "TestFilterLine present in other header lines");
+    }
+
+    @Test
+    public void testVCFHeaderAddContigLine() {
+        final VCFHeader header = getHiSeqVCFHeader();
+        final VCFContigHeaderLine contigLine = new VCFContigHeaderLine("<ID=chr1,length=1234567890,assembly=FAKE,md5=f126cdf8a6e0c7f379d618ff66beb2da,species=\"Homo sapiens\">", VCFHeaderVersion.VCF4_0, "chr1", 0);
+        header.addMetaDataLine(contigLine);
+
+        Assert.assertTrue(header.getContigLines().contains(contigLine), "Test contig line not found in contig header lines");
+        Assert.assertTrue(header.getMetaDataInInputOrder().contains(contigLine), "Test contig line not found in set of all header lines");
+
+        Assert.assertFalse(header.getInfoHeaderLines().contains(contigLine), "Test contig line present in info header lines");
+        Assert.assertFalse(header.getFormatHeaderLines().contains(contigLine), "Test contig line present in format header lines");
+        Assert.assertFalse(header.getFilterLines().contains(contigLine), "Test contig line present in filter header lines");
+        Assert.assertFalse(header.getOtherHeaderLines().contains(contigLine), "Test contig line present in other header lines");
+    }
+
+    @Test
+    public void testVCFHeaderAddOtherLine() {
+        final VCFHeader header = getHiSeqVCFHeader();
+        final VCFHeaderLine otherLine = new VCFHeaderLine("TestOtherLine", "val");
+        header.addMetaDataLine(otherLine);
+
+        Assert.assertTrue(header.getOtherHeaderLines().contains(otherLine), "TestOtherLine not found in other header lines");
+        Assert.assertTrue(header.getMetaDataInInputOrder().contains(otherLine), "TestOtherLine not found in set of all header lines");
+        Assert.assertNotNull(header.getOtherHeaderLine("TestOtherLine"), "Lookup for TestOtherLine by key failed");
+
+        Assert.assertFalse(header.getInfoHeaderLines().contains(otherLine), "TestOtherLine present in info header lines");
+        Assert.assertFalse(header.getFormatHeaderLines().contains(otherLine), "TestOtherLine present in format header lines");
+        Assert.assertFalse(header.getContigLines().contains(otherLine), "TestOtherLine present in contig header lines");
+        Assert.assertFalse(header.getFilterLines().contains(otherLine), "TestOtherLine present in filter header lines");
+    }
+
+    @Test
+    public void testVCFHeaderAddMetaDataLineDoesNotDuplicateContigs() {
+        File input = new File("src/test/resources/htsjdk/variant/ex2.vcf");
+
+        VCFFileReader reader = new VCFFileReader(input, false);
+        VCFHeader header = reader.getFileHeader();
+
+        final int numContigLinesBefore = header.getContigLines().size();
+
+        VCFInfoHeaderLine newInfoField = new VCFInfoHeaderLine("test", VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String, "test info field");
+        header.addMetaDataLine(newInfoField);
+
+        // getting the sequence dictionary used to fail because contig lines were duplicated (issue #214);
+        // we expect this call not to throw an exception
+        header.getSequenceDictionary();
+
+        final int numContigLinesAfter = header.getContigLines().size();
+        // assert that we have the same number of contig lines before and after
+        Assert.assertEquals(numContigLinesBefore, numContigLinesAfter);
+    }
+
+    @Test
+    public void testVCFHeaderAddDuplicateContigLine() {
+        File input = new File("src/test/resources/htsjdk/variant/ex2.vcf");
+
+        VCFFileReader reader = new VCFFileReader(input, false);
+        VCFHeader header = reader.getFileHeader();
+
+
+        final int numContigLinesBefore = header.getContigLines().size();
+        // try to re-add the first contig line
+        header.addMetaDataLine(header.getContigLines().get(0));
+        final int numContigLinesAfter = header.getContigLines().size();
+
+        // assert that we have the same number of contig lines before and after
+        Assert.assertEquals(numContigLinesBefore, numContigLinesAfter);
+    }
+
+    @Test
+    public void testVCFHeaderAddDuplicateHeaderLine() {
+        File input = new File("src/test/resources/htsjdk/variant/ex2.vcf");
+
+        VCFFileReader reader = new VCFFileReader(input, false);
+        VCFHeader header = reader.getFileHeader();
+
+        VCFHeaderLine newHeaderLine = new VCFHeaderLine("key", "value");
+        // add this new header line
+        header.addMetaDataLine(newHeaderLine);
+
+        final int numHeaderLinesBefore = header.getOtherHeaderLines().size();
+        // re-add the same header line
+        header.addMetaDataLine(newHeaderLine);
+        final int numHeaderLinesAfter = header.getOtherHeaderLines().size();
+
+        // assert that we have the same number of other header lines before and after
+        Assert.assertEquals(numHeaderLinesBefore, numHeaderLinesAfter);
+    }
+
+    @Test
+    public void testVCFHeaderSerialization() throws Exception {
+        final VCFFileReader reader = new VCFFileReader(new File("src/test/resources/htsjdk/variant/HiSeq.10000.vcf"), false);
+        final VCFHeader originalHeader = reader.getFileHeader();
+        reader.close();
+
+        final VCFHeader deserializedHeader = TestUtil.serializeAndDeserialize(originalHeader);
+
+        Assert.assertEquals(deserializedHeader.getMetaDataInInputOrder(), originalHeader.getMetaDataInInputOrder(), "Header metadata does not match before/after serialization");
+        Assert.assertEquals(deserializedHeader.getContigLines(), originalHeader.getContigLines(), "Contig header lines do not match before/after serialization");
+        Assert.assertEquals(deserializedHeader.getFilterLines(), originalHeader.getFilterLines(), "Filter header lines do not match before/after serialization");
+        Assert.assertEquals(deserializedHeader.getFormatHeaderLines(), originalHeader.getFormatHeaderLines(), "Format header lines do not match before/after serialization");
+        Assert.assertEquals(deserializedHeader.getIDHeaderLines(), originalHeader.getIDHeaderLines(), "ID header lines do not match before/after serialization");
+        Assert.assertEquals(deserializedHeader.getInfoHeaderLines(), originalHeader.getInfoHeaderLines(), "Info header lines do not match before/after serialization");
+        Assert.assertEquals(deserializedHeader.getOtherHeaderLines(), originalHeader.getOtherHeaderLines(), "Other header lines do not match before/after serialization");
+        Assert.assertEquals(deserializedHeader.getGenotypeSamples(), originalHeader.getGenotypeSamples(), "Genotype samples not the same before/after serialization");
+        Assert.assertEquals(deserializedHeader.samplesWereAlreadySorted(), originalHeader.samplesWereAlreadySorted(), "Sortedness of samples not the same before/after serialization");
+        Assert.assertEquals(deserializedHeader.getSampleNamesInOrder(), originalHeader.getSampleNamesInOrder(), "Sorted list of sample names in header not the same before/after serialization");
+        Assert.assertEquals(deserializedHeader.getSampleNameToOffset(), originalHeader.getSampleNameToOffset(), "Sample name to offset map not the same before/after serialization");
+        Assert.assertEquals(deserializedHeader.toString(), originalHeader.toString(), "String representation of header not the same before/after serialization");
+    }
+
+    @Test
+    public void testVCFHeaderQuoteEscaping() throws Exception {
+        // this test ensures that the end-to-end process of quote escaping is stable when headers are
+        // read and re-written, i.e. that quotes that are already escaped won't be re-escaped. It does
+        // this by reading a test file, adding a header line with an unescaped quote, writing out a copy
+        // of the file, reading it back in and writing a second copy, and finally reading back the second
+        // copy and comparing it to the first.
+
+        // read an existing VCF
+        final VCFFileReader originalFileReader = new VCFFileReader(new File("src/test/resources/htsjdk/variant/VCF4HeaderTest.vcf"), false);
+        final VCFHeader originalHeader = originalFileReader.getFileHeader();
+
+        // add a header line with quotes to the header
+        final Map<String, String> attributes = new LinkedHashMap<>();
+        attributes.put("ID", "VariantFiltration");
+        attributes.put("CommandLineOptions", "filterName=[ANNOTATION] filterExpression=[ANNOTATION == \"NA\" || ANNOTATION <= 2.0]");
+        final VCFSimpleHeaderLine addedHeaderLine = new VCFSimpleHeaderLine("GATKCommandLine.Test", attributes);
+        originalHeader.addMetaDataLine(addedHeaderLine);
+
+        final VCFFilterHeaderLine originalCopyAnnotationLine1 = originalHeader.getFilterHeaderLine("ANNOTATION");
+        Assert.assertNotNull(originalCopyAnnotationLine1);
+        Assert.assertEquals(originalCopyAnnotationLine1.getGenericFieldValue("Description"), "ANNOTATION != \"NA\" || ANNOTATION <= 0.01", originalCopyAnnotationLine1.toString());
+
+        final VCFFilterHeaderLine originalCopyAnnotationLine2 = originalHeader.getFilterHeaderLine("ANNOTATION2");
+        Assert.assertNotNull(originalCopyAnnotationLine2);
+        Assert.assertEquals(originalCopyAnnotationLine2.getGenericFieldValue("Description"), "ANNOTATION with quote \" that is unmatched but escaped");
+
+        final VCFInfoHeaderLine originalEscapingQuoteInfoLine = originalHeader.getInfoHeaderLine("EscapingQuote");
+        Assert.assertNotNull(originalEscapingQuoteInfoLine);
+        Assert.assertEquals(originalEscapingQuoteInfoLine.getDescription(), "This description has an escaped \" quote in it");
+
+        final VCFInfoHeaderLine originalEscapingBackslashInfoLine = originalHeader.getInfoHeaderLine("EscapingBackslash");
+        Assert.assertNotNull(originalEscapingBackslashInfoLine);
+        Assert.assertEquals(originalEscapingBackslashInfoLine.getDescription(), "This description has an escaped \\ backslash in it");
+
+        final VCFInfoHeaderLine originalEscapingNonQuoteOrBackslashInfoLine = originalHeader.getInfoHeaderLine("EscapingNonQuoteOrBackslash");
+        Assert.assertNotNull(originalEscapingNonQuoteOrBackslashInfoLine);
+        Assert.assertEquals(originalEscapingNonQuoteOrBackslashInfoLine.getDescription(), "This other value has a \\n newline in it");
+
+        // write the file out into a new copy
+        final File firstCopyVCFFile = File.createTempFile("testEscapeHeaderQuotes1.", ".vcf");
+        firstCopyVCFFile.deleteOnExit();
+
+        final VariantContextWriter firstCopyWriter = new VariantContextWriterBuilder()
+                .setOutputFile(firstCopyVCFFile)
+                .setReferenceDictionary(createArtificialSequenceDictionary())
+                .setOptions(EnumSet.of(Options.ALLOW_MISSING_FIELDS_IN_HEADER, Options.INDEX_ON_THE_FLY))
+                .build();
+        firstCopyWriter.writeHeader(originalHeader);
+        final CloseableIterator<VariantContext> firstCopyVariantIterator = originalFileReader.iterator();
+        while (firstCopyVariantIterator.hasNext()) {
+            VariantContext variantContext = firstCopyVariantIterator.next();
+            firstCopyWriter.add(variantContext);
+        }
+        originalFileReader.close();
+        firstCopyWriter.close();
+
+        // read the copied file back in
+        final VCFFileReader firstCopyReader = new VCFFileReader(firstCopyVCFFile, false);
+        final VCFHeader firstCopyHeader = firstCopyReader.getFileHeader();
+        final VCFHeaderLine firstCopyNewHeaderLine = firstCopyHeader.getOtherHeaderLine("GATKCommandLine.Test");
+        Assert.assertNotNull(firstCopyNewHeaderLine);
+
+        final VCFFilterHeaderLine firstCopyAnnotationLine1 = firstCopyHeader.getFilterHeaderLine("ANNOTATION");
+        Assert.assertNotNull(firstCopyAnnotationLine1);
+        Assert.assertEquals(firstCopyAnnotationLine1.getGenericFieldValue("Description"), "ANNOTATION != \"NA\" || ANNOTATION <= 0.01");
+
+        final VCFFilterHeaderLine firstCopyAnnotationLine2 = firstCopyHeader.getFilterHeaderLine("ANNOTATION2");
+        Assert.assertNotNull(firstCopyAnnotationLine2);
+
+        final VCFInfoHeaderLine firstCopyEscapingQuoteInfoLine = firstCopyHeader.getInfoHeaderLine("EscapingQuote");
+        Assert.assertNotNull(firstCopyEscapingQuoteInfoLine);
+        Assert.assertEquals(firstCopyEscapingQuoteInfoLine.getDescription(), "This description has an escaped \" quote in it");
+
+        final VCFInfoHeaderLine firstCopyEscapingBackslashInfoLine = firstCopyHeader.getInfoHeaderLine("EscapingBackslash");
+        Assert.assertNotNull(firstCopyEscapingBackslashInfoLine);
+        Assert.assertEquals(firstCopyEscapingBackslashInfoLine.getDescription(), "This description has an escaped \\ backslash in it");
+
+        final VCFInfoHeaderLine firstCopyEscapingNonQuoteOrBackslashInfoLine = firstCopyHeader.getInfoHeaderLine("EscapingNonQuoteOrBackslash");
+        Assert.assertNotNull(firstCopyEscapingNonQuoteOrBackslashInfoLine);
+        Assert.assertEquals(firstCopyEscapingNonQuoteOrBackslashInfoLine.getDescription(), "This other value has a \\n newline in it");
+
+
+        // write one more copy to make sure things don't get double escaped
+        final File secondCopyVCFFile = File.createTempFile("testEscapeHeaderQuotes2.", ".vcf");
+        secondCopyVCFFile.deleteOnExit();
+        final VariantContextWriter secondCopyWriter = new VariantContextWriterBuilder()
+                .setOutputFile(secondCopyVCFFile)
+                .setReferenceDictionary(createArtificialSequenceDictionary())
+                .setOptions(EnumSet.of(Options.ALLOW_MISSING_FIELDS_IN_HEADER, Options.INDEX_ON_THE_FLY))
+                .build();
+        secondCopyWriter.writeHeader(firstCopyHeader);
+        final CloseableIterator<VariantContext> secondCopyVariantIterator = firstCopyReader.iterator();
+        while (secondCopyVariantIterator.hasNext()) {
+            VariantContext variantContext = secondCopyVariantIterator.next();
+            secondCopyWriter.add(variantContext);
+        }
+        secondCopyWriter.close();
+
+        // read the second copy back in and verify that the two files have the same header line
+        final VCFFileReader secondCopyReader = new VCFFileReader(secondCopyVCFFile, false);
+        final VCFHeader secondCopyHeader = secondCopyReader.getFileHeader();
+
+        final VCFHeaderLine secondCopyNewHeaderLine = secondCopyHeader.getOtherHeaderLine("GATKCommandLine.Test");
+        Assert.assertNotNull(secondCopyNewHeaderLine);
+
+        final VCFFilterHeaderLine secondCopyAnnotationLine1 = secondCopyHeader.getFilterHeaderLine("ANNOTATION");
+        Assert.assertNotNull(secondCopyAnnotationLine1);
+
+        final VCFFilterHeaderLine secondCopyAnnotationLine2 = secondCopyHeader.getFilterHeaderLine("ANNOTATION2");
+        Assert.assertNotNull(secondCopyAnnotationLine2);
+
+        Assert.assertEquals(firstCopyNewHeaderLine, secondCopyNewHeaderLine);
+        Assert.assertEquals(firstCopyNewHeaderLine.toStringEncoding(), "GATKCommandLine.Test=<ID=VariantFiltration,CommandLineOptions=\"filterName=[ANNOTATION] filterExpression=[ANNOTATION == \\\"NA\\\" || ANNOTATION <= 2.0]\">");
+        Assert.assertEquals(secondCopyNewHeaderLine.toStringEncoding(), "GATKCommandLine.Test=<ID=VariantFiltration,CommandLineOptions=\"filterName=[ANNOTATION] filterExpression=[ANNOTATION == \\\"NA\\\" || ANNOTATION <= 2.0]\">");
+
+        Assert.assertEquals(firstCopyAnnotationLine1, secondCopyAnnotationLine1);
+        Assert.assertEquals(secondCopyAnnotationLine1.getGenericFieldValue("Description"), "ANNOTATION != \"NA\" || ANNOTATION <= 0.01");
+        Assert.assertEquals(firstCopyAnnotationLine2, secondCopyAnnotationLine2);
+        Assert.assertEquals(secondCopyAnnotationLine2.getGenericFieldValue("Description"), "ANNOTATION with quote \" that is unmatched but escaped");
+
+        final VCFInfoHeaderLine secondCopyEscapingQuoteInfoLine = secondCopyHeader.getInfoHeaderLine("EscapingQuote");
+        Assert.assertNotNull(secondCopyEscapingQuoteInfoLine);
+        Assert.assertEquals(secondCopyEscapingQuoteInfoLine.getDescription(), "This description has an escaped \" quote in it");
+
+        final VCFInfoHeaderLine secondCopyEscapingBackslashInfoLine = secondCopyHeader.getInfoHeaderLine("EscapingBackslash");
+        Assert.assertNotNull(secondCopyEscapingBackslashInfoLine);
+        Assert.assertEquals(secondCopyEscapingBackslashInfoLine.getDescription(), "This description has an escaped \\ backslash in it");
+
+        final VCFInfoHeaderLine secondCopyEscapingNonQuoteOrBackslashInfoLine = secondCopyHeader.getInfoHeaderLine("EscapingNonQuoteOrBackslash");
+        Assert.assertNotNull(secondCopyEscapingNonQuoteOrBackslashInfoLine);
+        Assert.assertEquals(secondCopyEscapingNonQuoteOrBackslashInfoLine.getDescription(), "This other value has a \\n newline in it");
+
+        firstCopyReader.close();
+        secondCopyReader.close();
+
+    }
+
+    /**
+     * A small utility function used by these tests to compute the MD5 checksum of a file.
+     * Shamelessly taken from:
+     * <p/>
+     * http://www.javalobby.org/java/forums/t84420.html
+     *
+     * @param file the file to checksum
+     * @return the MD5 checksum as a hex string
+     */
+    private static String md5SumFile(File file) {
+        MessageDigest digest;
+        try {
+            digest = MessageDigest.getInstance("MD5");
+        } catch (NoSuchAlgorithmException e) {
+            throw new RuntimeException("Unable to find MD5 digest", e);
+        }
+        InputStream is;
+        try {
+            is = new FileInputStream(file);
+        } catch (FileNotFoundException e) {
+            throw new RuntimeException("Unable to open file " + file, e);
+        }
+        byte[] buffer = new byte[8192];
+        int read;
+        try {
+            while ((read = is.read(buffer)) > 0) {
+                digest.update(buffer, 0, read);
+            }
+            byte[] md5sum = digest.digest();
+            BigInteger bigInt = new BigInteger(1, md5sum);
+            return bigInt.toString(16);
+
+        } catch (IOException e) {
+            throw new RuntimeException("Unable to process file for MD5", e);
+        } finally {
+            try {
+                is.close();
+            } catch (IOException e) {
+                throw new RuntimeException("Unable to close input stream for MD5 calculation", e);
+            }
+        }
+    }
+
+    private void checkMD5ofHeaderFile(VCFHeader header, String md5sum) {
+        File myTempFile = null;
+        PrintWriter pw = null;
+        try {
+            myTempFile = File.createTempFile("VCFHeader", "vcf");
+            myTempFile.deleteOnExit();
+            pw = new PrintWriter(myTempFile);
+        } catch (IOException e) {
+            Assert.fail("Unable to make a temp file!");
+        }
+        for (VCFHeaderLine line : header.getMetaDataInSortedOrder())
+            pw.println(line);
+        pw.close();
+        Assert.assertEquals(md5SumFile(myTempFile), md5sum);
+    }
+
+    public static int VCF4headerStringCount = 16;
+
+    public static String VCF4headerStrings =
+            "##fileformat=VCFv4.2\n" +
+                    "##filedate=2010-06-21\n" +
+                    "##reference=NCBI36\n" +
+                    "##INFO=<ID=GC, Number=0, Type=Flag, Description=\"Overlap with Gencode CCDS coding sequence\">\n" +
+                    "##INFO=<ID=DP, Number=1, Type=Integer, Description=\"Total number of reads in haplotype window\">\n" +
+                    "##INFO=<ID=AF, Number=A, Type=Float, Description=\"Dindel estimated population allele frequency\">\n" +
+                    "##INFO=<ID=CA, Number=1, Type=String, Description=\"Pilot 1 callability mask\">\n" +
+                    "##INFO=<ID=HP, Number=1, Type=Integer, Description=\"Reference homopolymer tract length\">\n" +
+                    "##INFO=<ID=NS, Number=1, Type=Integer, Description=\"Number of samples with data\">\n" +
+                    "##INFO=<ID=DB, Number=0, Type=Flag, Description=\"dbSNP membership build 129 - type match and indel sequence length match within 25 bp\">\n" +
+                    "##INFO=<ID=NR, Number=1, Type=Integer, Description=\"Number of reads covering non-ref variant on reverse strand\">\n" +
+                    "##INFO=<ID=NF, Number=1, Type=Integer, Description=\"Number of reads covering non-ref variant on forward strand\">\n" +
+                    "##FILTER=<ID=NoQCALL, Description=\"Variant called by Dindel but not confirmed by QCALL\">\n" +
+                    "##FORMAT=<ID=GT, Number=1, Type=String, Description=\"Genotype\">\n" +
+                    "##FORMAT=<ID=HQ, Number=2, Type=Integer, Description=\"Haplotype quality\">\n" +
+                    "##FORMAT=<ID=GQ, Number=1, Type=Integer, Description=\"Genotype quality\">\n" +
+                    "#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n";
+
+
+    public static String VCF4headerStrings_with_negativeOne =
+            "##fileformat=VCFv4.2\n" +
+                    "##filedate=2010-06-21\n" +
+                    "##reference=NCBI36\n" +
+                    "##INFO=<ID=GC, Number=0, Type=Flag, Description=\"Overlap with Gencode CCDS coding sequence\">\n" +
+                    "##INFO=<ID=YY, Number=., Type=Integer, Description=\"Some weird value that has lots of parameters\">\n" +
+                    "##INFO=<ID=AF, Number=A, Type=Float, Description=\"Dindel estimated population allele frequency\">\n" +
+                    "##INFO=<ID=CA, Number=1, Type=String, Description=\"Pilot 1 callability mask\">\n" +
+                    "##INFO=<ID=HP, Number=1, Type=Integer, Description=\"Reference homopolymer tract length\">\n" +
+                    "##INFO=<ID=NS, Number=1, Type=Integer, Description=\"Number of samples with data\">\n" +
+                    "##INFO=<ID=DB, Number=0, Type=Flag, Description=\"dbSNP membership build 129 - type match and indel sequence length match within 25 bp\">\n" +
+                    "##INFO=<ID=NR, Number=1, Type=Integer, Description=\"Number of reads covering non-ref variant on reverse strand\">\n" +
+                    "##INFO=<ID=NF, Number=1, Type=Integer, Description=\"Number of reads covering non-ref variant on forward strand\">\n" +
+                    "##FILTER=<ID=NoQCALL, Description=\"Variant called by Dindel but not confirmed by QCALL\">\n" +
+                    "##FORMAT=<ID=GT, Number=1, Type=String, Description=\"Genotype\">\n" +
+                    "##FORMAT=<ID=HQ, Number=2, Type=Integer, Description=\"Haplotype quality\">\n" +
+                    "##FORMAT=<ID=TT, Number=., Type=Integer, Description=\"Lots of TTs\">\n" +
+                    "#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n";
+
+}
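
As a complement to the header tests above, here is a minimal sketch of the add-a-line-and-write-it-back flow they exercise. The input file name and class name are hypothetical; the calls (addMetaDataLine, the single-argument VCFFilterHeaderLine constructor, VariantContextWriterBuilder with ALLOW_MISSING_FIELDS_IN_HEADER, writeHeader) are the same ones used in the tests, and INDEX_ON_THE_FLY is deliberately left out of the option set so no sequence dictionary is needed.

    import htsjdk.variant.variantcontext.writer.Options;
    import htsjdk.variant.variantcontext.writer.VariantContextWriter;
    import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder;
    import htsjdk.variant.vcf.VCFFileReader;
    import htsjdk.variant.vcf.VCFFilterHeaderLine;
    import htsjdk.variant.vcf.VCFHeader;

    import java.io.File;
    import java.io.IOException;
    import java.util.EnumSet;

    public class HeaderRoundTripSketch {
        public static void main(String[] args) throws IOException {
            // hypothetical input; any small VCF will do
            final VCFFileReader reader = new VCFFileReader(new File("input.vcf"), false);
            final VCFHeader header = reader.getFileHeader();
            reader.close();

            // add a filter line, as testVCFHeaderAddFilterLine does
            header.addMetaDataLine(new VCFFilterHeaderLine("MyFilter"));

            // write the modified header to a temporary copy of the file
            final File out = File.createTempFile("header-roundtrip", ".vcf");
            out.deleteOnExit();
            final VariantContextWriter writer = new VariantContextWriterBuilder()
                    .setOutputFile(out)
                    .setOptions(EnumSet.of(Options.ALLOW_MISSING_FIELDS_IN_HEADER))
                    .build();
            writer.writeHeader(header);
            writer.close();
        }
    }
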
diff --git a/src/tests/java/htsjdk/variant/vcf/VCFStandardHeaderLinesUnitTest.java b/src/test/java/htsjdk/variant/vcf/VCFStandardHeaderLinesUnitTest.java
similarity index 100%
rename from src/tests/java/htsjdk/variant/vcf/VCFStandardHeaderLinesUnitTest.java
rename to src/test/java/htsjdk/variant/vcf/VCFStandardHeaderLinesUnitTest.java
diff --git a/testdata/htsjdk/samtools/BAMCigarOverflowTest/CigarOverflowTest.bam b/src/test/resources/htsjdk/samtools/BAMCigarOverflowTest/CigarOverflowTest.bam
similarity index 100%
rename from testdata/htsjdk/samtools/BAMCigarOverflowTest/CigarOverflowTest.bam
rename to src/test/resources/htsjdk/samtools/BAMCigarOverflowTest/CigarOverflowTest.bam
diff --git a/testdata/htsjdk/samtools/BAMFileIndexTest/index_test.bam b/src/test/resources/htsjdk/samtools/BAMFileIndexTest/index_test.bam
similarity index 100%
rename from testdata/htsjdk/samtools/BAMFileIndexTest/index_test.bam
rename to src/test/resources/htsjdk/samtools/BAMFileIndexTest/index_test.bam
diff --git a/testdata/htsjdk/samtools/BAMFileIndexTest/index_test.bam.bai b/src/test/resources/htsjdk/samtools/BAMFileIndexTest/index_test.bam.bai
similarity index 100%
rename from testdata/htsjdk/samtools/BAMFileIndexTest/index_test.bam.bai
rename to src/test/resources/htsjdk/samtools/BAMFileIndexTest/index_test.bam.bai
diff --git a/testdata/htsjdk/samtools/BAMFileIndexTest/symlink_with_index.bam b/src/test/resources/htsjdk/samtools/BAMFileIndexTest/symlink_with_index.bam
similarity index 100%
rename from testdata/htsjdk/samtools/BAMFileIndexTest/symlink_with_index.bam
rename to src/test/resources/htsjdk/samtools/BAMFileIndexTest/symlink_with_index.bam
diff --git a/testdata/htsjdk/samtools/BAMFileIndexTest/symlink_with_index.bam.bai b/src/test/resources/htsjdk/samtools/BAMFileIndexTest/symlink_with_index.bam.bai
similarity index 100%
rename from testdata/htsjdk/samtools/BAMFileIndexTest/symlink_with_index.bam.bai
rename to src/test/resources/htsjdk/samtools/BAMFileIndexTest/symlink_with_index.bam.bai
diff --git a/testdata/htsjdk/samtools/BAMFileIndexTest/symlink_without_linked_index.bam b/src/test/resources/htsjdk/samtools/BAMFileIndexTest/symlink_without_linked_index.bam
similarity index 100%
rename from testdata/htsjdk/samtools/BAMFileIndexTest/symlink_without_linked_index.bam
rename to src/test/resources/htsjdk/samtools/BAMFileIndexTest/symlink_without_linked_index.bam
diff --git a/testdata/htsjdk/samtools/SAMIntegerTagTest/variousAttributes.bam b/src/test/resources/htsjdk/samtools/SAMIntegerTagTest/variousAttributes.bam
similarity index 100%
rename from testdata/htsjdk/samtools/SAMIntegerTagTest/variousAttributes.bam
rename to src/test/resources/htsjdk/samtools/SAMIntegerTagTest/variousAttributes.bam
diff --git a/testdata/htsjdk/samtools/SAMIntegerTagTest/variousAttributes.sam b/src/test/resources/htsjdk/samtools/SAMIntegerTagTest/variousAttributes.sam
similarity index 100%
rename from testdata/htsjdk/samtools/SAMIntegerTagTest/variousAttributes.sam
rename to src/test/resources/htsjdk/samtools/SAMIntegerTagTest/variousAttributes.sam
diff --git a/testdata/htsjdk/samtools/SamFileHeaderMergerTest/Chromosome1to10.bam b/src/test/resources/htsjdk/samtools/SamFileHeaderMergerTest/Chromosome1to10.bam
similarity index 100%
rename from testdata/htsjdk/samtools/SamFileHeaderMergerTest/Chromosome1to10.bam
rename to src/test/resources/htsjdk/samtools/SamFileHeaderMergerTest/Chromosome1to10.bam
diff --git a/testdata/htsjdk/samtools/SamFileHeaderMergerTest/Chromosome5to9.bam b/src/test/resources/htsjdk/samtools/SamFileHeaderMergerTest/Chromosome5to9.bam
similarity index 100%
rename from testdata/htsjdk/samtools/SamFileHeaderMergerTest/Chromosome5to9.bam
rename to src/test/resources/htsjdk/samtools/SamFileHeaderMergerTest/Chromosome5to9.bam
diff --git a/testdata/htsjdk/samtools/SamFileHeaderMergerTest/case1/chr11sub_file1.sam b/src/test/resources/htsjdk/samtools/SamFileHeaderMergerTest/case1/chr11sub_file1.sam
similarity index 100%
rename from testdata/htsjdk/samtools/SamFileHeaderMergerTest/case1/chr11sub_file1.sam
rename to src/test/resources/htsjdk/samtools/SamFileHeaderMergerTest/case1/chr11sub_file1.sam
diff --git a/testdata/htsjdk/samtools/SamFileHeaderMergerTest/case1/chr11sub_file2.sam b/src/test/resources/htsjdk/samtools/SamFileHeaderMergerTest/case1/chr11sub_file2.sam
similarity index 100%
rename from testdata/htsjdk/samtools/SamFileHeaderMergerTest/case1/chr11sub_file2.sam
rename to src/test/resources/htsjdk/samtools/SamFileHeaderMergerTest/case1/chr11sub_file2.sam
diff --git a/testdata/htsjdk/samtools/SamFileHeaderMergerTest/case1/expected_output.sam b/src/test/resources/htsjdk/samtools/SamFileHeaderMergerTest/case1/expected_output.sam
similarity index 100%
rename from testdata/htsjdk/samtools/SamFileHeaderMergerTest/case1/expected_output.sam
rename to src/test/resources/htsjdk/samtools/SamFileHeaderMergerTest/case1/expected_output.sam
diff --git a/testdata/htsjdk/samtools/SamFileHeaderMergerTest/case2/chr11sub_file1.sam b/src/test/resources/htsjdk/samtools/SamFileHeaderMergerTest/case2/chr11sub_file1.sam
similarity index 100%
rename from testdata/htsjdk/samtools/SamFileHeaderMergerTest/case2/chr11sub_file1.sam
rename to src/test/resources/htsjdk/samtools/SamFileHeaderMergerTest/case2/chr11sub_file1.sam
diff --git a/testdata/htsjdk/samtools/SamFileHeaderMergerTest/case2/chr11sub_file2.sam b/src/test/resources/htsjdk/samtools/SamFileHeaderMergerTest/case2/chr11sub_file2.sam
similarity index 100%
rename from testdata/htsjdk/samtools/SamFileHeaderMergerTest/case2/chr11sub_file2.sam
rename to src/test/resources/htsjdk/samtools/SamFileHeaderMergerTest/case2/chr11sub_file2.sam
diff --git a/testdata/htsjdk/samtools/SamFileHeaderMergerTest/case2/chr11sub_file3.sam b/src/test/resources/htsjdk/samtools/SamFileHeaderMergerTest/case2/chr11sub_file3.sam
similarity index 100%
rename from testdata/htsjdk/samtools/SamFileHeaderMergerTest/case2/chr11sub_file3.sam
rename to src/test/resources/htsjdk/samtools/SamFileHeaderMergerTest/case2/chr11sub_file3.sam
diff --git a/testdata/htsjdk/samtools/SamFileHeaderMergerTest/case2/chr11sub_file4.sam b/src/test/resources/htsjdk/samtools/SamFileHeaderMergerTest/case2/chr11sub_file4.sam
similarity index 100%
rename from testdata/htsjdk/samtools/SamFileHeaderMergerTest/case2/chr11sub_file4.sam
rename to src/test/resources/htsjdk/samtools/SamFileHeaderMergerTest/case2/chr11sub_file4.sam
diff --git a/testdata/htsjdk/samtools/SamFileHeaderMergerTest/case2/expected_output.sam b/src/test/resources/htsjdk/samtools/SamFileHeaderMergerTest/case2/expected_output.sam
similarity index 100%
rename from testdata/htsjdk/samtools/SamFileHeaderMergerTest/case2/expected_output.sam
rename to src/test/resources/htsjdk/samtools/SamFileHeaderMergerTest/case2/expected_output.sam
diff --git a/testdata/htsjdk/samtools/SamFileHeaderMergerTest/unsorted_input/1.sam b/src/test/resources/htsjdk/samtools/SamFileHeaderMergerTest/unsorted_input/1.sam
similarity index 100%
rename from testdata/htsjdk/samtools/SamFileHeaderMergerTest/unsorted_input/1.sam
rename to src/test/resources/htsjdk/samtools/SamFileHeaderMergerTest/unsorted_input/1.sam
diff --git a/testdata/htsjdk/samtools/SamFileHeaderMergerTest/unsorted_input/2.sam b/src/test/resources/htsjdk/samtools/SamFileHeaderMergerTest/unsorted_input/2.sam
similarity index 100%
rename from testdata/htsjdk/samtools/SamFileHeaderMergerTest/unsorted_input/2.sam
rename to src/test/resources/htsjdk/samtools/SamFileHeaderMergerTest/unsorted_input/2.sam
diff --git a/testdata/htsjdk/samtools/ValidateSamFileTest/bad_index.bai b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/bad_index.bai
similarity index 100%
rename from testdata/htsjdk/samtools/ValidateSamFileTest/bad_index.bai
rename to src/test/resources/htsjdk/samtools/ValidateSamFileTest/bad_index.bai
diff --git a/testdata/htsjdk/samtools/ValidateSamFileTest/bad_index.bam b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/bad_index.bam
similarity index 100%
rename from testdata/htsjdk/samtools/ValidateSamFileTest/bad_index.bam
rename to src/test/resources/htsjdk/samtools/ValidateSamFileTest/bad_index.bam
diff --git a/testdata/htsjdk/samtools/ValidateSamFileTest/buggyHeader.sam b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/buggyHeader.sam
similarity index 100%
rename from testdata/htsjdk/samtools/ValidateSamFileTest/buggyHeader.sam
rename to src/test/resources/htsjdk/samtools/ValidateSamFileTest/buggyHeader.sam
diff --git a/testdata/htsjdk/samtools/ValidateSamFileTest/duplicate_rg.sam b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/duplicate_rg.sam
similarity index 100%
rename from testdata/htsjdk/samtools/ValidateSamFileTest/duplicate_rg.sam
rename to src/test/resources/htsjdk/samtools/ValidateSamFileTest/duplicate_rg.sam
diff --git a/testdata/htsjdk/samtools/ValidateSamFileTest/duplicated_reads.sam b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/duplicated_reads.sam
similarity index 100%
rename from testdata/htsjdk/samtools/ValidateSamFileTest/duplicated_reads.sam
rename to src/test/resources/htsjdk/samtools/ValidateSamFileTest/duplicated_reads.sam
diff --git a/testdata/htsjdk/samtools/ValidateSamFileTest/duplicated_reads_out_of_order.sam b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/duplicated_reads_out_of_order.sam
similarity index 100%
rename from testdata/htsjdk/samtools/ValidateSamFileTest/duplicated_reads_out_of_order.sam
rename to src/test/resources/htsjdk/samtools/ValidateSamFileTest/duplicated_reads_out_of_order.sam
diff --git a/testdata/htsjdk/samtools/ValidateSamFileTest/hanging_tab.sam b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/hanging_tab.sam
similarity index 100%
rename from testdata/htsjdk/samtools/ValidateSamFileTest/hanging_tab.sam
rename to src/test/resources/htsjdk/samtools/ValidateSamFileTest/hanging_tab.sam
diff --git a/testdata/htsjdk/samtools/ValidateSamFileTest/inappropriate_mate_cigar_string.sam b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/inappropriate_mate_cigar_string.sam
similarity index 100%
rename from testdata/htsjdk/samtools/ValidateSamFileTest/inappropriate_mate_cigar_string.sam
rename to src/test/resources/htsjdk/samtools/ValidateSamFileTest/inappropriate_mate_cigar_string.sam
diff --git a/testdata/htsjdk/samtools/ValidateSamFileTest/invalid_coord_sort_order.sam b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/invalid_coord_sort_order.sam
similarity index 100%
rename from testdata/htsjdk/samtools/ValidateSamFileTest/invalid_coord_sort_order.sam
rename to src/test/resources/htsjdk/samtools/ValidateSamFileTest/invalid_coord_sort_order.sam
diff --git a/testdata/htsjdk/samtools/ValidateSamFileTest/invalid_mate_cigar_string.sam b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/invalid_mate_cigar_string.sam
similarity index 100%
rename from testdata/htsjdk/samtools/ValidateSamFileTest/invalid_mate_cigar_string.sam
rename to src/test/resources/htsjdk/samtools/ValidateSamFileTest/invalid_mate_cigar_string.sam
diff --git a/testdata/htsjdk/samtools/ValidateSamFileTest/invalid_platform_unit.sam b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/invalid_platform_unit.sam
similarity index 100%
rename from testdata/htsjdk/samtools/ValidateSamFileTest/invalid_platform_unit.sam
rename to src/test/resources/htsjdk/samtools/ValidateSamFileTest/invalid_platform_unit.sam
diff --git a/testdata/htsjdk/samtools/ValidateSamFileTest/invalid_queryname_sort_order.sam b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/invalid_queryname_sort_order.sam
similarity index 100%
rename from testdata/htsjdk/samtools/ValidateSamFileTest/invalid_queryname_sort_order.sam
rename to src/test/resources/htsjdk/samtools/ValidateSamFileTest/invalid_queryname_sort_order.sam
diff --git a/testdata/htsjdk/samtools/ValidateSamFileTest/mate_cigar_and_no_oqs.sam b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/mate_cigar_and_no_oqs.sam
similarity index 100%
rename from testdata/htsjdk/samtools/ValidateSamFileTest/mate_cigar_and_no_oqs.sam
rename to src/test/resources/htsjdk/samtools/ValidateSamFileTest/mate_cigar_and_no_oqs.sam
diff --git a/testdata/htsjdk/samtools/ValidateSamFileTest/mate_cigar_and_oqs.sam b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/mate_cigar_and_oqs.sam
similarity index 100%
rename from testdata/htsjdk/samtools/ValidateSamFileTest/mate_cigar_and_oqs.sam
rename to src/test/resources/htsjdk/samtools/ValidateSamFileTest/mate_cigar_and_oqs.sam
diff --git a/testdata/htsjdk/samtools/ValidateSamFileTest/missing_fields.sam b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/missing_fields.sam
similarity index 100%
rename from testdata/htsjdk/samtools/ValidateSamFileTest/missing_fields.sam
rename to src/test/resources/htsjdk/samtools/ValidateSamFileTest/missing_fields.sam
diff --git a/testdata/htsjdk/samtools/ValidateSamFileTest/missing_platform_unit.sam b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/missing_platform_unit.sam
similarity index 100%
rename from testdata/htsjdk/samtools/ValidateSamFileTest/missing_platform_unit.sam
rename to src/test/resources/htsjdk/samtools/ValidateSamFileTest/missing_platform_unit.sam
diff --git a/testdata/htsjdk/samtools/ValidateSamFileTest/test_samfile_version_1pt5.bam b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/test_samfile_version_1pt5.bam
similarity index 100%
rename from testdata/htsjdk/samtools/ValidateSamFileTest/test_samfile_version_1pt5.bam
rename to src/test/resources/htsjdk/samtools/ValidateSamFileTest/test_samfile_version_1pt5.bam
diff --git a/testdata/htsjdk/samtools/ValidateSamFileTest/truncated.bam b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/truncated.bam
similarity index 100%
rename from testdata/htsjdk/samtools/ValidateSamFileTest/truncated.bam
rename to src/test/resources/htsjdk/samtools/ValidateSamFileTest/truncated.bam
diff --git a/testdata/htsjdk/samtools/ValidateSamFileTest/truncated_quals.sam b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/truncated_quals.sam
similarity index 100%
rename from testdata/htsjdk/samtools/ValidateSamFileTest/truncated_quals.sam
rename to src/test/resources/htsjdk/samtools/ValidateSamFileTest/truncated_quals.sam
diff --git a/testdata/htsjdk/samtools/ValidateSamFileTest/truncated_tag.sam b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/truncated_tag.sam
similarity index 100%
rename from testdata/htsjdk/samtools/ValidateSamFileTest/truncated_tag.sam
rename to src/test/resources/htsjdk/samtools/ValidateSamFileTest/truncated_tag.sam
diff --git a/testdata/htsjdk/samtools/ValidateSamFileTest/valid.sam b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/valid.sam
similarity index 100%
rename from testdata/htsjdk/samtools/ValidateSamFileTest/valid.sam
rename to src/test/resources/htsjdk/samtools/ValidateSamFileTest/valid.sam
diff --git a/testdata/htsjdk/samtools/ValidateSamFileTest/zero_length_read.sam b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/zero_length_read.sam
similarity index 100%
rename from testdata/htsjdk/samtools/ValidateSamFileTest/zero_length_read.sam
rename to src/test/resources/htsjdk/samtools/ValidateSamFileTest/zero_length_read.sam
diff --git a/testdata/htsjdk/samtools/block_compressed.sam.gz b/src/test/resources/htsjdk/samtools/block_compressed.sam.gz
similarity index 100%
rename from testdata/htsjdk/samtools/block_compressed.sam.gz
rename to src/test/resources/htsjdk/samtools/block_compressed.sam.gz
diff --git a/testdata/htsjdk/samtools/compressed.bam b/src/test/resources/htsjdk/samtools/compressed.bam
similarity index 100%
rename from testdata/htsjdk/samtools/compressed.bam
rename to src/test/resources/htsjdk/samtools/compressed.bam
diff --git a/testdata/htsjdk/samtools/compressed.sam.gz b/src/test/resources/htsjdk/samtools/compressed.sam.gz
similarity index 100%
rename from testdata/htsjdk/samtools/compressed.sam.gz
rename to src/test/resources/htsjdk/samtools/compressed.sam.gz
diff --git a/testdata/htsjdk/samtools/coordinate_sorted.sam b/src/test/resources/htsjdk/samtools/coordinate_sorted.sam
similarity index 100%
rename from testdata/htsjdk/samtools/coordinate_sorted.sam
rename to src/test/resources/htsjdk/samtools/coordinate_sorted.sam
diff --git a/testdata/htsjdk/samtools/cram/CRAMException/testContigNotInRef.cram b/src/test/resources/htsjdk/samtools/cram/CRAMException/testContigNotInRef.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/CRAMException/testContigNotInRef.cram
rename to src/test/resources/htsjdk/samtools/cram/CRAMException/testContigNotInRef.cram
diff --git a/testdata/htsjdk/samtools/cram/CRAMException/testContigNotInRef.cram.bai b/src/test/resources/htsjdk/samtools/cram/CRAMException/testContigNotInRef.cram.bai
similarity index 100%
rename from testdata/htsjdk/samtools/cram/CRAMException/testContigNotInRef.cram.bai
rename to src/test/resources/htsjdk/samtools/cram/CRAMException/testContigNotInRef.cram.bai
diff --git a/testdata/htsjdk/samtools/cram/CRAMException/testContigNotInRef.dict b/src/test/resources/htsjdk/samtools/cram/CRAMException/testContigNotInRef.dict
similarity index 100%
rename from testdata/htsjdk/samtools/cram/CRAMException/testContigNotInRef.dict
rename to src/test/resources/htsjdk/samtools/cram/CRAMException/testContigNotInRef.dict
diff --git a/testdata/htsjdk/samtools/cram/CRAMException/testContigNotInRef.fa b/src/test/resources/htsjdk/samtools/cram/CRAMException/testContigNotInRef.fa
similarity index 100%
rename from testdata/htsjdk/samtools/cram/CRAMException/testContigNotInRef.fa
rename to src/test/resources/htsjdk/samtools/cram/CRAMException/testContigNotInRef.fa
diff --git a/testdata/htsjdk/samtools/cram/CRAMException/testContigNotInRef.fa.fai b/src/test/resources/htsjdk/samtools/cram/CRAMException/testContigNotInRef.fa.fai
similarity index 100%
rename from testdata/htsjdk/samtools/cram/CRAMException/testContigNotInRef.fa.fai
rename to src/test/resources/htsjdk/samtools/cram/CRAMException/testContigNotInRef.fa.fai
diff --git a/testdata/htsjdk/samtools/cram/CRAMException/testContigNotInRef.fasta b/src/test/resources/htsjdk/samtools/cram/CRAMException/testContigNotInRef.fasta
similarity index 100%
rename from testdata/htsjdk/samtools/cram/CRAMException/testContigNotInRef.fasta
rename to src/test/resources/htsjdk/samtools/cram/CRAMException/testContigNotInRef.fasta
diff --git a/src/test/resources/htsjdk/samtools/cram/NA12878.20.21.1-100.100-SeqsPerSlice.0-unMapped.cram b/src/test/resources/htsjdk/samtools/cram/NA12878.20.21.1-100.100-SeqsPerSlice.0-unMapped.cram
new file mode 100644
index 0000000..96ca860
Binary files /dev/null and b/src/test/resources/htsjdk/samtools/cram/NA12878.20.21.1-100.100-SeqsPerSlice.0-unMapped.cram differ
diff --git a/src/test/resources/htsjdk/samtools/cram/NA12878.20.21.1-100.100-SeqsPerSlice.0-unMapped.cram.crai b/src/test/resources/htsjdk/samtools/cram/NA12878.20.21.1-100.100-SeqsPerSlice.0-unMapped.cram.crai
new file mode 100644
index 0000000..1ca5df2
Binary files /dev/null and b/src/test/resources/htsjdk/samtools/cram/NA12878.20.21.1-100.100-SeqsPerSlice.0-unMapped.cram.crai differ
diff --git a/src/test/resources/htsjdk/samtools/cram/NA12878.20.21.1-100.100-SeqsPerSlice.1-unMapped.cram b/src/test/resources/htsjdk/samtools/cram/NA12878.20.21.1-100.100-SeqsPerSlice.1-unMapped.cram
new file mode 100644
index 0000000..4c74ab9
Binary files /dev/null and b/src/test/resources/htsjdk/samtools/cram/NA12878.20.21.1-100.100-SeqsPerSlice.1-unMapped.cram differ
diff --git a/src/test/resources/htsjdk/samtools/cram/NA12878.20.21.1-100.100-SeqsPerSlice.1-unMapped.cram.crai b/src/test/resources/htsjdk/samtools/cram/NA12878.20.21.1-100.100-SeqsPerSlice.1-unMapped.cram.crai
new file mode 100644
index 0000000..3fee0e2
Binary files /dev/null and b/src/test/resources/htsjdk/samtools/cram/NA12878.20.21.1-100.100-SeqsPerSlice.1-unMapped.cram.crai differ
diff --git a/src/test/resources/htsjdk/samtools/cram/NA12878.20.21.1-100.100-SeqsPerSlice.500-unMapped.cram b/src/test/resources/htsjdk/samtools/cram/NA12878.20.21.1-100.100-SeqsPerSlice.500-unMapped.cram
new file mode 100644
index 0000000..fbfbbf7
Binary files /dev/null and b/src/test/resources/htsjdk/samtools/cram/NA12878.20.21.1-100.100-SeqsPerSlice.500-unMapped.cram differ
diff --git a/src/test/resources/htsjdk/samtools/cram/NA12878.20.21.1-100.100-SeqsPerSlice.500-unMapped.cram.crai b/src/test/resources/htsjdk/samtools/cram/NA12878.20.21.1-100.100-SeqsPerSlice.500-unMapped.cram.crai
new file mode 100644
index 0000000..b50f3ce
Binary files /dev/null and b/src/test/resources/htsjdk/samtools/cram/NA12878.20.21.1-100.100-SeqsPerSlice.500-unMapped.cram.crai differ
diff --git a/testdata/htsjdk/samtools/cram/auxf#values.2.1.cram b/src/test/resources/htsjdk/samtools/cram/auxf#values.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/auxf#values.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/auxf#values.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/auxf#values.3.0.cram b/src/test/resources/htsjdk/samtools/cram/auxf#values.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/auxf#values.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/auxf#values.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/auxf#values.sam b/src/test/resources/htsjdk/samtools/cram/auxf#values.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/auxf#values.sam
rename to src/test/resources/htsjdk/samtools/cram/auxf#values.sam
diff --git a/testdata/htsjdk/samtools/cram/auxf.fa b/src/test/resources/htsjdk/samtools/cram/auxf.fa
similarity index 100%
rename from testdata/htsjdk/samtools/cram/auxf.fa
rename to src/test/resources/htsjdk/samtools/cram/auxf.fa
diff --git a/testdata/htsjdk/samtools/cram/auxf.fa.fai b/src/test/resources/htsjdk/samtools/cram/auxf.fa.fai
similarity index 100%
rename from testdata/htsjdk/samtools/cram/auxf.fa.fai
rename to src/test/resources/htsjdk/samtools/cram/auxf.fa.fai
diff --git a/testdata/htsjdk/samtools/cram/auxf.fasta b/src/test/resources/htsjdk/samtools/cram/auxf.fasta
similarity index 100%
rename from testdata/htsjdk/samtools/cram/auxf.fasta
rename to src/test/resources/htsjdk/samtools/cram/auxf.fasta
diff --git a/testdata/htsjdk/samtools/cram/c1#bounds.2.1.cram b/src/test/resources/htsjdk/samtools/cram/c1#bounds.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/c1#bounds.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/c1#bounds.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/c1#bounds.3.0.cram b/src/test/resources/htsjdk/samtools/cram/c1#bounds.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/c1#bounds.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/c1#bounds.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/c1#bounds.sam b/src/test/resources/htsjdk/samtools/cram/c1#bounds.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/c1#bounds.sam
rename to src/test/resources/htsjdk/samtools/cram/c1#bounds.sam
diff --git a/testdata/htsjdk/samtools/cram/c1#clip.2.1.cram b/src/test/resources/htsjdk/samtools/cram/c1#clip.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/c1#clip.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/c1#clip.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/c1#clip.3.0.cram b/src/test/resources/htsjdk/samtools/cram/c1#clip.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/c1#clip.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/c1#clip.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/c1#clip.sam b/src/test/resources/htsjdk/samtools/cram/c1#clip.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/c1#clip.sam
rename to src/test/resources/htsjdk/samtools/cram/c1#clip.sam
diff --git a/testdata/htsjdk/samtools/cram/c1#noseq.2.1.cram b/src/test/resources/htsjdk/samtools/cram/c1#noseq.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/c1#noseq.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/c1#noseq.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/c1#noseq.3.0.cram b/src/test/resources/htsjdk/samtools/cram/c1#noseq.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/c1#noseq.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/c1#noseq.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/c1#noseq.sam b/src/test/resources/htsjdk/samtools/cram/c1#noseq.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/c1#noseq.sam
rename to src/test/resources/htsjdk/samtools/cram/c1#noseq.sam
diff --git a/testdata/htsjdk/samtools/cram/c1#pad1.2.1.cram b/src/test/resources/htsjdk/samtools/cram/c1#pad1.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/c1#pad1.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/c1#pad1.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/c1#pad1.3.0.cram b/src/test/resources/htsjdk/samtools/cram/c1#pad1.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/c1#pad1.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/c1#pad1.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/c1#pad1.sam b/src/test/resources/htsjdk/samtools/cram/c1#pad1.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/c1#pad1.sam
rename to src/test/resources/htsjdk/samtools/cram/c1#pad1.sam
diff --git a/testdata/htsjdk/samtools/cram/c1#pad2.2.1.cram b/src/test/resources/htsjdk/samtools/cram/c1#pad2.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/c1#pad2.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/c1#pad2.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/c1#pad2.3.0.cram b/src/test/resources/htsjdk/samtools/cram/c1#pad2.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/c1#pad2.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/c1#pad2.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/c1#pad2.sam b/src/test/resources/htsjdk/samtools/cram/c1#pad2.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/c1#pad2.sam
rename to src/test/resources/htsjdk/samtools/cram/c1#pad2.sam
diff --git a/testdata/htsjdk/samtools/cram/c1#pad3.2.1.cram b/src/test/resources/htsjdk/samtools/cram/c1#pad3.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/c1#pad3.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/c1#pad3.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/c1#pad3.3.0.cram b/src/test/resources/htsjdk/samtools/cram/c1#pad3.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/c1#pad3.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/c1#pad3.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/c1#pad3.sam b/src/test/resources/htsjdk/samtools/cram/c1#pad3.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/c1#pad3.sam
rename to src/test/resources/htsjdk/samtools/cram/c1#pad3.sam
diff --git a/testdata/htsjdk/samtools/cram/c1#unknown.2.1.cram b/src/test/resources/htsjdk/samtools/cram/c1#unknown.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/c1#unknown.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/c1#unknown.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/c1#unknown.3.0.cram b/src/test/resources/htsjdk/samtools/cram/c1#unknown.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/c1#unknown.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/c1#unknown.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/c1#unknown.sam b/src/test/resources/htsjdk/samtools/cram/c1#unknown.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/c1#unknown.sam
rename to src/test/resources/htsjdk/samtools/cram/c1#unknown.sam
diff --git a/testdata/htsjdk/samtools/cram/c1.fa b/src/test/resources/htsjdk/samtools/cram/c1.fa
similarity index 100%
rename from testdata/htsjdk/samtools/cram/c1.fa
rename to src/test/resources/htsjdk/samtools/cram/c1.fa
diff --git a/testdata/htsjdk/samtools/cram/c1.fa.fai b/src/test/resources/htsjdk/samtools/cram/c1.fa.fai
similarity index 100%
rename from testdata/htsjdk/samtools/cram/c1.fa.fai
rename to src/test/resources/htsjdk/samtools/cram/c1.fa.fai
diff --git a/testdata/htsjdk/samtools/cram/c2#pad.2.1.cram b/src/test/resources/htsjdk/samtools/cram/c2#pad.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/c2#pad.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/c2#pad.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/c2#pad.3.0.cram b/src/test/resources/htsjdk/samtools/cram/c2#pad.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/c2#pad.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/c2#pad.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/ce#1.2.1.cram b/src/test/resources/htsjdk/samtools/cram/ce#1.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#1.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/ce#1.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/ce#1.3.0.cram b/src/test/resources/htsjdk/samtools/cram/ce#1.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#1.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/ce#1.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/ce#1.sam b/src/test/resources/htsjdk/samtools/cram/ce#1.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#1.sam
rename to src/test/resources/htsjdk/samtools/cram/ce#1.sam
diff --git a/testdata/htsjdk/samtools/cram/ce#2.2.1.cram b/src/test/resources/htsjdk/samtools/cram/ce#2.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#2.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/ce#2.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/ce#2.3.0.cram b/src/test/resources/htsjdk/samtools/cram/ce#2.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#2.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/ce#2.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/ce#2.sam b/src/test/resources/htsjdk/samtools/cram/ce#2.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#2.sam
rename to src/test/resources/htsjdk/samtools/cram/ce#2.sam
diff --git a/testdata/htsjdk/samtools/cram/ce#5.2.1.cram b/src/test/resources/htsjdk/samtools/cram/ce#5.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#5.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/ce#5.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/ce#5.3.0.cram b/src/test/resources/htsjdk/samtools/cram/ce#5.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#5.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/ce#5.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/ce#5.sam b/src/test/resources/htsjdk/samtools/cram/ce#5.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#5.sam
rename to src/test/resources/htsjdk/samtools/cram/ce#5.sam
diff --git a/testdata/htsjdk/samtools/cram/ce#5b.2.1.cram b/src/test/resources/htsjdk/samtools/cram/ce#5b.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#5b.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/ce#5b.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/ce#5b.3.0.cram b/src/test/resources/htsjdk/samtools/cram/ce#5b.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#5b.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/ce#5b.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/ce#5b.sam b/src/test/resources/htsjdk/samtools/cram/ce#5b.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#5b.sam
rename to src/test/resources/htsjdk/samtools/cram/ce#5b.sam
diff --git a/testdata/htsjdk/samtools/cram/ce#large_seq.2.1.cram b/src/test/resources/htsjdk/samtools/cram/ce#large_seq.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#large_seq.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/ce#large_seq.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/ce#large_seq.3.0.cram b/src/test/resources/htsjdk/samtools/cram/ce#large_seq.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#large_seq.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/ce#large_seq.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/ce#large_seq.sam b/src/test/resources/htsjdk/samtools/cram/ce#large_seq.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#large_seq.sam
rename to src/test/resources/htsjdk/samtools/cram/ce#large_seq.sam
diff --git a/testdata/htsjdk/samtools/cram/ce#supp.2.1.cram b/src/test/resources/htsjdk/samtools/cram/ce#supp.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#supp.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/ce#supp.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/ce#supp.3.0.cram b/src/test/resources/htsjdk/samtools/cram/ce#supp.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#supp.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/ce#supp.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/ce#supp.sam b/src/test/resources/htsjdk/samtools/cram/ce#supp.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#supp.sam
rename to src/test/resources/htsjdk/samtools/cram/ce#supp.sam
diff --git a/testdata/htsjdk/samtools/cram/ce#tag_depadded.2.1.cram b/src/test/resources/htsjdk/samtools/cram/ce#tag_depadded.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#tag_depadded.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/ce#tag_depadded.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/ce#tag_depadded.3.0.cram b/src/test/resources/htsjdk/samtools/cram/ce#tag_depadded.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#tag_depadded.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/ce#tag_depadded.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/ce#tag_depadded.sam b/src/test/resources/htsjdk/samtools/cram/ce#tag_depadded.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#tag_depadded.sam
rename to src/test/resources/htsjdk/samtools/cram/ce#tag_depadded.sam
diff --git a/testdata/htsjdk/samtools/cram/ce#tag_padded.2.1.cram b/src/test/resources/htsjdk/samtools/cram/ce#tag_padded.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#tag_padded.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/ce#tag_padded.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/ce#tag_padded.3.0.cram b/src/test/resources/htsjdk/samtools/cram/ce#tag_padded.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#tag_padded.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/ce#tag_padded.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/ce#tag_padded.sam b/src/test/resources/htsjdk/samtools/cram/ce#tag_padded.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#tag_padded.sam
rename to src/test/resources/htsjdk/samtools/cram/ce#tag_padded.sam
diff --git a/testdata/htsjdk/samtools/cram/ce#unmap.2.1.cram b/src/test/resources/htsjdk/samtools/cram/ce#unmap.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#unmap.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/ce#unmap.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/ce#unmap.3.0.cram b/src/test/resources/htsjdk/samtools/cram/ce#unmap.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#unmap.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/ce#unmap.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/ce#unmap.sam b/src/test/resources/htsjdk/samtools/cram/ce#unmap.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#unmap.sam
rename to src/test/resources/htsjdk/samtools/cram/ce#unmap.sam
diff --git a/testdata/htsjdk/samtools/cram/ce#unmap1.2.1.cram b/src/test/resources/htsjdk/samtools/cram/ce#unmap1.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#unmap1.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/ce#unmap1.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/ce#unmap1.3.0.cram b/src/test/resources/htsjdk/samtools/cram/ce#unmap1.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#unmap1.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/ce#unmap1.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/ce#unmap1.sam b/src/test/resources/htsjdk/samtools/cram/ce#unmap1.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#unmap1.sam
rename to src/test/resources/htsjdk/samtools/cram/ce#unmap1.sam
diff --git a/testdata/htsjdk/samtools/cram/ce#unmap2.2.1.cram b/src/test/resources/htsjdk/samtools/cram/ce#unmap2.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#unmap2.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/ce#unmap2.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/ce#unmap2.3.0.cram b/src/test/resources/htsjdk/samtools/cram/ce#unmap2.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#unmap2.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/ce#unmap2.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/ce#unmap2.sam b/src/test/resources/htsjdk/samtools/cram/ce#unmap2.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce#unmap2.sam
rename to src/test/resources/htsjdk/samtools/cram/ce#unmap2.sam
diff --git a/testdata/htsjdk/samtools/cram/ce.fa b/src/test/resources/htsjdk/samtools/cram/ce.fa
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce.fa
rename to src/test/resources/htsjdk/samtools/cram/ce.fa
diff --git a/testdata/htsjdk/samtools/cram/ce.fa.fai b/src/test/resources/htsjdk/samtools/cram/ce.fa.fai
similarity index 100%
rename from testdata/htsjdk/samtools/cram/ce.fa.fai
rename to src/test/resources/htsjdk/samtools/cram/ce.fa.fai
diff --git a/src/test/resources/htsjdk/samtools/cram/cramQueryTest.cram b/src/test/resources/htsjdk/samtools/cram/cramQueryTest.cram
new file mode 100644
index 0000000..240df31
Binary files /dev/null and b/src/test/resources/htsjdk/samtools/cram/cramQueryTest.cram differ
diff --git a/src/test/resources/htsjdk/samtools/cram/cramQueryTest.cram.bai b/src/test/resources/htsjdk/samtools/cram/cramQueryTest.cram.bai
new file mode 100644
index 0000000..c168662
Binary files /dev/null and b/src/test/resources/htsjdk/samtools/cram/cramQueryTest.cram.bai differ
diff --git a/src/test/resources/htsjdk/samtools/cram/cramQueryTestEmpty.cram b/src/test/resources/htsjdk/samtools/cram/cramQueryTestEmpty.cram
new file mode 100644
index 0000000..312f642
Binary files /dev/null and b/src/test/resources/htsjdk/samtools/cram/cramQueryTestEmpty.cram differ
diff --git a/src/test/resources/htsjdk/samtools/cram/cramQueryTestEmpty.cram.bai b/src/test/resources/htsjdk/samtools/cram/cramQueryTestEmpty.cram.bai
new file mode 100644
index 0000000..bfe6239
Binary files /dev/null and b/src/test/resources/htsjdk/samtools/cram/cramQueryTestEmpty.cram.bai differ
diff --git a/src/test/resources/htsjdk/samtools/cram/cramQueryWithBAI.cram b/src/test/resources/htsjdk/samtools/cram/cramQueryWithBAI.cram
new file mode 100644
index 0000000..77a5ef2
Binary files /dev/null and b/src/test/resources/htsjdk/samtools/cram/cramQueryWithBAI.cram differ
diff --git a/src/test/resources/htsjdk/samtools/cram/cramQueryWithBAI.cram.bai b/src/test/resources/htsjdk/samtools/cram/cramQueryWithBAI.cram.bai
new file mode 100644
index 0000000..c44b7e9
Binary files /dev/null and b/src/test/resources/htsjdk/samtools/cram/cramQueryWithBAI.cram.bai differ
diff --git a/src/test/resources/htsjdk/samtools/cram/cramQueryWithCRAI.cram b/src/test/resources/htsjdk/samtools/cram/cramQueryWithCRAI.cram
new file mode 100644
index 0000000..285c0a0
Binary files /dev/null and b/src/test/resources/htsjdk/samtools/cram/cramQueryWithCRAI.cram differ
diff --git a/src/test/resources/htsjdk/samtools/cram/cramQueryWithCRAI.cram.crai b/src/test/resources/htsjdk/samtools/cram/cramQueryWithCRAI.cram.crai
new file mode 100644
index 0000000..2b6f632
Binary files /dev/null and b/src/test/resources/htsjdk/samtools/cram/cramQueryWithCRAI.cram.crai differ
diff --git a/testdata/htsjdk/samtools/cram/fieldarith.sam b/src/test/resources/htsjdk/samtools/cram/fieldarith.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/fieldarith.sam
rename to src/test/resources/htsjdk/samtools/cram/fieldarith.sam
diff --git a/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.21.1-100.dict b/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.21.1-100.dict
new file mode 100644
index 0000000..ca3f427
--- /dev/null
+++ b/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.21.1-100.dict
@@ -0,0 +1,3 @@
+@HD	VN:1.0	SO:unsorted
+@SQ	SN:20	LN:200	M5:8ccb6b8284befc25d90a23637cad0bdd	UR:file:///Users/cmn/projects/htsjdk/testdata/htsjdk/samtools/cram/human_g1k_v37.20.21.0-100.fasta
+@SQ	SN:21	LN:200	M5:8ccb6b8284befc25d90a23637cad0bdd	UR:file:///Users/cmn/projects/htsjdk/testdata/htsjdk/samtools/cram/human_g1k_v37.20.21.0-100.fasta
diff --git a/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.21.1-100.fasta b/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.21.1-100.fasta
new file mode 100644
index 0000000..53a8bbd
--- /dev/null
+++ b/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.21.1-100.fasta
@@ -0,0 +1,11 @@
+>20 dna:chromosome chromosome:GRCh37:20:1:63025520:1
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNN
+>21 dna:chromosome chromosome:GRCh37:21:1:48129895:1
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNN
+
diff --git a/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.21.1-100.fasta.fai b/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.21.1-100.fasta.fai
new file mode 100644
index 0000000..0214054
--- /dev/null
+++ b/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.21.1-100.fasta.fai
@@ -0,0 +1,2 @@
+20	200	53	60	61
+21	200	310	60	61
diff --git a/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.21.10M-10M200k.dict b/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.21.10M-10M200k.dict
new file mode 100644
index 0000000..10aff59
--- /dev/null
+++ b/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.21.10M-10M200k.dict
@@ -0,0 +1,3 @@
+@HD	VN:1.0	SO:unsorted
+@SQ	SN:20	LN:200000	M5:fcb26df26fe47f4b364854e8371e465e	UR:file:///home/chris/projects/htsjdk/testdata/htsjdk/samtools/human_g1k_v37.20.21.10M-10M200.fasta
+@SQ	SN:21	LN:200000	M5:f02fab23cf86525e5ce048774c66a23d	UR:file:///home/chris/projects/htsjdk/testdata/htsjdk/samtools/human_g1k_v37.20.21.10M-10M200.fasta
diff --git a/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.21.10M-10M200k.fasta b/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.21.10M-10M200k.fasta
new file mode 100644
index 0000000..f77d885
--- /dev/null
+++ b/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.21.10M-10M200k.fasta
@@ -0,0 +1,5002 @@
+>20
+TTGTTTACTACAGCTTTGTAGTAAATTTTGAACTCTAAAGTGTTAGTTCTCTAACTTTGTTTGTTTTTCAAGAGTGTTTT
+GACTCTTCTTACTGCATCCCTTGCATTTCCATATGGACTTTATAATCAGCCATGTCAACTTCTGCAAGAAAGACAGCTAG
+GATTTTGATAAGGATTGTGTTGAATCTGTAGTCCAATTTTGGGAATACTACCATGTTAACATCGTCTTCCCATCCATGCA
+CGTGCAATAGCTTACCATTTATTTGGTCTTCCTCAATTTCTGTCAACAATGATTTGTAGTTTTCAGTTGCAAGTCTTGCA
+CTTCTTTTGTTAAACTTTTTCCAAATATTTATTCTTCTTAATTACATAATTCTCATAATTAATAAAATTCTGAAATTTTC
+TTAATTTCATTTTTGTGGCCATGCACTTTAAAACTTGCCTTTTAACAAGACTTCCAGATGATTCTTGTGAACATTAAGTT
+TGTGAAGTGCTTCTCTATTGACAAACTGAACTCACGTGACATTCCACACAGCACTGGCAAATTCTGTCCCGTAACTCCGC
+TAGCTCTCCAACAAGAAGTGACTTGACGCAGCCCAAGGTTACTACTTAACACAATGAATTAAATGTTTTTAAATAAGAGG
+AAGCAATAAGATTCTAAAGGCTTTTCTGTTTTAATTTTCATGCAATGGAAAACTGGTATTAAATATCTATTTAATTAGGA
+GGAAACTACAATGCTGACTTTTGTCTGAATTATGTAGATAAGTGATTCATTTGAAACAATTATTTTGATAATTGTCAATT
+ATCCATTTCATTTTAATGCATTTTTTATTCTTTTTTCAAAAATAGCAACAATTACAACAGTTAAACCTTATAATGAATAT
+GTTTCCTAAACCCTGTTCTACTTTCTGGTTCCAGATCTGACACCAATTACCTTTCTGATTTTGGACAAACCACTTAATAT
+TTGTAACTTACAATTACTTCAACTGAATAATAAAAGAATTGGACTAGATTTCTCCAACATCTCTCTCTTTTGGCTTTATG
+TTAGATAATGCTAAATTTTCATCATATCCAAACATGCTATATAATTTTATGAACTGTTACAGAGTCAGACATAAGCAATA
+TAAAGTATGATTCTGAATAAAGGCTGTGAAGTCTCGTTTTTCATACATAATTACAGGAACCGATCAAATTCAATAAAGCA
+TTTATAGTCATGACAATATATTCTCTTAACTTGCAATGTGGTTTTAGGAAACAATGAAACAAATTAAACAAATGCATGAT
+TCCTTAAATTTTGTTGACTCGGGATTTAGTTACATATAAGACTCTTTTCCCCCAGCCAGATTAACCCTGCTCTGTATATG
+TAATACGCATCTTCCCCCCAAATTTCACAATAATAATTTTATGAAAATCAGTATGTAAGTTGCTGCATTGGCATGAAAGG
+CTACTAATGTGATATCTTTCAAAGATAAAGTGAACGTTTTTGTAAAAAGCCACCACACTGGAGGCATTTAACAATATTCA
+TCTTCCTATGGCATTACTACCCTAGATGTACTTTGCAATATTAATCAAGCCTTGTCTTACTTCAGGCTTCCTAGAAATAG
+AGCACAAGGCAGGGATTCTTTCTTGCTCGGTGATTTATTGAGGAAGTGCTCTCAGAAGAAATTGGTAAGATGCTAAGGAG
+AACAGCAAAGGACAGGAGAAGGGGGCTGAGCAGAGATGGGGATCTAACTGGAATCTGGCCTTTGCTGATTACCAGAGCAG
+CTCTGGGTGCGAATGGTGAAGTGCTGTTCTATTGACAAACTCCACTCAAGTGACATTCCATGCAGCACTGCCAAATTCTG
+TCCCATAACTCTGCTAGCTCTTCAACAAGAAGTAACTTGAGGCAGCACAGTTACAGAAAACAGCACAAGAACCAAGGATA
+CTAAACAGTGACTTAAATGTTTTTAGATAAGAGGAAGCAATGAGATACTATAGGGTTTTAATTTCTGTGCAATGGAAAAC
+TAGTATTAAATATTTATTAATTAGGAGGAAACGACAATGCTGACTTCTGTCTGCATTATAGACAAAATTGAGTTCTCCAA
+CCATGAGGCAAGGTGCTGGCTTTTTGTACCCCTGCACTATTCATTTGATGGGTGAGTGCACTGTAACTTCCAGACGTCTC
+TTGGTAGGTGGGCCCCACTGGCCAAGGGCATTTCCCGGGAGAGTGAACGGCTGTGAGCTACTAGCAGTAGCAGTCACAGC
+AGCTGGGAGGAAACACACTGGTCAGGTAAAGGGCCAGCATCTACCAAGGTCCAGTTCCACAATTAGTGGAGAAATATTTA
+ATAACATTATTTTTGAAAATAATTAGGCCATATGACTTTGACACTTTTTCTCTAGCAAAGTGACTAAAGAGAAGCAGGTT
+TTTAACTGGGTTTTTATTTCTGTTGTGTCTCTAGCCAAAGCCATGCTGATGTTTGATAGTTTTTTTTTTTCTTTTTTTAA
+GAGATGGGGTTTCACCATGTTGGCCATGCTGGTCGTGAACTCCTGACCTCAATGGCCTCCCAAACTGTTGGGATTACAGA
+CGTGAGCCACCACGCCCAGCCAGAATTTTTTTTCCTAAAGAATAGAACAACATTATAAAATTTTAGGCATTAAGGACAAA
+TTTTATTTTATGATTTTCATTTCGGTGAGTCACAAGATATTCAACACAAAATGAAACTTCACAAAATTTCACAAAATGAA
+AAATTCATTTCAGATTCAATATCCACCTCCATCATCCATATTCATTCTTCCAATGTCTCAGGCCAGAAATTTGGAGTATG
+TGGCTTCTCCACCTCACACAATTTGCCCTAACTTTAATATATACTCAGAAATTACTGGCTTTTCACTGTTTCTATGATTC
+CCATGTGTAATATACAATACTCACCATGCATACAATAATACAATAATTCTGTGTCACAACCACACCTAAATTGGTAAGTT
+TATAAGGTTATAAGCTGAGAGGTTTTGCTGATCTTGGCTGAGCTCAGCTGGGCAGGTCTTCCGGTCTTGGCTGGGGTTCA
+CTGACACACAAGCAGCTGACAGTTGGCTGATCTAGGATGGCCTCAGCTGGGATGACAGGCTGTTTCCTCACCTTCCAGCA
+GGCAAGCCAGTCCCAAGAAAGAGAAGGGTGAAACATGGAGGCCATTAATTGAGCCCATCGCATGAAACGCATCTGTGACA
+CCATCACCGTCTGATATCAACTCTCACCTGGATGTTTGCAATCGCCTAACTTTTCTCCTTTCATTCACTATGCTGCCTTA
+CAAACCTATTCTCCACAAATCAGCTAGAGCAAACCTTTTAAATCCTAAGTAGAATGCTACCATTCCTCTGCTCAAACTAC
+TGGAGAGGAGATGCCACAGTCTTTACTATGGTCTTCAAGACCCTATGGGAGGTAGCCCTGTCTTACAACCATAGCCTCCT
+ACCACTGCCCCAACATGCACACTGAGTTCCGGCCACAGCATCTGCTTCCTCTTTATTGATCTGCCAAGGACATGAATGAC
+TCAGAATCTCAAACTTATTCTTCCCTCAGAATCCACTGAGCCTACTCCCTCTTTTTCTCAAATGGCAACTTCTCAGAGAT
+GCCTTCTCTGGCTAACGTATATGTACTAAAACCTCCATCCAGCACTCTATGTCATCCTTACTATGGTTTATTTTTCTTCA
+TAGCACCTATCAATTGGTGAAGTATTAAATATGCATTTTTGTTTGTATGCATCTCTCTCCCCGCACCAGTATGTGAACTC
+CACCAGACTGGAGAGCGTGTTTATTTTGTTCACTGCTGTAACTCCAGTGTCTAGAACAGTGCCTGGCACACAGTAGGTGT
+TTAATAATGATTTGTTAAGCTAAGCCAATGAATAAATATTCTTTTGCCCATGAGATGATGTTAAAAATTTTTCAATTATT
+CATAACTGCCTGTAAGGAATGAATGTTAGTGAATTACTGTGATACCAATGAAAGTTAAATGATGCAATGAAGATTGGGTG
+GATCACAAGGTCAGGAGTTTGAGACCATCCTGGCCAATATGGTGAAACCCCGTCTCTACTAAAAATACCAAAATTAGCTG
+GGCATCGTGGCAGGACCCTGTAGTCCCAGCTACTCAGGAGGCTGAGGCAGGAGAATCGCTTGAACCCAGGAGGCGGAGGT
+TGCAGTGAGCCGAGATCACGCCACTGCACTCCAGCCTGGCCGACAGAGTGAGATTCTGTCTCCAAAAAAAAAAAAAAAAA
+AGATGTAAATATTATTATTGTTTGCCATCACCCTTATGTGCTGTCTTGGCTTTATGTCGTGCTTAGATCTTTTCCAGGTG
+CAATTTGAATTGATGCAACTAATGGAAACTACAACAGAAGTTCATATTGCCCTATTGTATTACACTATTTGACTCATCTT
+TCAGTATCACCAGGTGCATAAGGAAAATTTCAAAAGTAAATCAAGAGAAAGAAATATGATCGCAGCTTAACACAATGTAA
+ATTTATTATTTGTACTTTTTGTCTAAATGGTTTGCCTAAAAGACTGAAAGACATTTTATATTAGTTAGAATACTTGAGGA
+TAATAACATAAAAACTTTCCTTTCCAACTTGTTTATAAAAGGAAATCTTCACTGTTTTGAACATCAGTTATTTTAAACTT
+TTAAGTTGTTAGCACAGCAAAAGCAACAAAATTCTAAGTGCAGTAATCACTTTACTGCGTGGTCATATGAAATCAAGGCA
+ATGTTATGAGTATTACTGGAAAGCTGGACAGAGTAACGGGAAAAGTGACTAAAACTATGCAAAACTATGCAAAACTAAGC
+AGATTGTGTCTCTAGAGTATTTCCCATCTCAAGTTTAGTTATTTACTAATTTGGCAACATCTGACCTATCTTTAATTGTG
+AGAAAATAAACAAACACATAAGCCAACTCTCAGAATATGGTTATACATAGGTGTAGCCTATGACTTTGAATGTATTTGTT
+TGAATAGCGTAAAACAAAATAAAAATAAAATCTTGTTACAGTGCAAGAAACGGCAGTCATCAAACTAAGATGAGGCAAGT
+GTCATGAAGTATGAAAATATGGTACCTGAATTCTATTTATTAGAAAGTCTTCACTGAGCTGAGCATGTTTTTTTTAACAA
+ATTCAATTACTGATTTGAATATTTATTATACTTAATTATTGCAGCCATGAAAAGAGGTGCTGGCTGAGGCTGCATTTAAT
+AAAAACATTTAATCAGCTTGAGGTTAGTAAACCATTTAATTTGTTTTTTCATGAAGATTTAACTTCTAGAATAATTTCAT
+TTATGTATTTTTAGGTATAGCCCTAGATTCTGGTCTACATAGTATACAAATCATTTTAGAATGACACTAGGTTATTTCAA
+CTGCTTTTCTACAGAAGTGTTAAATAAGGGAGTAAAGTGTTGGCTTTTTCCATAATTGAAATAAATGCACAATGAGCAGT
+AACATCCTGATTTCACTGCTATTTTGTTTAATCAACATAATGATGTAGATTTACTCTGTATATATATGGAAGAGTGAAAG
+AAGGTTGGGAAGGAATAACTATCAATTAATATAGGTGATATAGTAGTTATTTTTGCAAATCAACTATAATTTCTGAATGG
+ATATTCAGACCATATTTACATTACATAGAAGAGGCACACACCAAAAGATTTAACAAATGTGCCAAATATTGGTGAATATT
+TAGTTAGGTACCAAAAGGATGTTGTATAAATTAGGATGCTTTCAACCATAAGAGACTCATCTCAAAAATGGCTTGAAAAT
+GTGGGGAATTTTTATCTCAGTGTGAAGTTAAAGGTAGGGCAAGTCCAAATCAATTCAAGTTAATTAAAGCCCCAAGTTCT
+TGGAATTTGCTCTGCTAGTCTTAGCTGGTTAGCCTTTGTCCTGAAGTTTGTAACTTCATGACCAGAAGATGATCGCAATA
+TTTTCTAAGTATAAAGCCTGAAGGTGTAAACCAGGTAGTCTCAGCAAAACCAATGATGCATGGTCACCTTTCCCTTAGTT
+GACAAATACTTGCCTTCCAGATACTTTGAATCAGAATGGGCATTTTAACTAAGATCCAGTGCAACTAAAGGAATAATCAA
+ACAAATGATAAAATAATTATTCTGAGCTGAAAACACAAGTCTGAATATTGGAAGAGGTTCCTGATTTCCAGGCAGAATAG
+ATAAGCAAAGATATCAACCTAAACACATCCTGGTTATAGTCTAAAATTTAGAGAATAAATGGGAAAAGATTATGAACTTT
+AAGGAAGAAATAACAACTTACACATAAAACAAAAAGGAAAAACTATCTGTTGACAGATTTATCACCTGCTAGATAAGAGC
+AGAATAACCATTCACTAGGAGAAAATGGGAGAAGCTAGAAGATACTGGAGTAATATTTATAGAGTACTGAAGAAAAAACA
+AAAAACAGAAATTCAATACACAGCTAAGATATCATTTACCTGTAAATGATACATGGTAAAATAAAGGGTAAAATAAGGAT
+ATTTGCAGATTCACAAGGAGAAGTTAGCCTTCACATACTCAGCTGAGGAAAATCCTAGAGACAAAACTCTAACAAAAACA
+AACTAGCCGGGTGCAGTGGTGCAGGCCTGTAGTTCCAGCTACTCAAGAGGCTGAAGTAAGAGGATCACTGGAGCCCAGGG
+GTTCTGGGCTGTACTGTGCTATGCTGATCAGGTGTCCACACTTAGCTCGCCATCAATATGATGACCTCCCAGGTGGGAAA
+TGGAGCAGGTGAAAACTCCCATGCTGATCAGTAGTGGGATCATGTCTGTGAATAATCACTGCACTCCAACTGGGAAACAT
+AGCAAGACCCCATCTCTAAAAAACAAACAAACAAACAAACAAACAAACAAAAATGTTTCAGACAAATGTCAAGATAGAGT
+AAAAGAAGAAAGTACTGAAGGCCCTTCAACATAAATTGGATCAGATAATAAAAATAATAGCAAAGTTCTTTTCATGCTGT
+ATCCTTAATTCTTCACCATAATCTTAGGAAGTGAATGTATTAATTATCTTTTGCTATATAACAAATTACTCCCAAAACTT
+GGCGGCTTAAAACAACAAATATTATTTCACAATTTCTGTGGGTCAAGAATTTGGAAGTAGTGACTCTGGCTCAGGGTCTC
+ATTTAAGGTGGTAGTTCAGGATGCCAGTCAGGGCTGCAGGCACTGAGGCTGCTTCCTCAATGGCCCACTCACATGGCTGT
+TGGCTGGAGGCCTCTCTTTCTCACCACACGGGCCTCTTCATAGGACTGCCCGAGTGTCCTTACAGCGTGGCAGCTGGCTT
+CCCCCAGAGTGAACATTCTGAGAGAGAGAGAAAGAGAGAGTTAGAAGGCATGCTGTCACTTCCACTGTATCCCATTCACC
+AGAAGTGAGAGACTAAATTCACCAAACAGAAAAGTGAAGGAGAATGAGGCTCCAGTTTTTTGGGTGAGAGTTGAAGAATG
+TATGGATATTTTGAACAACCACAATATAATTCTTCTTTTCACAGAAGCACAAAAAAATTTATTTAACTTGTCCAAGTTTA
+CACAGTTAGCAAGCAACACCTTTGAGAAAAAAATCCATGTAGTCTGATACAAGCACCCAAACTCATAACCACAATGTGAA
+TCTAACTGCTTTTCAATTAAAAAAGAAAGAAAGATTCCCTTCAAATCTGGCATATGCATTCACATGGAGCATTCATACTG
+CCAGTGACAGTACCATAGTTATATGGAATTAGAAGTTCTAACTTATCTTGGCCAAACTAAAGACTTAGGGCTGGGTAGAA
+GGTTGGAGGGATGTAAGGTCATTCTCAAGATCTCATCTAGGAGAAGAAAACAAAATGGGGAAGTAGAAGACAAAATGCTT
+TTTTAGGTTGGGAAAGGACTGGGAGAATCAAGCATCTAGAAATGGGCACAAAGAGTTACCTTATTTTATTTAAAAGAAAA
+TAAATGTTTGACTATTAATGCCTGAGAACGGAAGGTGATTATTAATGAGATGAAAAAGTTAATCAGATTCTCCAAGTTAG
+GAGGGACTTGAAGACCAAATTGATAAAAATAAAAAAAAAGATGTCATAGTAGAATAATCTAGATAATAAGCAATCAATGA
+GACTGAAAAAATAAAATCAAGTATATCATTTGTTACACTAAATATTAATATACCAGATTCTCTCATTAAAAAAACAGAGA
+AAGTCAAATTGGATTAAATAAGAACAAAAAGTTAGCTATATAGTATTTATCAGAAACATTCTTATAAACAAATTGATAAT
+GAAAGATTAAAAATAAGAGATTTGAGGCAAGGCAAGCAAAAAGAAATAAATGTTAAACAAGGAGAAATTAAAGGCTACGG
+ACATTACCTAAGGAAAAGGATGACATAGAGTTACAGTGGCAAAAGTTAGGAAGCAGATGACATAAATCTATATGCACAAA
+CAGTATGGCCACAAAATACATTAATTAAAAATTACTAGAAATATAAGATGACTTTGATTAAAATACACTGATTACAAGGG
+ATTTAACATATAAAAATTAGGCTGATGTGGTAAATTTAAATATAATCAAATATTTAGGAAAATAGAACAACACAACAAAG
+TTGATTACATATATTCATTTTCCAGATAGTATACTTTATGCCTATGAAATAGTTCTTAAAATCAATTATATATGGCCGGG
+TGCAGTGGCCCACGCCTGTAATCCCAGCACTTTGGGAGGCCAAGGCAGGTGGATCACGAGGTCAGGAGATCGAGACCATC
+CTGGCTAACACAGTGAAACCCCGTCTCTACTAAAAATACAAAAAAAAAAAAAATTAGCTGGGCGTGGTGGCTGGAACCTG
+TAGTCCCAGCTACTCGGGAGGCTGAGGCAGGAGAATGGCGTGAACCCAGGAGGCGGAGCTTGCAGTGAGCAGAGATTGCA
+CCACTGTACTCCAGCCTGGGCGACAGAGTGAGACACCGTCTCAAAAAAAAAAAAATCAATTATATATTTAAACACACACA
+CACACACACACACACACACACACACACACACACACACCTTTTCTAAAAAATCGGGATTTAATAGACATATTATCTGATCA
+TAGGCTGTTGAAGATAAATGATTTAAAGGTGGCCTAAAGTTTAAGTACTTGCAAATTAAGAAACATCCTCTCCACATCAA
+AGAGAAAATCACAACTAATGTTACTAATTTTCTAGGAAGCAATTTAAAGAACACTAAATCCAAAACCTATGAGACACAGA
+TGAAACTGTGTTCAGGGGAGAATGTATAGCCTTAAATGCTTACATTAATAAAGTAAATGAAAGAAAAAAATAGTTTTTAT
+CCTAGGAAATTAGGAAAAAAGCAAATGAAGTGAAATATACTACTAAGAAATAAAGGCAAAATTACTGAATTTCATGAAAT
+AGAAAACAATAAAATATATTAGAAAGGATAAAAAGGATTTTAAAAATCTGCTCTTTTAAAGAATCTGTAAAACAGAAAGC
+CTTATGGTTCAGATTAAAGAAAAAAAGTTTGTGTACGTGTGTGTGTATGTATTAAGAAAGGAGATATAACTTCACAATCA
+AAGAAGTTTCAAAGAATTGCAGTTACATGGCAATAAATTTGAAAACTGAGAGGACACAGAAGATTGTGTAACAAATTACA
+AATAATGAAAATTGACCTTATTCTGAAGTAGAAAACAGGAATAGATCAAGTTTCATGCAAGAGTTTGGAAAGATGATTAA
+AAATGTACCCTCTAAAGAGCAAGCTGGGCATGGTGGCTCATGCCTGTAGCCCTAGCTACATGGGTGGCTGAGGCAGAAAG
+ATCACTTGAGCCCAGAAGTCCAGAAGTTCAAGGCTACAGTGAGCTATGATTGTGCCAGTGCCCTCCAGAAGAGAGAAAGA
+AAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAGAAGAAGGAAGGAAGGAAGGAAGGGCAGAAAGCAGA
+CCAGACCAGATTTCATAGCTGTATTCTGTCTATCCTTAAAGAATAAATAGTTCCACTTATATTTTAAATATTCTAGACGA
+CAGAAGAAGGAGGAAAGTTCTTAGGAACCGATAAGAGAATTCAGAAGAACCAAGATTCAGTAGCCTACAAAGAATGAAAA
+TATGCTCATAGCAAGACAGCTTTTGAGATATTTATTTCAAATTCACAAAAACTCCCCCATGCAATCTTAAAGAGGCTTTA
+CTTTATTTAAGCCAAAGAAAAGTGCAGATCCTGATATGAATATATTTATCTACAAAAGGCTCATATTCTGAAGTAATACT
+ATTCAACTCTCATTCATTCAAGAGCTGTTCTGTAATTGTTCCAATATAAAGGAGTCAAAACAATTTCTTAATGGAAGTAT
+TAAGTAGTGCTCTATGTTGTCAACTAATTTATTTCCCATTTCAAACATTAGTTGACATGTTTTCATTTCTCTTTTGGAAG
+GAAACAACTAAATATGTTATCAATCCATCATTTACTTGTACAATAAATAAAGTTCTAAATCACTGCACAGTGTAAAATGG
+CAAATAGACTTCCCCATAACACAAAGCCATCCTGAAAAGTTTTGTTCATTTTAGAAGAAAAAATTTTAAAACCTGAGCAC
+CATAGGATATGAATTTTGTAAATAATTATGAAAAGAAAACAACAATAATAAAACTCTACTAATCTTAGCTCGATCTTATA
+AAAAGTTCCATAATGAGTAATCAAATTTTATTTCCAATTAAATATGTTATCACCCAGTAGTATGCCATACCAGTTTCTAA
+TTCATACTTCAATTATCTTCTAATTTAAATTAATGACTATAATTGCTGTTATAAAACAACAGCTCTATAGCCTGCTATTC
+AGACCAGTAAATAAGAGTTTAAGGGCTTGTGATAGCAAATGAAGTTTCTTATTGGATTTTAAGAAAAATTTTTATAAAAA
+TATGTGAGGTTATTCAATAGAATCACATTTAATTTGCCAAGCATTTTGCAGAATGCCTAGGACTATGTAAGAAGTATTAA
+ATTTGCAAGCCCTTTGAATAGTTGTAATTTAAAGATAAAAATTGGTTTAATACCAGACAAAGATAGAAGCACAAGTTAGG
+TTATTAGAGAATTTAGCCAGTGTATCAGTTTGTATCGTAAGTCATTGGCAAGAACAACGTGTACTTTTCTGTCACCTCCC
+AACTAGCTATGTTTTGAGCAGTAGGAATATTTAATACCCCTTCCTCCCATTTTTCCTTTGTGTTGTCCAAATTCTGACAA
+CTCTACTGCCAGATAGCTCAGGGCAAAAATGATAAAGTTCAAGTTAAGAAGGCTCTGCAGTGTTCTCAGTTCTCCTCTGG
+TGAAAGAGGAGAAAGGTTGTGTTTAATTATGAATCTGGGATTTCCAAAACTTTACCCATGCCCTGCCTGTCCCCTCATTA
+GCATGAAGCTGTTATTTAAATAGTTCAGCAATAACGACTTTAGTAGCCTCCCTAGGTTAAAAAGATTGAAATTAAATGTG
+TTTATCTATTGTTCTACTATTCAGTTACCTGATTATAAAATCAAAGATTATTTCATGAAACTCAGTACCCCTTCAGGGAA
+AAAAAAAAAAAATTCCCTAAAACAAAGTTAGGAGAGTGAATCGGACCACATGCTTATCTCCAAGGTCTCAATCAAACAGC
+AAATGCTTACCCAACTTCTATTCAAAATATTTGCGCCAGTAGTTCTGATATGACCCAAGCAGAGTTCACACATTATTAAT
+CTACTCCTTTCAGTCTTCTAGATGTGTTTCCTCCAAAATCTACCAGATTCTCAAATAATTTCAGGAACTTTCTCCAGAAC
+AGAAACAAGGTTGTTACTGATACCAACTTTGTCTCCAAACATGGGGAAGATTATCATTGGAAAGATCTATTGATGACCTA
+TAATACATAGTTGGAACTGTTTATCCACAGAAGTATTCCCCAAGAATCAACCACAGAGCCAAGATGGAGCTTATGTCATT
+GTTATGCATACTTCTTTTACGGCTTGTGAGGGCAGTTCATACTATTCTGATTTTACAACTGAGACCCAAGGAACCTGAGT
+GACTTCTAGGCTCCATTATGTCAAAAAAAACTCAAATGTGAGGCTTTGCCTACACTGAGAAACAGTAGTTCAAGAAACGG
+TGCCCTGGTTCTGTTAAAATAATCTGAGAGTTATGTGGTAAGTAGTTGAGAGTGAATAGGGTAGCTTTGAGAGGTGACAG
+CGTGCTGGCAGTCCTCACAGCCCTCGCTGGCTCCAGGCGCCTCCTCTGCCTGGGCTCCCACTTTGGCGGCACTTGAGGAG
+CCCTTCAGCCCACCACTGCACTGTGGGAGCCCCTTTCTGGGCTGGCCAAGGCCGGAGCCGGCTCCCTCAGCTTGCAGGGA
+GGTGTGGAGGGAGAGGCGCGAGCGGCAACCGGGGCTGCGCACGGCGTTTGCGGGCCAGCTGGAGTTCTGGGTGGGCGTGG
+GCTTGGCGGGCCCCGCACTCGGAGCAGCCGGCCAGCCCTTCCAGCCCCAGGCAATGAGAGGCTTAGCACCCGGGCCAGCA
+GCTGCGGAGGGTGTACTCCGTCCCCCAGCAGTGCCAGCTCACAGGCGCTGCGCTCAATTTCTCACCGGGCCTTAGCTGCC
+TTCGCGCGGGGGGTGCTCGGGACCTGCAGCCCGCCATGCCTGAGCTCCCACCCCCTCCATGGGCTCCCGTGCGCCCGAGC
+CTCCCCGATGAGCACCACCCCCTGCTCCACGGCGCCCAGTCCCATCGACCACCCAAGAGCTGAGGAGTGCGGGCGCACGG
+CGCGGGACTGGCAGGCAGCTCCACCTGCAGCTCTCGTGCGGGATTCACTGGGGGAAGCCAGCTGGGCTCCTGAGTCTGGT
+GGGGACGTGGAGAACCTTTATGTCTAGCTCAGGGATTGTAAATACACCAATCGGCACTCCGTATCTAGCTCAAGGTTTGT
+AAACACACCAATCAGCACCCTGTGTCTAGCTTAGTGTTTGTGAACGCACCAAGCCACACTCTGTATCTAGCTACTCTGGT
+GGGGCTTTGGAGAACCTTTGTGTCCACACTCTGTAGCCAGCTAATCTGGTGGGGACATGGAGAACCTTTGGGTGTAGCTC
+AGGGATTGTAAACGCACCAATCAGCGCCCTGTCAAAACAGAGGACTCGGCTCTACCAATCAGCAGGATGTGGGTGGGGCC
+AGATAAGAGCATAAAAGCAGGCTGCCTGAGCCAGCAGTGGCAACCCGCTTGGGTTCCCTTCCACACTGTGGAAGGTTTGT
+TCTTTCACTGTTTGCAATAAATCTTGCTGCTGCTCACTCTTGGGGTCCACACTGCTTTTATGAGCTGTAACACTCACTGC
+GAAGGTCTGCAGCTTCACTCCTGAAGCCAGCGAGACCACGAGCCCACCAGGAGGAACCAACAACTCCAGAAGCGCCGCCT
+TAAGAGCTGTAACACTCACCGTGAAGGTCTGCAGCTTCACTCCTGAGCCAGCGAGACCACGAACCCACCAGAAGGAAGAA
+ACTCCGAACACATCCGAACATCAGAAGGAACAAACTCCAGATGCGCCACATTAAGAGCTGTAACACTCACCGCGAGGGTC
+CCTGGCTTCATTCTTGAAGTCAGTGAGACCAAGAACCCACCAATTTTGGACACAGTTTGACAATAAATTTACACTCAAAT
+ATCTCTAAGGAATCAAACTTACAGATTAATAATTAGTAATCAGGTCACGTAAAGTAAATTATAAAAGAGCATTGATACCA
+AGATTGGCAGAAAGTTTTTTGTGTGACAAAACCAAGTTTTGGCTAAGATACACACTGCTGATGGGAGTCTAAATTGCTGT
+ATATGTCGGGAAAACAAGTTGTCTTTATCTTGATGTTATAAATAACCTATGACCCAGAAATTTAACTCCTAGACATATAC
+TCTAGTGAAACTCTTGAACGTGTGCGTCCAAAGACATTTATAAACATGATCTTAGTAGTATTGCTTTTAGTAGCAAATTC
+TGGAAACATCCCAAATGTCTATCAATAGTGGAATTGATTTGAAAGGGGTGTGGAATGGTAATATAATGGAATAGCCTACA
+GCTGTTTAGATAAAGGAACTCCAATTAAACATACCAACAAAGATACATTTCAAAAACAAGACGTTGAAAGGAAAAAAGTC
+ATCAAAACAATACACAACATTCTACCACATTTTTATAAATTCTCAAAATATGCAATATTAAACATGCATTATTTAGGGAG
+GCATTCAATGTAGCAATGCATTTTTAAGAGGCTGGGATGATAAATGTAAAATTCAGAACAGGTATTATCTCTGGGAACAG
+GAAGAGGAGGATGCAGTGTTGGGAAGAAATACATATAAGTACAGCAGTAGAGGCAGACTTTTTTTTCCTTTTCCTTTTTC
+CTTTATTTTTCCTAGCTTTCTTTTTCTTTAGCTATGGTATTTCTTTAGCTATGGTATGGTATGGTATGTACTTTCCATAT
+ATCATACAGTATATTTTGGTGTGCATGAAATAGTCCTTAATAACATTTTTATTATTTTTTATTTGACGTTTAAGTTCAGG
+GGTACATGTGCAGATTTGTTATATAGGTAAACTTGTGTCATGTGGTTTTGTTGCACAAATTATTTTCTCACCCAGGTATT
+AAGATTAGTACCCATTAGTTATTTCTCTTGATCCTCTCCCTCCTCCCACTCTCCACCTTCTACCCTCCAATAGGCCCCAG
+TGTGTGCTGTTCCCCTCTATGTGTCCATGTGTTCTCATCACTTAGCTCTCACTCATAAGTGAGAACATGCAGTATTGGGT
+TTTCTGTTTCTGCGTTAGCTTGCCAAGGATAATTGCCTCCAGCTCCATATTCCTGCAAAAGACATAATTTTGTTCCTTTA
+TATGGTTGCATAGTATTCCATGGTGTATATGTACCGCATTATCTTTAGCCAGTCTATCATTGATGAGCACTTAGGTTGAT
+TCCATGTCTTCGCTCTTAACATTTTTAAACAGTCTCTGAGTAGAATAGGGTAGGCTGGTGTAAGGAATTACTGTTTTTAA
+TTGGAAGCAATTACATACTGCATTAAAAAGCATATATATATAGACACACATATATATGTTTTACATTCACATAATCAAAC
+CAAAGCTTTCAGAATTTTCCGCCAAAAAGCATGTGTGGAGGGAGAGAGAGGGGTGTTATGGTGCATTTGTAGGCCCTGAA
+ATAAAGTGATCTCCATATTTGCTTCCATTTGTGTATAAATATTTTTTAAGCATATATGCATCCTAGAGTATGGAAAGAAA
+ATTTCTGGGAAGATTTGCAAGAATCTGTGGCAGTTGAGAGTAGGTTCACTTTCGCTTTATTGTGTAATTTATTGTATTTT
+TCATTTAATTGTACTTTGTAAACTAAATATTTATTGTATATTTTACTTCATTTTTTAATTGCCATATGCAGCTTTAATTT
+TATAACTAAATTACGTTCTGACCCAGGGCAGGAGGTGGGAGAAGAGAATGAGAGAAGGGCGGAATCCTCAAGACAGCAGC
+ATTCTGCGCTCCGCCCTGGAACACCCAGCGCAGCTACCCCATCCCGCCCAGGCCGGCCACCTGGCGTCGAAGCCAGGCGC
+GAGGGGCGGAGGCGGAGGCGGGGGCGGGGGCGGAGGCGGAGGCGGAGGCGGAGGCGGAGGCGGAGGCGGAGGCGGAGGCG
+GAGGCGAGCACCGCCCCACCGCGCACGCTGCGGTTGCCCCGGCAGCCGCGCCCTGCGTGGCGGAAGCTCACAATCAGCCC
+GGTCCCTCCGGCTTCCACCCCGCCCCCTGCGCTCACCTGCCCGCGCGCTCGCCTTCCGGGGACCCGGGGCCCATGGACAC
+ATACACCCAGCCCTGCTGTCCCGCGCGCCAGCTCACCAGCCCTACCCAAGGGACATCATTCACGCCTGGGCGCCTCCGCC
+GGGCTCCGGGAGCCCAAGGTCGCGGCTGGGCCAGCGCTGAGCGTCAGAGGACGAGAGCAGGGGCCTCCCCGGTCGCCCCA
+GCAGGCCCAGGCACATAGGTGCCCAGAGATCCCTGGCTTCTGATCGCCCGGAAGACTAAGAGGTGAGCATGACACATGGC
+TTTTCCTTTGACGTTACATCTCTACCCCCTCCACTCCTCTCTTCTCCAGATTTTTCTGGAGACTAAAAAGTGGAAAGCAC
+AGTAGAGAACCCCCAACATGTAAACTATCGCTTACACTTGTGGGTGAGCTGAAACCCGCAAAAGTTTGGTGTATTCATAA
+GGAGCCTATATTGAAGGACTTTGCTAAATGTTTTACAAGGATCATATCTCATTTCCTTTTCACACAGTCCCATGAAGTGG
+GCACCATTGCTATTGCCAGTTCACAGATGCAGAAACTGAGGTAAAAACTAGGTAGGTAGCATATCCATATTTATATCCAA
+CTAGTTTGTGTAAATAATGATACAGGGAAGGGAGGATCACAACGGATTTCCCCACCTCAAACCCTACATTCCACTGTACC
+AAGCTTGTCTTCCTCCATTCCTCTTTCTAATGATCCTTCCTTGTCGCACAGCCCCCCTTTGAGAGTTAAATGACCTGAAC
+ACACACAGATTCCCACAGATTTTCATGTAATTTCCGAGTATTCAGACCCCGAACATGGACCTCAAGTCAAGACATTCTGC
+TCTATTCCACTACCTTGGTAGATGGTTAGATGTGACTAAGGTGGCCCAGGGTCATGCTGGGGAAAAGTACAGGTGTGCTT
+GTATGTGGAGGATCCTATATGCTGCTCAATTTACAAGGAACCCAGGACCCAAGGGCTACACTTGTGTGATTTCCTGCAGT
+GGGGTGAGCTCTTGGTCAGTCTTAGCAGTTGATTGTGAAGGACACTATACAGGGTGCTGCCCTCTCCCCAGCACTTCAGG
+GCTTTAGCTGGGAGGGAGGCAGTCCACAGGAAACCGACCCATGGACCTACTGAATTCATCACTGTAGCTGCTAATTAATT
+CTCCGTTAGAGATGGGTGCTAACAGAGGCAACATTGAAGTCCCTGATTGGGAAATCCTTATTTTCACAAATCAAGTGGCA
+TTGATTCAGTCTAATCATTACCTATGTAGGAGCATCTTGCCCTATTCCATAGATGACACATATACATATGCAGTATGATG
+ACACCGCACCTAATCGGAATATGGCCGATAATAATAAAACAAGCCATAAATAGCAGCTAAGGTGAGTGTCACCAAGCCGT
+GCAGCAAAGTCCAAGTGGATGACTCTGAGGCAGCCTCAGTTGGTTCCCTAGTCCACCGCATGTTATAAACAGATTAAAAG
+GCAAAGGAGGCCTGGCCCGGTGGCTCACGCCTGTAGTCCCAACACTTTGGGAGGCTGAGGCGGGTGGATCACCTGAGGTC
+AAGAGTTCAAGATCAGCCTGGCCAACATGGTGAAACCCTGTCTTTACTAAAAATACAAAAATTAGCCAGGTGTGGTGGTG
+GGCACCTGTAATCCCAGCTACTCGGGAGGCTGAGGCAGGAGAATCGCTTGAACCCAGGAGGTGGAGGTTGCAGTGAGCCG
+AGATTGTGCCACTACACTCCAGCCTGGGTGACAGAGTGAGAGTTGGGCTGAGGGTCGAGGGTCATGGGATAGGGCGAGAC
+ACCAGGGGAGTGTGTGAGGACAGAAAAGAGTCTTGAGGACCTAGCCCTGGGCACCCAACATAGGCTTCTTGAAATGGGGA
+GTCAGCAAAGGAGCCTGAGCAGGAGGGGGCATTGGTGTTGGATGGAAGCAGGCGATGGAGATGTTGTGGTGCCTAGAGAG
+ACACATTTAAAGGAGAGAGAGCTCACTGCCGTAGAGAATGTAGGTGAGGGAGCAGGCTTGCGATGGAGAAAAATGGTGCA
+TAGTTTCAACTTTTGTAGTAAGATGGAATCTTTTCATTGCAAATGTTGGAAACTTAGCTAACTAGCTTATACCAAAATAA
+GGTGACGGGGGAGAAGATGTATTTACCCACAAAGTGGTAGATGGCTTCTGGCACCATGGAATCCATGTTGCAACTAATGC
+CATCAATCTCAGTCTCTCTCCATCCTCCAAGCATGGACTCTCTCTGTGGCACCCAAGAGAGCTACCTGCGCCCCAGCCCC
+ACACCATACTGCTTAACAACTTCAGCAGAATAGTCTTCCCTCATAGTTCTGGATGATGGGCTGCTGATTGGTTGATGTCT
+TTTATGCACATATGCTTGAGCTTCAGTGTACCTGGAAAGGGATGGTTCCCTCAAATGGGCCAAGCCTAGGCCACAGACGC
+TTTCCTCTGGCAGGGGACAGAAGGGACAGCCCCATCCAAACGACCCGGATAGGATTCCCTATGTGATAATGGAGTTCTTT
+TCTCATAAGAAATGAAGAAACAACTGGTCATACCTAGCATGGCCTGCTACTCAATCCAAAAGATTTTCCGTACAAGGAGG
+TAAAATTACATACTAGAATGCCCATCACAATTTAACAGGGAGATTGTTTATGAAGATTTGCAGTTTCCTTGCAGCCATTT
+AACTACATCTTTTCCACTTAAGATGACATAGGAATCACTCATCACATCTCTGATATGTTACAGAGCCCAGTAGATTTGTT
+TATAATGAACACCTCAAAATTATATGTTTTTATAACACCTTAAAACTATTAAAACCTGCATGATCTCTCTATTTTGCTGG
+TGTGAAACCATGTAGATAGATGGTGGGAACTCAAACCCTTACCATGGCAATTTAGTTAACCTCAGCTTCTTCATGGTAAA
+ATGGTAAAGTGTGATGAGGAGGGAAGGTTAAATGGATAATAAGAAAACCACCTGGCAAACTATTTTTAATTATGATGGAA
+GATTTGTTTGAACTGTTTTAACTTTATGCCCACTCAATATTTTTGATATTTATGTATAAATCCTACTTTCTGAACATTTC
+TTAAAAAACAATCTCTGTTCTTAAGGAAGTTCCAAAATATTGTTATTTAATCAAAATGAAGATGATAAAATTAACTTTGA
+GTTATTTTTCTTTTTTGTTGCTTCAGCTTTAGTTTTGGTCCAGAAAGCATTTTCAAGGAGCTGGTCAAGCATGGCTTTAG
+CAGATAAGAGACTTGAGAACTTACAGATCTACAAAGTTCTTCAATGTGTGCGGAACAAAGACAAGAAGCAGATAGAGAAG
+CTGACCAAGCTTGGATACCCTGAACTAATCAATTATACAGAACCCATTAATGGACTTAGTGCTTTGCACTTAGCCTCAGT
+TTCCAATGATATTGATATGGTCAGCTTTCTCCTTGACCTTGGTGCTCACCCTGATGTGCAAGACCGAATGGGCTGTACTC
+CCACAATGAGGGCTGCAGAACTGGGCCATGAATTGTCAATGGAAATATTAGCAAAGGCAAAGGCTGATATGACTATAGTT
+GATAATGAAGGAAAAGGTAAAAATCCCGACATCCTCTCCAGCAGATTGCAATTCATTTTGTAGCCAGAAAGCAATAACAT
+GGACTCTTTTTGTTTTCCAACTTTAGAAGTGAATTACTTATGGTTTTAAGGGAAAGGCTAAATTCTCACTCCTATACCTA
+AACCTACTGTCTCCTCATACTTTGAAAATGAGATAGCTATATCTTAGGGAAATCATTTATGAGAGCTTGCTCATATACTT
+AGATTAATTTGTGTTTGGTGGTTTCTTTTAAATGAGGAGAATTTTGAACATTATACACACTGATTATTCATTATAAATTA
+TAAACTCTTCTTTAAAACAAAGATGAAGTAGAGCATTTCTTATTCTTTTTTACATTTTAGAATTTCAGGCAGTACTACAC
+CTAGGACTCCAAAAGGATTTTGACCCCCTCAATAGCAGTCTGATGTAGGAAGGACTTGCACTTGGAAACTTGAAAGGACT
+AAAGTCAATTCTGGCATAAACTCCAACTTTCTAGGATCATTTAAAAATGTATTTGGAAATACAACTTCAAACTTTAGACA
+TGACCTTTCAACAATGATTCAGATCAAACAACTGCACTTCCACACCGTTAATCCCATCATCTCAAGACCATGGTCTGAGG
+ACTTCTTGCCCAAATAACTTAAGGTTCAGGCAAGAATTGACACAGCCTCATAAAAATAACTGAGATAATTACCAATAAGC
+TTAATGTAAACTGATAGGTTTGAGCAGTTGTCCTGAGTTTCTGCAGATACAAGGCCATTAACATCAGCTCTGCAATCGAG
+AAATGAATCTCATTTGATAATGTGAATATTTACTGACAAGAGCAAAATCTACTTTTCTAACTCTCTCCAGACTTGGTGTT
+TGATATGATTACTTGCTTAGAACAAAAGAGGACAAAGAAAAGGGATAATTACATATTTAATTGTACCACGTAAGAAAATA
+AACTTCAGATGCATTCAGGGGCCAGATGTACTTTGAAATGCTAAATCCTGAAAAAACGGTATCCTCTCAAAGCTCTATAG
+TATACCATTTCCCAAAGTGTGTTCCATGGACCACTGAATTGCCAGGATGTTGAGAAGTACTACAGGCAAAATTGGTTCCA
+CGGTCGCATTCATTAGAGACACATTTTAGGCATTTATCTTTCACATTGTTGGCTCAAAAATGGTGACATTTTCCATATTT
+ATTTGACCACAAAATAATTTTTTTTAATGACATCTTATATGCCTGGAGTTTTGTGGACTAACCTTTGGGAAACACTATAA
+TAGAGAAACGTTTAACTTACTGAGTTTGAAACAGATACAAATCACAAAGAAAAAAGAAATTCTATAGTGTAACAATGTAA
+AAGTGTAAATTTTTTTTAAAAAATCAAGGTAAAATTGGAAACAAGGTCAAGTAAATACTTGCAGCAAATGCAACAGGCAT
+ATGGCTAAAGGAAACAAATCTCATGGGCAGAACTGAAACTATAAAAGGAAACAAATGTAAAATCTTGTTTCACACATAAG
+CTTGTTAGGATGGCTGCCTGCAGCCCTCACTCTGCATAACCCAAGGTAGTGCCCATTGTACAGAGTACATTGTCTCTTCC
+CACATCAACAAGGCTCACTCAGCTTGCACATCCCTCAGTTTGGCTGCCAGTGGCCAAAGTGCTTTGCTTATTCCACAGTA
+CCCTCTCTGTCATTGACATCCTATTTTCTGGTGACTCATGTTTGACTCACTGTACATCGTTCCTAAAGGATAAGCAAACA
+TTTAAATGGCTAAGTGGGTCTTTAGGATGTGAGACATGCCCTAGGCCTAAGCGGCACTTTCTCTTGGAACAGTTTTACAG
+AATATGGAGAAGGTAAAGCTCTAAGCCAATACATTCTTTCTTGTATGTTGTCCTAGGGAATTAATTACAATGTGGAATCA
+TTCTCCTTAAAAGTTAAGCTATAATAGTTCCTTATAGCACCCATTCCCATTCCATTGGTTTTTGCATTTACAGGCATGTG
+TACTCAGTCCTCAATGCCGTATGCCAGTTCTATGATAAGGTACAATATTTTTCAAACTGTTTTCCCTTCAATTTAGGAAT
+GCTGATTCGTGTTCTTTGTGGCTTAAGGAGTCAGCCCAGATACACTTTAACTGTGTGTTGCACATGCCATCATTATGAAA
+TATTCATTTCATAAGTGGCTCTGGGCCTTTTGTAATGAATTCAGCCTCCTCGGGTATTATGCTTAGGTTGTTTTGCTGTT
+GCTGTTGTTGTTTTGCTCTGTTCCTGCCTTCTATGTCTTGCTCTTTCTTTGACTTTCTCTTTTCAATAATAACCTTGACA
+ATCTTTTCTTTTTTTTTTTAACTGTTCTGAGTAACCCTACTTTAGGTACATCAGTTTATATAAAATATATTTAGGTTTTG
+ATTTTCATATACATTTTATTTTTTATTTTAATGGTGGAATTTATCCAATTTATAGTTATCTAGATTTCCCATAGGGTTGG
+TTTCAGAAACATCATTTATTTTATATTGTGAAGGCAAAATATATCTAATATATAAAATACATGTGAAAGTATTGTCATAT
+ATGGTCCATAGATTTGTTTTACTTTGCTGTATATGTGTGTTTTTCTTTATATGCTTGTAAGTGTATTTTCTAAAATTGAT
+GGGCTTTTTAGTTATTTTAGTTTTTTTATTTTTTTTTAGTTATTTTAATGGTTAACTTGATACTTCATATGCCCTTAAAC
+CTGTTTATTGAGTTAGCATCTTAAACAGTATTCATTGATTCTTTACTCTGTTTAGTAAGATTAGTACAATAGTATAAATT
+AGATAAGATTAGTACAATAACCCTTCCTCTTACCTCTGCTACTTCCCTTCTACCACTCAATTTTATACTCAATTATAATT
+ACTTAGTTCTTTCAATGATTCCCTTTTAAATAATAAGTACCTTGTACTTTATTTCCTTACCTGCTTTACATAATATCTCA
+GTCTTAGCCCTAAAAGATGAGAAGATCAAATACTTATACTATTTCTCCTTCCTTCCCTCAAATTTTTGATAAATTATATT
+ATTTTTACATTATTACAAATTGCAATATATTTTTCTCTATCTAAAATTGCTAAGGTGACTTAGCTTTGGTTCTGTGGCTA
+AAGAGATTCAGTATTTATGACTGTTCTTTTAGTCACATATTCTCTATTGATCTCTTGGAAAACTCACTTTTGCCATCAAG
+TAGTTTAAGTCACTTCTTTGGGCTTGTATCCCTGATTACTTATATCCTTAAGACTATACATGTCTGTTGTTTTTACATGA
+ACAAAGTCTTGATTGGGTATAAAAGTTTCCTGTTTTTTTTCCCCAAGTTATAGTGTAGAGATCCTTTGATTATACTGTAG
+CATTGAATGTTGTTGTGATAAATTTTTGAGGGCAGCTGGACTTTTCCCCTTCATGGATTAGTTGAATTTTTGCCTGGATT
+CTTATGAGACACTTTCTTTGCCTAGCTTTACCTAGGCATGCCTTGTTGACTGTTCTTAATCAACTTTTCCTAAGACCCAA
+CGTACCCTTTTAATCTTAAGATTCAGGTCTATCTTTGCACTTTCATTTTAGAACAAGTTATATTTAGTTTCTTTGCTATC
+ATGGAAATGAATTAGCCAGTAATTTTCACTGGTATATCTTTTCTAGTATGTATCCTTTTTGCGTGGCTTTCAGTATCAGT
+TTCCTTTTTCCTTGCAGTATTTTTTTTTAATAGGTTGCATAGTGGTTTCTTTTTTGGTTATTCTTTGAATGGAGTCCCTT
+TTTTGGAATGAACTCGTTACAAAGTCAGTTTGGAGTCTGATTGTTATTTACCTAGGACTTTCTTAGTAACACTCTGCCCT
+GGGTTTATAACAGTAACCCTGATGTGTAGTATGCTTTTGACATGGATTATATTGAAGGCCTGTATGTTACAGGTCCTGGG
+TGTAGGGATTTAGCATGTTCCTTTCTCCAACGGAACAAAATGATTTGCTAAAATGGCAGAGGCACTTGCCATCTAATTAG
+CCACTTGGCATCTAATTCACCTCTTTACTGCTATACCGACAGCCTGCTTCTTACAAACATGGTCAGTCTACACATTTCAT
+CACCCAGTACCATCTCTTTTGCCATTCTGCAGTTTTAGAGGGGACCGGGGGACTTACCAGTAGCCCTTTGGCACACATAG
+CTGTAGCCCTTCACATTCCAAGAAGGGACACTCAGGGTACGTATTGCAAGATGCCAAATACTCAGGGTCAACAGGGTTCA
+CCTATCAGCGCTCCAATTCTTCCATAAGCCAGGTCCTATCATATGTGGTTGTGACTGTTTTCTAACGATGTCAGGAATTA
+AGATTTTTTGTTTTTTGTTTTCTGTCTTGGTGTTTTAGTTGGTTTCAGAGAGGAAACTGAGGCAGATATGCCTTTAACTA
+GCTAAATATGGATTTTTAATAATTTTACTTTCCTTCCTGTCAAATATGTATAGATGTTAAATACTCTCTGGGGATTTTAT
+TTCTCTGCAGGTGTTTTGTTTTACTGCATTTTACCGACTAAGCGGCATTATCGCTGTGCTCTGATCGCCCTTGAACATGG
+TGCAGATGTCAACAATTCTACCTATGAAGGAAAGCCAATATTCCTTAGAGCTTGTGAAGATGCACATGATGTTAAAGATG
+TGTGCCTGACATTTTTGGAAAAAGGAGCCAATCCTAATGCAATCAACTCAGTATGGCTATTCTTGTGATTACAAATATTT
+CTTGTTTCAATTACAGCATAGTATAATCTTTTCATTTAAATATGATGACATTATCCTGTTAGCTGATAGACTGTTCATTT
+TGATTCAAAATGCTTGAAGATGGTATACGTTTATTATCTTCATATTGCTGTGATTCTACAGTCTGTTTCCTAATTATGTT
+TGGAAACTTGGTATTTATTATGCATAATGTCAAGGTACCATTATAGGAAATAGTAAAAAAAGGCATTGCTGATGCCTAAT
+AGGATTTATCTTCTTAGAAAGAAAATATATAGTTCTGCTGTTTCTTTTCTTTTTCTTTTCCTTTTTTTTTTTTTTTTTTT
+TTTTGGAGACAGCGTCTCACTCTGTTGCCCAGGTTGGAGTGCAGTGGTCCAATCTTGGCTCACTGCAACCTCTGCCTCTC
+AGGTTCAAGCAATTCTCCTGCCTCAGCCTCCCGAGTAGCTGGGATTACAGGTGCATGCCACCATGCCCCGCTAATTATTT
+TTTGTATTTTTAGTAGAGACAGGGTTTTACCATGTTGGCCAGGCTGATTTCGAATTCTTGGCCTCAAGTGATCTGCCCAC
+CTCTGCCTTCCAAAGCGCTGGGATTACAGGCATGCCCAGCCTAGTTCTGTTGTTATATCAATTTCGTTCACTCTCCTTAT
+TTTGAGTTTTCTTGTAATTGCCCTTTCTTGACACTAAACATTGATTTTCTTCTCATTATAAAACACCTATGACCTAATTT
+TATATTAGGTGTTGATATTTTATCTCTCTTTCTCCCTTTTTATTCTGCTCTATTTGCAAGAAATACATTTTTTAGCATGA
+TATTCTCACAGGGGCATGCAGTGTGACCTTGACTCTGATTTTAGAATTCTACGTTTTGGATACTGACCCATTGTACTTGC
+TTAATTTTTCCTCTTTGTTACATGAAGCTAAGAAAAATGTAGAGAACTAAACTTGTAAGTGTAAAGCTAAGGTCAAATGG
+ATTCAGACTGTATTTGTACTGATTCTGCTACTTAATATGATGGGTATAAACAGCATCTCATATTGGACTATTTCATGTCC
+AGTCCACAGGCCGCACAGCTTTAATGGAAGCGTCAAGAGAAGGGGTAGTGGAAATAGTTCGAGGCATATTGGAAAGAGGA
+GGTGAAGTGAATGCATTTGACAACGACAGGCATCACGCTGCTCATTTTGCTGCTAAAGGAGGCTTTTTCGATGTAATAAT
+CTATTCTTTGCTTTAAAATTTGTTGCTAAAACTTTTCTGTCCTTTTTCTCAAATTTTTTTATATGTCATATAGAGTACGC
+TAAGGCAGACATTTTTCTGAGTTACCTATGTTGTTTTCATCTTGATTTCCCCAAATAATTCTAAAATTATATTTTTAAAA
+ACTTCATCTCTTTATTTAGTTTTATGTGTTTGCTAATTTACTTATTTAATATTATTCATAGTATTTTTCTGTACACTCGA
+TGATAACAATTTAAAACTTCTGCCACCTGGGACTACTGTATTTTATGACTTCATTACATACTAGAGTCTTCAAAATATAA
+TATGATCTAATTATGGGAGAACAATGCCCTCATACTCTTATCTCCATGGTGTTTGTGTGTATTAGATAATGATATACCTG
+AATTTTAAACTCAAGTATTCTGATTATCTAAACATACAGCAAAGAATATCTCACTGATACAGTAACATTGCAGAAACAAA
+ATAAAAACACTATTTTTTTAAATTCTGAATTTCATCTGAAATTTCTTTACTCAATTTCTTTGAGTTTTAACATAGTTATC
+TTCATTTATATCACTATTTTCCGTGCAATAAAAGTATACACAAAGATAGAAACCACGTTTGGTAAAGAATTTTCCCCTTG
+GATGCTGTTTTCCATGCTCTGATGCTTTCTTAAATTACCTACACTAACAAAAATGGTAGTGCCCCCTCTAAGTATAGAGT
+TTTCTATACTCTAGATCGATGCTATTTAATAGAACTTTTGGTGATGATGGGCATATTCTATGTTTGTGCTATACAACACA
+GTACAACACAATGGCCATGTGGCTTTTGAGTACTTTAAATGTGGCTAGTGAAACAAACAAATGAAACTTAAATATTTTCA
+GTTTAGTTCATTTCAATTTGAATTTTCAATAGTCATACGTGGCTAATTGATATTGTACTGGATAGTGCCGGTCTAGCTGT
+CAGTCTTTATATAGTAAATGTACATTCCTATTCCTCCACAATATCCACTATTTATTTTACAGATATTGAAGCTTCTTTTT
+GCCTACAATGGAGACGTGGGGCTGATTTCGATAAATGGGAACACACCACTTCATTATGCTGCCATGGGTGGTTTTGCAGA
+CTGCTGTAAATATATAGCTCAGCGAGGTAAAATTGTCTAGCAATTTTGTGCTTCAAGTACTTATGATTTACCCATGTCAT
+AGATAAGCAGAGAATTTGGGCCTATCACTTGTCAGTGGCTTATTTTTTTCTATAGCAAAACCTCACTCATATGTAGAAAG
+AAAATTGAAAATTATTTCTTATACTACCCTCTGTTAAATCATGAATCTGCGCTTAAAAGTGATGAAAACCCATTTGTCCA
+AAAATTTTAGGTTATTGTTAAAAACCTAAAGGGGATTGTTCCTGAGCCAACTAATTATTCAAATACTTATGGTTGAGGGC
+GGGGTGTGGTGGCTCATGCCTGTAATCCCAGCACTTTGGGAGGCTGAAGCAGGTGGATCACTTGAGGTCAGGAGGTCAAC
+ACCAGCCTGGCCAACAAGGCAAAACCGCGCCTCTACTAACAATGCAAAAAAAAATTAGCCAGGGGTGGTGACGGCGCCTG
+TAATCCCAGTTACTTGGGAGGCTAAGGCACAAGAATCACTTGAACCTGGGAGGCAAAGGTTGCAGTGATCCAAGATCTGT
+GTCTCAAAAAACAAACAAACAACAACAAAAAACTTACAGTTGATTAAGTTCCTTATTTTAAAAATGCCAACATCAATCTA
+AAAAAGGACACACACTGGAAACCATTTGCTTTCATTTGAATTTTTAGTTTATTTTAAAAAATGTTCCATTGTGTCAAATG
+CAAAGATAACTTTTCTGGATAATTTGCAGTTATGGGGAAAATAATTTGGGGGAAAACATGAAATGAAAATTTTCATGGAA
+AATTTGAAATGTAAGCAGAATTTTTAATTTTAGATTTTCCTGTAGCTTTTAAGTATTAATTTAGAAATATGAACTATATC
+ATATATAGCTTAATGAGTTTCTTAAAATGAAAATATTAATTATATTTAAGGATATGTATTTTTTGTTAATTACATGCTAA
+TTAAATGCACATTAAAAGCTTTTTAACTCTGTTAGAGATAGAACTGCATTTTGCAGAACTGTATATATTTTTTCTAGTTT
+ATGAGAAATAGGCAAAAACTTAAAGTTCACCAACTCTATAATAGAGCAAAGAAAGTGTTTTATTTTTCTGTCTTTCACAC
+AGACACAAAATTTTAGAAGTCACTAAAATTATAAATAAAACTAACATGCCCGCATAACTATAATAGCATAATAACATCCA
+TTTTTACTTGTATGAATGTATACTTTCCCTATGTCCCCAGTGTGATAAACCCTTTTAGCAAATGAGAAAAAAATATATCA
+AATGCAATTCTGGTTCTCTCAAACTGTCTTAATTGGTTTTGTTTTCAAATCTGTCTCTATTAGCAGTTTATTCTGACATT
+TCAGATTTAACATTATGTATTTCTAAAAACTAAAGATCCACTCAAGTTGAGACGAATTTCTTTAACTTTAATATGTTGAC
+TTATTTCTTATTGGGACAAATATCACACATACGCTATCATTTATGTATTCCTGGAATAAAGTCACAGAAAGCACATTGAG
+AAGCCAAGGACCAGAGCCTGGGTCATGCAAAAAGGCCTAACTCAGTGATGCAGAAAAGATCCATTTTCCTAGAAGAATCA
+ATCATGAGACTATTTTAAAGCAAATGTTTGAAAATGTTCTGAGTTCTGTGCATGTTCGGAAATATAAACACGTCACCAGA
+TCCTATGTGGGTGACCCAGTTTCACAGCAGAAGTGTTGTCAGTTACATTTCTACCTTTAGAATCTTACATCTAAGGCAAT
+CAGAGCAGGAAGAAACCCTTAGTGATCAAATGCTTGTTTCTGAATAAGCTCCCTGGGCTTAGTCTCTTCTGTACTTAATT
+GTAAAGCATTTGTGCTGGCTGGATTCATTTCTCACATTTCCTCATGATGGTCCTCTTTCGCCCCCTCGTCCCCCTGCTGG
+AGCAATTGGCAACTCCCTGTAAGCATGTGAGAGCCACATCCAAGGGCAGAGGAGAGGAGCTCGTGGAGTCCTCCTCCTCC
+TCTTCCTGGGCCCTCAACTCCCTGAGTTCTGCAGCACCAGCCAGCTGGAAGTTGAACAACTTTACCACAGAGGAGCATAA
+GTCTCACAAGGAATGAATAGATCTTTATTTACTAATCATAAAGGAGGAGACAGAAAGCTCCAAGTCAAAACGACAGGAAA
+ACTGCCTTTAGCTCTTTAAAGTGGGATGTTGGAGTCTTTTATACACATGGAAGTTTCTAACTAGGATTAAGCTGGAAGCT
+GCAGAGGGTAAGCTAAAACAGGATGAAAATCCTTTGGTCCTTAGTGCTGGGTGAACATAGAAGAAGAGGCTTGAGAGGTG
+GGATCAAAACCCCCCAGACACCCTGAGTAAGCAAAATAAATAGCTAAGTAAAAACAGCATATCAGAATTAGTATGGAATT
+GCACTATGGAATTACTACGCTATGGAATTACACTATAGAGTCACTGTTCTTAAAACTATCCAATATAGAAATTTTATAAC
+CTTTATTGTCCCCATAGTCTTTTTTAGTTTTTCTACTACTAGTAAAATTATCAGCCTTAGTATAAAAATGTGTGTGTGTG
+TGTGTGTGTTTGTGTAGTAAAATAGATTTATTTTTCTTTTTTTACTTTTTATTTTTTTGATAGTAAGTATATATATTTGT
+GGAGTATGTTAAATGTTTTGATATAGGAATACCATGCTTAATAATCACATCCTCTAGAATGGGGTATCCATCCCCTCAAG
+CATTTATTCTTTGTGTTACAAATAATCCAATTACGCTCTTTTAGCTATTTAAAAACATACAATTGTTATTGATTGTAGTC
+ACCCTGTTGTGCTATCAAATAGTAGGTGTTATTCATTCTTTCTATTTTTTTACCCATCGTATACAATTTTAATTTTTGGC
+AATACTAAGGAAAGTATTAATCTTGAAAAACATGGTCTGACTGCTTCTTCAAAGTCTTACTTATACATATACTTATTATA
+GACAACACAGCCTTTTTGGTAGCACTAAATGGATTCCCTAACATGTTCAAAAAAGTTTATTTCTTTGGTGAATATTTATA
+AATGATGTTGGAATCATCATTTTGTTAAATAAGGGACCAAAAAGGAAAATGAATTTTAATGTCTAACTTGCACATAAATA
+TAATGCTGAACTCCTCAGATTTAATTACAATTACTTTTTCCTCTGTGAAGGTTGTTCATATGGCCATACCACAAAACAGC
+AGATTAGTATAAATTACATTATTTAATGAAGAATACGGAAAAAATATACTTAAAATTATACACAGATTTACAGTTTAAAT
+CATTCAAAAGGAAGTGGGTATTCTGCATCTTTTCAGGTTCAGATCTGAGTTCATAAGGTCCTAGGTAAGCTGTTTGAATC
+CAGCTTCTTTCCAAATTTAAATGTATCCTTGCTCTTTTCCTGTTGGCCTGAAGTCCCCTGAGCAAGTGTACCAATATGGT
+GTGTGTTTCAAATATCTCTAGGGATAAATGTTCTAATCCTATTGTAAATTGTAAACGTATCTTCTTCAGAATTCACCAAA
+GTGATGCTCACAGAAAAAAAAAATCACAGCTCAATTTTGTAATTATAACAGCAAAATGTGCACATCTCTATATACATATA
+ATTTGGGGACACATTTTTTACTAATCACGAGAAATTAAGAAAACAGTTGGATGAGTGTCACTTATAGCCATGCAAATGCC
+TTGGCACCATTCATAATTAATGATGCTTTATGTTTTAGGATGTGACCTGAAATGGAAGAATTTAGATCATAAAACGCCCA
+GGGCTGTGGCTAAGGAAGGCGGCTTCAAAGCAGCAAGCAAAGAAATACGCCGAGCAGAGAGAATCGCTAATAAACTAGCC
+AGGCCAGGAGCCAAAAATCCAAATCCACTGTGGGCCCTTAGACTGCACGATTGGTCCGTAGAACGTGAGGCTTTCCTCCG
+GGAAGCCTTTGCGGTTTTAGACAGGGGTGATGGAAGCATCAGCAAGAACGACTTCGTGATGGTGTTGGAGGAAAGGCAGG
+ATTATGCAAGCTCAGAACAGCTGGCTGCCATCGCTCACCTTCATGAGAAAACCCGGGGAGGAGGGGTCAATATTAATGAA
+TTCTTTAAAGGAACCAGATATTTAAACAAGTCTTTTGTCTTAGGATCGTATGGACCTAAGAAAAAGGAAAAAGGGATGGG
+CAAAAAAGGAAAGAAAGGGAAATTTGTCTTACCCCTTCCAATCTGTGTCATTCCTGAGTACGCGTTTCCACGCCGGCAGG
+ATGGTGGGCCACCGTATTACATGATTGAGACCTACAAGAATGTCACTGATAGCAGCCGGTTTAATAGAGATCATCCCCCA
+GAACATCCCATTCAGGATGACTCTGTTTGGTACATTGATGATTCAGAGAAGGTATTTTCAAACATTAATATTATCACCAA
+AGCAGGGGATCTGGCTTCTCTGAAAAAGGCCTTTGAATCAGGAATACCTGTGGATATGAAGGATAATTATTACAAAACTC
+CGCTAATGACGGCGTGTGCAAGTGGAAACATAGATGTGGTCAAGTTTCTTCTTGAAAAAGGGTACGCGTCTCCGTCGGGT
+GTGGCCTAAATTTTCACGAGTCTTCACATTTCAAGGAAGTGAAAAGATGACCGAATTTTAGTAATATAGAAAAGTGCTAC
+TTTATTAAAGTCTATAATAAGGCTTTAATATATAAAAGCCTTTTATTAACAATTAACTTATAGGCTTCATTAAAGAATGT
+TATTAAAGAATGCTTTATTAAAAATGCTGATTTATTACAGGCTTTATTAAAGTCTAACAAATGCCCTATAAAAGTATGTG
+TTCATATTACTGATGGCTAGGGTTAGAAAAGGAGAATTTATGTTAGATTTATGTTATGTTAGATTCCTAGCCCCTTTATT
+AAAGCCTATAATAAATTAATTAAGCCTATAGATCTTATATGGGAAACCAGGATAATGGGAGCATAGGATTGTGTGTGCAC
+AAATATATTTTGCCAATAGCAATTATGCATGAAGTGAAGGCCAGTAAGTGTTTACTCAGGGTGAATAGAGTCTTCTGATT
+ATTACTGGCCTTCTATCAGTGGCAGCACGGCTCAGTCCTGTCTCAGAGCTTCACCTAGATGGTGTGGAGTCACTTTTTAA
+TCTGCCTCTATTTGAGGATCTCAACAACTTCAAACAAGTGGAATGGTGGCTATAATAACATGCTTTGGTCCCAAAACCTG
+CTGCCCTCCTCCAGTGTGGGGCATCCTTTTCCTTGGTCATGTAAATATTTTTAAAATATCAAATCATGAGAGTTTTATAA
+TACTTAACTTTTTAGACTACAACAATCACTCAGGTTTAATTCCTTCTTCTACCATAGTTGTATAACCTGGGTGAGTTTCT
+TAATATTTTTGTGCCTCAGTTCTGTCATGTATGTAATGAGACTGTAGTGGGACTCATGCCACAAGATTAGTTGAAGATTA
+GATGAGTTAAAATATGTAAAGCTCTTGTTAACACAGTAGTTACATACTTGAGAAATAAAGTACTAATCTTATTAGTACTT
+TAATGTTAAAGAAAGACTCAAATTTGTATATTATCAAGAGAGCAACTAATGAGAATATGAACTTCTGGACAGTGAATTTC
+TAAAAAGGACTAAAATGTTAAATAATACATGGTAATATTTATAATTTATTAATGAAGTTTCATTTAAAAATGTATTACAT
+GCAGAGAAAGTCACAAAATGAAATATGTCCTGTCAGCTTTTATCTTCATTGGATCTGATTTTATAATTTTCCTTGATTTT
+AACCTTTGCAAATCGTAATCCCTAGAATCTAAAGTGCCTTTTAATGTAGATATACACAAAGAAAGTTAAGACTGTCAAAT
+AATGCAGAACAAAGAGGTTTGAAGAAAAATTATGGCACAAAGTTTACAATTTAGCAACTTGATAGAACTCTATGAAATCT
+TACTTTGCATTTTTAGTGTCCAGTCATTGGAAAACCGTATTTTTAGTTTGTTCATCTATCTTATTTTACAGAGCTAACGT
+TAATGCAACAGATAACTTTCTGTGGACTCCACTTCATTTTGCATGCCATGCAGGCCAACAAGACATTGTTGAGCTTCTTG
+TTGAATCTGGAGCTTTAATAGATGCAGCTTCAATCAACAACTCAACTCCTTTAAATAGAGCCATTGAAAGCTGCAGACTG
+GATACAGTAAAATACCTACTTGATATTGGTGCTAAATTCCAGCTGGAAAATAGAAAAGGTATGCGTTCATATTATTGATG
+ATTAGGGTTAGAAAAGGAGAACTTATGTTAGATTCTAAGCCCCTTCTGATTCTATTCTCGTAGGCTTGCATTGTCCACTG
+GGTTCCTGGTACAAACACAAGCCTGAGTTAAATGTTATGGAAGTTTATCTTATACCAAGTGCTTCTCCACTCATTGGTGT
+TGGAGGGCCAATTCTTTATGTTTATAATCTTTCATGGACTGAAATACTTTTGTAAAATATAATAAAATTATTTATTAGAA
+AAATAAAATGAAAAAAATACAAGGCCAAAACTTTGTTTTTAATCTTTAAATTTTAATGACAGAAAATTATATTAAGTTGC
+TAAAAACATTTATAAATACTTAGTTTCTGTAGTTACTTCATTATGGATCAGTAACTATTAGTCTGTGGACTTGCACCAGT
+TTGCAGATGACACTGAGTTATTACACTGTTATGACTGTTAATTACTAATAATTGCATACCCTATAGACTTTGGGTTTCTC
+TTAAAGCTGGACAAAGCAATTCCTGAATTGAGGCTTCCCTGACTCTAGGAAGTAAACACACAACCTACCTTGTATCATAA
+ATACCATACAGGAAAAATTAAAAAATCATTTGAATATATTTGAGCTCCTTGTTTGGAAGTACCATTTTCCTACATTGGAA
+GCCATTGCTTCCTTTGACTGAGGAGCATCATACCTCTGGTTGGTTCAGGCTGCCTCTGAGTCACAGATACTACTGCACCT
+GTCCTGCCTGATGATATTATAGGTAACATATTGCCCAGCCATCTGCCTGGAGCACCTGTGGCCACTAGCTTCCCCAGACC
+TTTTGGAATTTACCTCCTTACATCAAGGAAAGGGGAAGATAATGTAAAATTATCTATCACATAGAAGGAAATTTGCACAA
+TGTGGGGAGAGAATAATCAATTTGATATTTAAGATGTAATATAATATTTCCTATCACTCTTTTAAGAAACATAGTAAAAA
+AAAAACTAAGGAAAAGTAATTTTCTGTATTTAGATGGCACTATAAATCATGGGATCTTAAAGCTGGGACCGCCATAGAGA
+AAATCTGATCTAGCTCTGTGAACTTCTGTATATATACCCCTTGCTTTAGGAATCACCCTTATTAGGGCAGGCTTAGAGCT
+GATGTGCTGAGATATTTTTCTACATGGTTTATCATTAATATGTTTATAGTCACTCTGAATTTATGTTTATGTTTCAACAG
+GGCATAGTGCCATGGACGTTGCAAAGGCATATGCTGATTATAGAATAATTGATCTGATTAAAGAAAAGCTAGATAACTTG
+CCGAAACCAGCAGAAAATCAAAAACTAAAAGGCAAGACACCTCCTATACTGAAGACTGAAGGCCCTGAAATTAAGAAAGA
+AGAGGTAAGAAAAATGGTTGACTACCCATTAGTAACTGGAACTCTTTAAAAATCTGTTTGGGGGAAGGCAGAAGCACTAT
+TTACATATTGTTATACAACATGGGTAAGATAATATGCAGTGCATACTTTCAATAGCATTATTATATAGTACTTTTAAAGA
+AATATCTAATAAAATTCTTATCACTGAAGAGCCTAAAAAGTACTCACTGCAAAGAGAATCCATACAAAAACCAAATGGGT
+TCAGCCAAATACAATTAGAATGTTGAAAAAGAAAACCAAAACTGGTGTTGGATTTGTATATATTTGGTCCTGAATTATAC
+AGATTTGATGAGATGGTCCAAATAGGGAAGGAGATTAACCCTTATGAGGAGTGAAGTAAAGCATGATTTTTCTTCATTTT
+GGTCTCATTACCACCATATCCCCTTTCATTGCCCACCACTTCCCCTTCCTCCCACCTGCCGCCTCCACCAAGGAATTTTA
+ATGAATACCACTGAATTCTGGGATACGGAGCCATTGTCTCTAGAATATCCTGCAAAATGGTTGTAGGTTTTCAACGTTTC
+TGGTAATCCGGAGTGATTTGGAACTTCTAAACTGCAGTGGAAGGTTGAAGTTGTTGCCCATGGGAGTTACTGAGACATCA
+AGACTTTATCATTAGCAGGAAACCCCTAAGTGTGTCTCTAGATTGGAAGCTTTTCTTAAGGGAGCAGAATATCTACTGGA
+GACTCTTGGCTGACACAGGACAAGAGATGAAAGTCAAGAAGGAAAAGTTTTAGTGTCATCAAATTGGAAAGCATTGGGGA
+AATGTAGGTGGTAGGGAGTAGATAGGATGAAGGGATTGTAGACAGACAGCAGACTTATAAACATGGAGGATGTTAGAATG
+GCAAAAGGTGAAGGAGGATCAGTCAACTAAAGTTCTTCCATTGTCATTTTAGTAGGCCACAGATCAAGGATGGGTATCTC
+AAAGACTTACGTGATCTTCTACCAATACTTCTAATTAAAGTTAAACAAGGCAAACTATCTCTCCCCATCCCCAAAGTTAT
+AATAATATACTATTTTATCTCAAATTATAACTTGTACAGTAGCATTTTTAGGTTTCTTTCTGGCTTCAGAGTAAGATGTG
+AAACAGCAAATAATATTAGAAAATCAGTATAGATTTTTACAATTTATAATTTTTTTGTTCTTGTTTCCAGGAACTGCTGT
+CATCAATTTATGGTGTACCAACCACATCAGAGGGAAAGAAAGTACAGAAGGGTAATGTGGTTCATCTGAATTCATTGATT
+ACCAGTGGTTATACTAAGAAAGTGGATATCACATTTATTCCACGGAGGGTAAGTGCTTCGAAAAGATCTTCATAGCATGG
+TAGAAGCTCATAATGTCATTTTTTCAAAAGCTTTTTATGTAGAATATCCAAACAAATCTGCACAGTTTGTCTTTCATAGT
+TACTACTTGTGAATGTTCTTTTGCCAGGTTCTGGTACTTTCAACTCAGTATGCCCAAGTGATAGCTCCCGGCATCTAGTT
+CTCTTTCTATTACTATTTCTGTATTGAGATTTACATTTTCCTAGAAAGGTTGAAGTCTTTCTGAATCAATGGCATGATCT
+GCAGCTGTTTAATCACTAACTCCTAAGGATACTTTTTTTTGGTACTTCTCAGACCCAACCCTTCTTTGTACTTAACTACT
+ACCAGCTCATGCAGGAGTTTCATATAAATCAAGACTGGATTTATTCAGTAGCTTCCGAAAACCTCCACAACTAAAGAGGT
+GGAGACATCTTTTGGAGAAAAGAGTTGTCTTTCCATTGGCCATCCAACCAGGCTGAACAAGCAGCTCCCCTCGCCTTCTA
+TTATAAAAATAATTTTGACTTCTACATAAGACATATAGGTAGTAAACCTAGATGATCTTCTAATTCCTTTGCTTCCCTGT
+TTTGGCATCCTAAAAAACGCACAGCAGCTGGTCTGAGCATAACTGGCCAGAGAAGTAACTGAATAATATTTCCGACATTA
+TTTGTCTAGACAGTCATCATCAAAAACAGTGATGCTCGTGCAACCACTAGTGTCATATGAGAGGGGGGGAAGAAGTTAAA
+ACACATCTAAAAGTAAATTTAATAGTTTTAAGTTAAAACTATAAACAAATTTTTAAGTTAAAACTGTGTATTGAAAATTG
+TCTGGATATTAGCATATTTACTGGCTGTCATACTCATACCTGCTGGGGTATGGCTATTTTTCTTTTTAAGATTTGGAGTC
+CTGAAGCCACAACAGCAGAGCTGATCAGGAAGAGGGAACTACGGCGAGAGAGGTTTACACATGAGGTGGACTTCGACGAT
+TTTATGATGCCTTTTCAGAAGAACATCACAGAGAAAGCTCGAGCACTGGAAGCTGCCTTGAAGACCTAAGTCATAGCAGT
+TATTTCTTGGGGTAAATGCTTTGAGGCCCAGGGACCAATCTTTGGAGAAAGTAGATATTTCCATCAAAGCCAAAGCAATC
+CATACACCAAGAACTTGTTACCAAGAATTTCTTTTTGCTTTAACAACTATAAATATTCTTAGCTGTCTAGAGAAAAGATG
+TATGTTATTTTGAAATGAATGGTATGTCATTCTGGATAAATCCCCAAGCCCCTTTATGAATGTAGTGAAATACATGGCAT
+GTGGGTTATAAACGTTGCTGTCAAAAGATTTACCAGGTCTACACCATTATGCTTATTATTTTTTTAATTATCCTTTTTAT
+TTATTAAATAGAGACAGAGTCTCACTATATTGCCCCAGCTGGTCTCGAACTCTTGAGCTCAGGCAGTCCTCCCAACTTGG
+CCGTCGAAAGTGCTGGGATTATAGGCATGAGCTACTGTGCCCAGCCTGCTTATTGAAGATACATTTCTAGTGGAAATTGA
+TGAAACAATTTAGGTTTTGGAAAAAGAAGTAATATTTAATCTTTTAAGGAAGGAGTTTATTTCTCACATGCCATTATAAA
+CTATGTGTATTAGATACCTTGGGTTGAATACACTTTTAACAGCCCTAGATCGATAGCCCTAGCCCTAGATAGATAGCCCT
+AGCCCTAGATAGATAGCTATTAGGTTGGTGCAAAAGTAATTGTGGTTTTTGCCATAACCTTTAATTGCAGCAAAAACCGC
+AATTACTTTTGCTCCAACCTGACAGCTAGGTATCTAGCTAGCTAGAAAGGCAGATAGACAGATATATAGATGATAGATAG
+ATAGATGATAGATAGATAGATAGATGATAGATAGATAGATAGATAGATAGATAGATAGATAGATAGAGATTTATTGCAAA
+TAATTGGTTTACATAATTTTGGGCCCCAGATGGTTAAGTGCTAAATTTGTAGGGTAGGCTATCAGGAAGTGCAGGCTGGA
+AACATGCAGGAGCTGTCCCTGAAGTCCACAAGTGGAATTTCTTCTTCATCATGGAAACCTCAGTTTTGTTCATACACCTA
+AATACTTCAGTATGTCTTTCCTAAAATAAAAGGCATCCATTTACCTAACCAGATTACGATTGTGAAAATTTCAACTGTTG
+TTAAAGCGACTGATGGGATCAGTTTCACCTAGATTATCTAAAGCAGTCTCCTTTACATAAAGTCAATGAATTGTAAATAT
+TGGTTGCATGTACACAATTCCTTCACCACAACACCTAAGATTAGTATTTGATTGAGTTACTGGGTGCTACAACCTAGCCA
+AGTTGAGTCATAAAACTGACCATCTCTGGAGGTGTACTCTGCAGTAAAAAGCTAGCTGAGGCATTATTCACACCAAGAAA
+TGCAGTGGGTACCGAACAGGAGCTCCAAAAATATCCAACGTCTCATTGACAGAAAGAAGCCACTTTAGAATCTGCCTTAC
+CAGAAGGCACCAATTGCAGATTACAGCTCTGTCAGTCACGTATGGATTAAACCCCTTTCCTCTAACCTCCTCCCTCTCTT
+TCCAACCTTGGAGGGACCTTCCACAGGAGCCAGTTAGGGAGGAGAAGGGAAGAGAGAGAATGAAGCAGGTAACCAGGCCC
+ACCCCTCTGGAGGAGACAGACCTGAACAGGGGTTGCTTACAAGCATTGCATTTGGATCCCAAGATTTGCTATTACAACTA
+AATTTTGTCTGTTTTGTAAGGTTTTGGTTGGTTTTTTTGTTCTCTCTTTTCTTTTTGTTTTGACATAATTTCATATTTAT
+GGAAAAGTTGCAATAATTTGACAAAGAATTCTTATATGCCCTTTACCCAGATTACCTAAATGTTAACCACTTACCACATT
+TGCTCTATCTCTTTCTCCCTCTACATATATGCATATGTATGTTTAATTTTTGTCCCCGAACTATCTGTGTGTCCGTTGAA
+GACATGATGAAGACATGATGTTCTTCACATGATGTTTTCATATTTGTGTGTCAGTTGAAGACATGATGTTTTAAATATTT
+CATTGTGTATTTCCTAAAATCACGATGTTCTTTTGCATAACAATATTACCATTGGCAAACTAAGGAAATTAACACTTAAG
+TGAACTATTTAATCTATAGACCTTATTCAGATTTTGCTATTTATCCCAATAATTTCCTTTGTACTCCCAGGTCATGAAAT
+ACATTCGCTTGCCCCATCTCTTTAGTCTCTTTTAGTTTAGAACAACCTGTATTTCTTGTTTGTTTGTTTGTTTGTTTGTT
+TATGACAGTATATAAGAAGAACACAAGCCACTTTTATCTTGTAAAATGTCTCTCAACTTTGGTTTGTCTTAAATGTCTTC
+AAGAATAGATTCAGGTGATGCTTTTGGGGCAGCAGTGCTACAGATGTCATTGCACCCTTCTCAGTACATCATATCAGGAG
+GCACATGATGTTGATACCTGATATAAACTTTAACTGCATTTGCAGTTGAGAGAAATGTAAATAAAAGTAAAGCTGCAGTA
+ATAAATCATAACTGCATAAAATTAACTGTAGTACATACTGTACAACTGTAATAATTTCATAGCCACCTCCTGTTGCTATT
+GTGTTGAGTTCAATTGTTGTCTTTTCTCAAGCTTACTTTATTGCACAAATACAGTATTTATAACATACAAAATACGTGTT
+AATTGATTATGCTATGGGTAAGGCTTTTGGTCAACAGTAAGTTATTAGTATCTAAGTTTTTTGGGAGGCAAAACTTATAC
+ATGGATTTTTTTATTGTAAGGAGAGTTGGTGCCGGTAACTCTCTAGTAGTTTTCCTTGTACTTAATGAATATCTGATAGA
+TAGATACTTTGAAACTATGGAAATTTCCTTCTACTTTCATACTTTCACCTACTAATTTTAGCATCCATGGATGATTCTCG
+CCTAAAATAATTATTACAGTGACAGTTGCCATAAAATAATTTCTACATTTTATCATTCTTTCCATTTCTTACTTGGCCTT
+CTGCTGTTGGGCAGAGCTTTCTTTTCTTCCTTATTAATTTATGTACTCAAATCAGTACAGACTCATGCATTCCCTTTTTA
+TTTAATGCGAATTATATATGTGTGGGGACCAACTCAGAACTCCAGAGTTGGTATAAAGATTATTTTAAGCCAAAGACATT
+TGAGATTTAACATACCATATGCAGGGGAAAAAAAAGTCTTCTCAGACCTTTGCTTATCTGACTACAAGCAGTAACTTCTA
+GGAAATAAGGCTGCCATAAATTCCTCTTCAGGTTTGATCTACTTCTAGAACACAAGTAATCCTACCCCAAATCCCTTCTC
+CAGAAGTGTTGTATGGCCCCGAAGGAAATTTAAGGACCATTTATACCTGTATAGATAAACATTATCACGTACTTTTTTAT
+CTCCTGTTTGTTCACCTGAAAACTCATGTATCTTTGTAAAAAGTCATTGTGTTTACATAAATGTCTTCTCCCCCTCCCCT
+TTGCTTATCAAGATGATTATAAGCCACCGATTTTAACTATCCCTTTCAGTTACTCATCACTGAGTTCTCCCAAGTGTATA
+TGCCTTGCATGCATAAACTTGTTTTCTCCTGTTAAAATGCCTTTTGTCAGTTTAATTCACAGGCTCCAGGCATTGAACCT
+AAGAGGGTAGAGGAAAAGTTAATCCTCCCCTATGTGATCCTTTACTCTCATTATTAATTTTGATGCTCAAATTGTCCCTG
+ATTTAACTAGGGGAAAACCCTTCAAGTTGGCTCCTGATAAAAAGACATTTTGTTTTCTAAAACAATCAGAGGACTCTTTA
+TTTCTTGAATGTGAGGGAAAATGCCTACCTTTGGCTTCGTTCAGGAACAAGGAAAATGAATCTGCAACAGCAGATTTTCA
+GAGGTGATCACTTTCTGTTTTCACTCTAGTGAGTACATTTTTTAAATGTCAGTTTGATAGATAGTGGTGTCCTGGGATCA
+CCTTCAGCCACTTGCAAATATAAACTGAGAGTAAGCATTAATGTTAGGTCCTATTAACTCTCTTTCCCCACATACGGAGT
+GTTTGTATTCATTTTAATGGCTCAACATGTTTAAAGGGGAATTGTGTGAGGGGAACAGTCACACAATTGACTGAGTTTAT
+TTGGAGTTTAATGCTAAACCCAAACATAGCATAGCCACAGTTCTAACAATGGCCCATAAGAACATCAGTGCTATTCAAAG
+AGTGATCCACAGGCCAGCAGCATCACAGGGAGCTTGCTCCCACTACCTGCCGCCCAAAATTATGGTACTGAAGTCTATAT
+GAGCGTAACGAAAATTTTGTAGAGGAAATCTTACTGGCATAAAAAGACTGTCAATGTTGCATGTAAAGTACCTACATTAT
+AAGTAGTATAATATCAGACTTGCAAATGCACAACATGTACATAAATAGTAATTTTTCTGGGTCTTTATGAGTTATCTGAC
+CTTTATTGTTTTATTTTGGCTTACCTGTATTTTCTGTTTTTCTTTTTCTTTTCTTTTTTTTACAATGTACACGTATTACT
+TTGGAATTAACAATTTTTTTTTAAAAAAGAAAAAACACAAGTTAACACCAAAGTCCATAATACCAAAGTGAGTTTATTAG
+TATTTGCCTCTTTTATGCCTTTTTGTGCATTGCATTAAGATTCTCTCAGAAGAACGGAGCCAAGAGTTCAGGAGCAAATG
+ATGTGTGAAGGAAGTGCTCCCAGGGGAGAGCAGTATGGGGTGAGGGGTATGAAACAGGACACGGAAATGAAAGGAGCCAA
+ACAAGGGTGCGCTTTCAGATGAAACCACAGCTTCTGAGTAGCCCGTGGGGAGCTGTAGCGCACAGGCAAGGAGCTGGGCT
+TCTGGGTTCCTGTATCAGTGAGCAGCTGCAGGATTGAGGGTGTACTGGCGAGGCTGTTCCAGTGCTCGAGGGCAAGTCTG
+CAAAGGTCATGGTGGGAACTATTGGCTAGAAAGCATGCAGAAGCTGTAACTGCGTGCATAGCACAGGAAAGGGAACACAT
+GCACTTATCTTTAAATTTACACTTCGGTGAACTGGGTAAGACTGTGGGTTTTATATTAGCAGACCTGGATTTGAATCTGC
+ATATTGTTAGTCATCTGCATTTACTGCCTGCTTGCCTATGGATCAAGTTATGAAGGGTCATGACTCCGAATTTCATCATT
+TATAAAGTAGGGATAATAATACGTCCCTCAATGAACTCTTATGAGACTCTAGTGGAATAATGTCTTTAAGGATGCTCAGT
+CTGTGCCCATGCCTGGTTGGTGGTCAACATATGGTTTGTTTTTAACCATCTTATAGTCTCTATCTGAAAAGTGAACCAAA
+AGAAGGCACAGGAAGGAATAAGGACCCAGACCTGAACAGTCAGAACAGAAAAGGCTTCCTGGAGCAGATTCACTTTCAGA
+CCATCGAAGTCATTTAAACATATGTGTGTGTGTGTGTGTGTGTGTGTGCATGTGTGTGTGCGTGTATATATATATATTTT
+TTTTTGAGACAGGGTCTCATGGAGCGCAGTAGCATGATCTCAGTTCATTGCAACCTCCACCTCCTGGGTTCAAGAGATTC
+TCCTGCCTCAGCCTACCGATTTTTTGTATTTTTAGTAGAGATGGGGTTTCACCATGTTGGCCAGGCTGGTCTCGAACTCC
+TGACCTCAAGTGATCCTCCTGTTTCTGCCTCCCAAAATACTGGGATTACAGGTGTGAGCCATCGCACCTGGCCCTAAACA
+TGTAGATATTTTATCTACATATTTATCTACTTTATCCAGTTTCTGATAATCAGAGAAAAATGAAAAATCCCTATTTGCCA
+TGAGATATACTTACTAATTTCAGTATAGTTCCTGTGGATAGTGGCCCAAAGGTTATCTAGGTAATTACTAGTTAGGTAGT
+TAAAATAATGTCTTAGATCAAGCTTCCTAGAAATAGAAGCTGAGATGGGGACTCTAGTGCAAGTGACTTATTAAGAAGGA
+ACATGTCAGGGAATGAGAGAAGCAGAATAAAGCAGGGAAAGATCTGAGGGAAGGTTGTGATCTCAGGAGGGCATTAGCAC
+CAGCCTAATTTCCTGGAGAGCTTTGGAGGATGAATGGAACCACAGAGGTTTGTCCTGCCCAGAGGGAAGAAGCCTGCCTG
+TTACATCCTGCATCAATCAGTCATTAACCAGCCCCTTGTCAGGAGAGAAACCTTTCAAGTGTCACCTGGCTGATTCGCAA
+GGTGGCTCTGATCAGCCAAAGGCAATCCCCAGAGAAGTGGGTAGCACTTAGCCTCCAACCACAGCAGCTGAGCACTGGGT
+GGGCCAGCTGGTTAAGGGGATCTGGGCAGCGTACCAACAGCATCTGCTACAAATGGCATGCTCTTGTGTGCAAAGTTCAG
+GTGCAAAATTTCAGATGGTGACTCCTTGAGATCACTTTGCATGACACACTTTGTTTTTAAAAAGAATATTAACTTCAACA
+AACTTCTGAACTTGCCTCATTGTTAAGAATAAAGATGAATGAGCAAGAACAGCATTGATAACTGATTTGGCAGCTAGTTG
+AAAGGGGAATCACAACTTGAGTGTTACCAACTTTAGTGTTATCAATGGTTAAAAATGATAAACATGAGAGACAGAAAGAT
+GTACTCAGCCTTATGCTCAGGATAAGGTAATGTCTAATTGGTACAAATCAATCATCCCCAGTGAAGTTAGTTTAGCAACT
+TTTATTAAAGTCACAATGTAGCCGTGAAAAGTGCCCAAAAGAACTACTGAGCCTGTACTTTTTTTCAAAACGGGTGGTGT
+AAAAAGAGAGAAATGGGAAAATTCAAATGCCTTCTCCACACTGGCATAAATTTGAAGTAAACAGTCATCTGAACTATGGT
+ATTATTAACTATTTACACAGTATGTATATTTCATTATATGGATTTGTATTTCTGTAACAATGCATCCTAGGGGAAGTTTA
+CAAATGAAGGTAGATGGGGTGTTTGTGTATATCAAATAACCATGTGCTAATATTCATAGATCCTTAGGTAATTATTACTA
+CATCATGAATTTTATCTGAGTAATAAGGAAGAACTGAATATTCTGTTGAGATTCTGGAACTTACTGGAGATTTCTTTTAT
+CCTAGATAAATAATTGAGTATTGTGTGATGTATGATGTGATACGTGTGTGTAGGTATGTGTTTATTCAGGATTAATGATG
+GTATTTTCCTTTTATCTTCAGTAAATAAGACTTTGAAGAGGAATTAATATGCAAAAAAAGATTTATTTTTCATAGGCTTA
+ATGACTTTTTGTAAGATGGTTTGAAATAGCCATATATTTTTCTAAATGTGGGAGATTGTTATGTTGTTCATTATTGAGAC
+TCGCAGGCTGCAGTTCGATGCTTTCCTCTGAAAAGCATTAGAAGTGAGAAAAATAAGTGAGAGAAATAAGTATTCCCTCA
+TAAAAATTAAATAATGAAATTAACACATAAAGAATTAAATGAGCAATGAGTATTATTGATGTCTGAATATATCTCACACT
+AAAGCAACACTTCCCAACACTTCCCAAAGCTAACAAGTCAGCCAGAAGGAGAATTGATCAGCAATAGAAAATCATGGAAT
+GAGAGTACCTAGGCAGACCTTGTGTGTTCAGACTCTGCAGACCTCTGGTCCATATAACACATGCGTCTCAGTTAGAGATT
+GAGACTTTTCATGATGTCCAAGGTTGCTACTGCCAGAATGCTGAATCTATCAGCAGCCCTTGTTCTACATACAATTTGGT
+TTTATTCATTCAGCATATATTTATTGAGCACCGATAATGAAAATCCTAGGTCTGTTCCTTGCTCTCTTCATGGTGCATGA
+CAGGAGAGCAATTCATCCAGTTTGCATGTCCCAGGTTTCTGCATTATTTGGCTTCCAGTAGGATCCAGACAACAGGAGAC
+AACAAAAGATGAGAGAGGGAGGAGAGGAAAAGCCAGGTAGTTCCCCCTTTCCATCTTTGCCTTTGTCGGCATCCCCGTCA
+ACAGCTGTGTCTGTCTCCCCCATGACTCCACTCCCCACTGTGTGGGGAGCTGTGTATCCTGCTGTGGTCCTAGCCTCCCT
+CATGTGACCCTGGCCCCTAGGCTCTGGTAGTACTACCCACTCCCTCTGTCCCTTAAGCCTCCAAGGGTGGTACCAGCTTC
+TTTTGTTGTTAGTCTCAGGGCTGCCTCACTGTGCTCTGGTTGTTTCCTCAGCCTCTTCTATTGCTTGTGCAACCAAATCC
+TCCCATTAACTTCCATTCTGTAGGAACTCAGAGTTGTTTCTGCTTTCCCATGTGGATCCTGATTAATCACGTATCAAGCA
+CAATGGCATGTCTGGATAAAATGGAATGCAAAACCAGGAATAGCCCCTGCCCTCATGGAGTTTATAGTTTCGTAGGAAAA
+GCATATTTTCATAGAATAATCCTAAAATACTTATGAGATGATTGCACAAGATTGAATAATTGAAGATTATCAAAAGACTC
+ACAAGATTTCACAGTGTGTATTTGAGTAAGGTTTTAATACAGGTGAAGGATACATTAGGACACAGAGCACAGATGATCAC
+AGAGGGGTCTAGGGCTTCCAGGCACAGCCCCCCAGGGTCCATTTTAATCACTCGGTACAGGACACATTTTATCTTCAGAT
+TATATACGATCACCGTAAGTGTGAGTCCAGGTATATAATTTATTGAGGGGTCTTTCATACCCACCAGCCATGTAGCTAAA
+CCCAGGCTGTATACCCTGTTAGACCAGGTACAAACTATCAGTCTATACAGTCCTAAACAATGTAGACAAGCTAATACAGG
+CTGCCTCCAGGGGAATTTCGAGCTTTAAATAGCACATTGCACATTATAAAGTCCTCAGTAGCACATTTCATTCTTACCTT
+GACCAGACTTCAAGCATCCCAGGAGACAAACAGAGGGCTACCCCAGGATAGCTGCAAGGTAACGCTACCTTATACAACAT
+ATGAACTTATAGTGGTGCTACAGAATAGGGGAAATTTTCACTAATGAAATGGTATTTTAGCTGAACTCTAAAGAATGAGT
+GAGGAATTAAGTTGACCCAGAGGGAAGAGAGGAATTGAGTAGAAGCAGCAGCATTTTGCAGTGCAGACCCATTCCAGCAA
+GTGAAATAAAAGCAATGTGACAAGAAATAAATAAAAAATGGTCAGGGAAAAGAGGCCAAAGCCACATCCTGGAGAGATTT
+GTAAGACAACCATATGGATTTGGAAAGTGATGGATTTCCATTTGATTCTGCTGATTCGTCCCTTGTCCTAGAAATTTTCT
+TGACTTTAGCAACACCACAGTTTCCTGGTGCTTCTCAATATCCTTTGTAGCCATGCTTTTTGCATCAGGACTCTCAACAC
+CAGCAGACAATAACAAGAGAATTATTAAGACTTAGAAAAAGAGAATTCTTAATACAGAATTTCAAGGGGTGGATTGAGCA
+AGACTGGAGGCAAAACATCTTCCCTGAAAACCATTTCTCTCTTCCTTGCTTGCTGACATTCTCACTGTCTGGCTCCCTTG
+CTCTCTCTACTTGGCTTTAGTCTGTGTGAGCTCCAACCTTAGCTAGAATGTAGCAAAGATGTCCACCAGAAGCTCCAGGG
+TTACATCCTAGAACTCCAAGTGAAATGAGGTACTCTTTCCCAGCTGGTCCTGTGAAAACCCTGATCGCACTCTCAATGGT
+CAGGCTTGAGTCAAGGGTCTTTCTCCACACCAGTCATTATACAACAGGACACAAGATTCTGATTGGCCAGATCATTTTGT
+CAGCCCATGACTCAGGGAATGAGTCACTCTACCCAAAACACCAGACTGCTACTTGTATAGGTGTGGGCTCCCAAGAAAAA
+GTAGGGATTATTCTTCCCCAAAAAAAGAGTTAGGGATGCTGGAAAGGCAGAAAGAAAAGGGAAGGGAAGAGGAAGGGGAA
+AAGGAAAGAAAAAAAAGAAAGAAAGAAAGAGAAAGAAAGAGAAAGAGAAAGAAAGAGGAAAGAGAGAAAGAGAAAGAGAA
+GGAAAGAGAAAGAAAGAGAAGGAAAGAGAGAAAGAGAAAGAGAAAGAGAGAGAAAGAAAGGAAGGAAGGAAGAAAAATGA
+AGGGAGGAAGGAAAAAGGAGACAGGAAGGAAGAAAGAAAAAGAAGAAAAAAGAGAGGGGAAGAAAAGAAAGGAATAAAGA
+AAGAAGAGAAAGATGGAGAAGAAAACAAACCAAAAAAGTGGCTACTACAACCTCCGCCTGACCATTAAATGTCGGAATTT
+CTCAAGACTCAGTTTCAAACCTTCATCTCACACTACGAGCTCTCCCTAGGCAAGCTCATTTGCACTCTCTCCTTGGAATG
+ACTCCCACATTTGTGTCTTAAGCGTAGTTATTTCATCTAAGCCTACAGAAACATATCTAATTATACTTGACATCAGTTCT
+TGAATTTACAAACGCTCCTGAAACTCAACAATTCCCAAATCAAACTTTAGACCTCTACTGCCCATCCTTAAGTCTAGTTT
+ACTTCGAGTGTTTTCTAGTTCAGTAATGGAACCACCAACATTCTGCTATGAAAGCCAAACTCCCAGATATCATCATGACT
+TGCTTCCCCAAGCACCCACCCACCCCAATTCAACTCCATTTAATCTCATAAATTCTTGTGACTCTTCCTCCTACTATCTC
+CACCCTTACCGCCGTAGTCTACCTTATCATCTTACCCAGAAAACTGTAAGAAATATTCTGATCTGTTCTTTTATCTCCTT
+CTGTCCTGCAGTCTATCTATACATTGATGCCAGACAAGTCTTTTCAAGGATGAAAAGTGATTACTTCAGTACTCTACCTA
+CCATCTAATTAAAATACTTTCAAAATTACTGATTTTCTTAATAATATAAGAAAATGTATGTGAACATGGCCTATGATCAC
+CTGAGCGGTCTGAGCATACTTACTCCTCCCCCAGATCAGGCCTTTAAACACTCTGCTGTAGACAGTCTGTTATTTTATTT
+TATGTACATGCTCCCACCTTCCATTAAGTTTTTGCATAAACTGTATCCTCCTTGTCCAGGAATTCATTCTCCCTCCTCTT
+CCCCAATACCTCATCTGCTTAATTTTCCATTCCTTTTTCATCTCAGAGAAATCATCTCTTTGTTAGGGCAGACTTCCTCA
+CCTTCCTCACGAAGTTAAATTCTCTTCCTATATACATTCATAGATCACAATGCACCTTCCTTCACGGCACTTACCGCAGT
+TGCAACTATATCTTCACCTGTGCAATTATTTGAATAATATTTAATTATTTGACACCATAACTGTCATGACTGAACATTCC
+AAAATATACCACATATTTTCTGGCCACATTGATGGGATTATAACATAATATAAACTTGAATTATTTTAAAATTTGTGCAC
+AACTGAAAATCACGTATGTACTTTCTCAGTGCCTCTTTATGTGGAGAGTTTATCTTATACTAATTCATCCCTGACTATTG
+TACTCCTGAGTTCCTGCCTCTGTTCTTTAGAATGGTTTCTATTTTTTTTCTGCTGGACTGTATTAGCCTTTCCTTTTGCC
+ACTGGGAACATAGCTGTTTGTAGCTATCCCAGGTGTATAAGTAACCTAAATGGCTGCTTATAATTTCTGCCCACTAGGAG
+AGGGACTCCTTTACTCCATTAGTCTCCTGTTTGAATCTTAACTCTGCCCCAGGCTTCAGGATACATAAACACTGCCAATC
+ATTCTTAAGAAGTGAAGGGATTGCAAATAAGCAAAGAAAAAGTAATTTTAAACATTCTTTCCAAATGACCTTGCAAGAGT
+TTTTTCCATTTTTAGAGGGAATAGTGGGAGTCAGAATTGGCAGAATAAAAGCAGCGTATTCCTAGACCATATATCAGCAG
+ATAAATGTGTTCTGCAGCATTAAAATTACCCTTGTGGTGCTCATTCCTGGATGTACCATGTGTTCTGTGATGTAAATCAG
+TACATAGCCTTATATTTTCCTCTGTGTCATGGACCCACAGAATTGCAGTAGCATGAACTGGTTGGCAACAAAATTCTTTC
+CAGTGCTTCATAAAGCCTTTGGCTTTTCTTCATTTGATATGTGTGAGTAGGAATGCACAAAAAACAAGAACAGATGATAT
+TATTTAAACCGTTTGGTAAGAAATGTCTGACAGGAAAAAAGAAAACATATGTATCGATTCAAGTGCAGTAATTCGCTTGG
+CGGTTTTCTATATTATCCTTTCTACTTGTAAAAATCATCTCCAATGCAGCAAAATGTTCTTAAATCCTTTATACTTCATT
+CTAAAAATCAATAGTCCAAAGATGTTAACTCTTTTTGGGCTTTTGGAAAACAAAACAAAAACCAGAAAAGAGAGGATAAA
+TGTTTTCATTTTTATCTAAATTTTTCTGGTTGTTTCTGGTTTTTCCGTCTCCACTTCATTATTACTAACATGGAATTATT
+GATAAAGCTTAAAAGAAGAGAAACACAAATTCAGACTTTAGGAACTCACTTGAAATCCTTACTTAAAGACTAAAAGTCAT
+TCATTTATTCATTTCATTCAATAAATACTAAGGATCTACTATTTTCCAGTTGGCCTCTTTGGCACTGGTGTATAATGCTA
+CACGCAGCACTCACTGCTCCCTTGGTTCCTGCCACCATAATAGGGAGATAGCAATTAAATAAACACAGCTATATTATTAT
+AAATTGGGATGCATGTTACCATGGAAAAGTACTGGGTGGAGTTTTAGTTTGAGAGTCAGGGAGTGCCTCTCTGAAGAAAT
+GTCTACTATGAGACCTAAAAAATGAGTAGAAGTTGTCAGATTAAGGGGAGGAAAGGTGTTTTAGCCTGTTTTCTGCTACT
+ATAACAGAATACCACAAACTGTGTAATTTATAAACAATAGACATTTATTTGGTTCATAGTTCTGGAGACTGGGAAGTCCA
+AGGTCAAGGGGCTGCATCTGGTGAGAGCCTTCTTGTTGCACCATAACATGGTGGAAGGGCAAACAAGCAAGTGAGACAGA
+GAGAAAAAGTAGGCCAAACTCCCAAGATAACTAGCCTACTGCCAAGATAACAGCATTAACCCATTCATAAGGACAGAGCC
+CTCATGACTAACCTCTTAAAAGCCCCACTGCCTGATAGAACTATATTGACAGTTAAATTTCAATATGAACTATGGAGAAC
+AGTTTGGAGGTTCCTCAAGAAACTAAAAAAATAGCTACCATATGATCTAGCAAGCCCACTGCTGGGTATATACCCAAAGT
+AAAGGAAATCAATATATTGAAGGGATATCTGCACTTCCATGTTTGTTGCAGCTCTGTTCACAATAGCCAAGATTCAGAAG
+CAACCTAAGTGTCCATCAAGAGATGAATGGATAAAGAAAATGTGGTACTTATACACAATGGAGTACTATTCAGCCATAAA
+AAAGAATGAGACCCTGTCATTTGCAGCAACATGGGTGGAACTGGAGGTCATTATATTAAGAGAAATAAGCCAGGCAAGAA
+AAGACAAACATTGCATGTTGTCACCTATTTGTACGATCTAAAAATCAAAACAATGGAACCCAAGGAGATAGAGAGTAGAA
+GGATGGTTACCAGAGGCTGGGAAGGGTAGTGGGGAAATGGGAGACAGGGTAGAAAAGGTTAATGAGTACGAACAATAATT
+AGAATGAATGAATAAGGCCTGGTATTTGATAGCACAATAGGGTGACTGTAGTCAATAGTAATTTAATTGTAAATTTAAAA
+ATAACTGTAATTGGATTGTTTATACCAGAAAAGACAGACGCTTGGGGTGATGGATAGCCCATTTTCCATGATGTGATTAC
+ACATTGCATACCTGTATCTAAACATCTCATGTACCCCATGATGCAACTACCATATATCCACAAAAATTAAATATAAAACA
+TTATATAAAAATTTCCACATAATTTTCAAAAGGGATACTCTATAGTGGCGTGGGGAGGCAATCTACACATGAGGGGGAAA
+TACCTGCCACAACTCTGAGGCCAGAAAGAGCCTGGCTTGGTGAAGAGCAGGAAGAAAACCAGAAAAGCTTGCACAAAGAT
+GGTGGTAGCAAGGGGAAGCCAGCTTAGGACAAGGTTGACGAAGAGGACGGGGCCAGATATCGAGGAACTCGCAGGGAAGA
+GTAAGGAGTGTGGATTTCTTTCCAAGTTCAATTGGTTTTCACATTGCTGATAGCATCTCCCAAGTGATTACAAGTCACAT
+TGCTCTGTATCATCCATAAATTATCCTCAAGAACTCCTAGGGCCATAACAATTTTATGAAGGACTCAGCTCTGATAATAG
+AAAGTCAATATTTTTAGTCTCTAGCTTTTCAAGTAGGCAGCTGGAAGTATAGCCAAAGGATTTCAGATTTCACAAATGTG
+AGATCAAGAGACATCTGCTAGTCTCTGTTCAATCTCTAATTTTCCTGTGTCTTTCTAAGTGACACAGAATTAGTAGGCTT
+AGAATTTGCTGTAAAAACTTGTTAAATCCATGCTTTGTTCTTTAACAGCATCTTAGCTCTTCTGCTTACTAGCTTTGTGA
+CTTGGCAAGTTGTTTGACCTCTCAGATTCTGTTTTCTCATGTATAAAATAGGGACACACTACAGTAGTGATTAGCACTAT
+TCACCAAATATTTCCTGCTCTCTCCACTTTGGACACACGGTAGGATTATACTCTCTGCCCCATTCCCTTGTAGTTAGGTG
+GTGTCAGGTGACTAGTTCTGACGAATGAGCTATGCATGGAAGTGAGGTTTGCCATTTCCAGGCTGGAGCTTTTGATTGCA
+AGTGTGAGGCTCTCTGAGCAATCCTCCCCTCTGCTGCCGGCCGCTCTGTCAGCCTAGGAAATATGGAGTAGGATGCAGAG
+TTGCCCATCAATGGACATGGTGGGTTATAAGCAACTGGGACTTTAGGTTATTGACCAGCCTCACTAACAGACTTACTTCA
+TAGGTTTGTGATTAGAACTTGTATTCATCCATTCTTACACTGCTATTAAAAATACCTGAGACTGGGTAATTTATAAACAA
+AAGAGGTTTAATTGGCTCACAGTTCTGCAGGCTATACAGGAAGCATAGCAACTTCTGGCTCAGGGGAGACTTCAGAAAGC
+TTCCAATTATGGCAGAAGGCAAAGGGGCAGCGAGGCACTTCACGGCCAGAGTAGGAAGAAGAGAGAGGGTGGGGAGGTGC
+CACACACTTTTAAACAACCAGCTCTCATGAGAATTCACTCACTATACAGTATCAAGCAGCGGACGATGCTAAACCATTCA
+CGATAACTTTGTCCCCATGATCCACTCACCTCCCACCAGGCCCCTTCTCCAACACTGGGGACATGGTGATTTGGGTGGGG
+ACATGCATGATTTGACATGAGATTTGGGTGGGGACACAGATCCATACCATATCAGAACTTAATGGGATAATAGACACTAG
+GCAATTGTTATACTATCTGGCACACTATGAAGTACTAAATATAGGTTTCTATATTTCAAGTAAGCATTACCGGATTTCTC
+TGCCCAAGCATTTGCGTAGCTTTCAGGATTTAATTTTCAAGCCCAGAATTGAATGATTCAAGTTACCGTTAGAAATATTT
+ACCCTGACATTTAATTATACATGCAAATTATTGGCCATCGTTTGTAAATCATATTTTTGGTTTATGATAATAGGTGATCT
+GCAAGAACCTCCAAGAACTGAACATTACTGTGCTCACAAAAGTAAACTCATAATGGCTGAAAACATATCTAATCTCCTTT
+TCAATAAATAATGATATGCAACTGATAGCTCTTGCCAAATGTAATTGAGATAAATCTTAACTGTTTGAAAATATTAAAGA
+AGTATTTACTTTTAATTCATTACAGATGCAAAATGTTTGGGTTTAAAGGAGAAAAATCAAAAGAAAACATTCTAAAATAA
+TATCAATTTCTTTCTCTCCTTGCCATTTTTACAAACATACTCTACCTATAAACATGCACCTGTATAAATTATGACTAAAT
+CTATTTTTACTGCTTCACAGTAAATCTTCTGCTGAATATATGGGGCCTTAATCTCCGGTGTATTATGTACTAAAGGCTAA
+TAGATTTGGCAGCAAAAATAGACAGTTATATTCCATTAAAAACCAACTCCATAATTCATGACTGGACTCGGGGTCTGCTT
+CCAGCCCAATGGCACACCTGGCTCCCAAAGCTCTAAAACAAAATTTCCAGTTTTATAATTGCCAGAGTCATAGTTTTCTC
+CATTTAAAAAAACACATCAATATTTATGTTGTTTTCCATGTGCTCTCAAAATGACAGCTTAAATGATTTGACAATAAGTA
+AGGTTTGATTAAATTCAGTAGTCCCTTGACAGGGGTGGTTTATTTGATTATTCAAATTATTAATCATCATGCCATCTATT
+TAATTTTTTAATACTTAAATGAATTTTTTCTTGTAAAAGAAAATCTTGTAAACAAAATCATATCAGAGAAAGCCCTGCAA
+ATTACAAGGAAGAAATAATCACTCATAATTCCAACCCTAGAAAAATATCACTGTTGACCACTTGAGGACTTTACCCTAGT
+CCTTTTATTCTGCAAACATTTGCCATAGAGGTGCAATAGAACACAAAATAATATATGCTGCTTTTAAATATTTGTGTATA
+ATTATAAATGCAATCTATATAGTTCCTTATAAGGAAATATGAAAATTATTAAGTCAAAAACATAACCCAGGGAAAAAACC
+ATCAGAGATAGTCATTATTCATATTTTAGTGCTTCCCCTAAAAGCAGCCATTACTTAGCTACTTTAGCATTCACTCAAAA
+GGTGTATTCTGCAGAATGGTCCTTAGCAGTGCAATTGTATTTTCCAGAGTGCCAAACATTTATTCACTTTTGAGTTCGAG
+CACCCACACACGTCATGCATTTGCAATATGAAGCAACAACAACGTGTTACACCCTGGTATTATCAATGAAATTAAACCTA
+GCCTGTCCTCTCCAAGAGCTCCAGGTCTGAGGAGGATGTAGACTAGGAAACAATTATAATACAGAGTGATTAATGCTAGG
+ATTTGAGATATGCTCAGGATGCCATGGCACTGCGATGGAACACAAACCTCGTCTGATGGAGGAGAAGGTTTCAGGAAAAC
+CAAAAGCTTCCTGGGGGCTTTGCAGTAAGGCTAGTTCTCAATGGATGCAGCCAAAGACAGAGGAATAGAGTCACAAGTGA
+GCATATGCAATCTAGGATCAAAGGCAATTCCATATGGTTGGAGGATAGAAAGATAGTGACAGGAGATGAGGTGGGAGAAG
+TTGGCCTGCAGGATTTGTATTCCATAATAAGAAGCCTGAATTTTATGCAAAATACAACAAAGCAGTTAAGCAGCAGAGTG
+ACATATTCAGGTATGCATTTAAGAAAGAGCACTCTGGCTGCAGGAGGGGACAGATAAGACATGTGGCAAGGCTGAAGCTA
+AAGAGAACAGTAAACAATGGTCATCATCCAGATTAGAAATGGAGAGAGCCTAAATGAAGGCAGCAGTAATATAATGAATA
+TTAAGAAAGTCAAGGAGGCTTAGACTATTCCTATCTACATGCAGCTTGGGGATTAACCTCAGATGTTGATCATGCTTGCC
+AAATTATTAATAAGACAATATCTCAAGACCTGCCTTGATAATCTCCTTTCAGTCTTATGATATGAGGTTAAAAACAAGGC
+AAATAATATATCTTGGATTCTATCCTAAAGGGATCAATGTAAGAAACCTGGCTAAAATTCTGACACCATAATTGAAATCC
+AGGAACAGAGGCCTATGCTACATCACATACGGAGAGCAGATGGCTCAGAGTGATCATCTCAAGAACTTCTCGTTGGGCAA
+CGTGACTTTTGCAAAAGAACTAGGAAAGTGTTCCTTTGGGCCATGATGATTTATCAAGGCTAGAAGTTGAAATAATAAAT
+CTATTCTGAACAATATACTTACTAAATCGTAGTCATTTATAGCCCTCTAATGAGACAGGTACTCAAATGGGTACACATTT
+GCTAGGTAAACAAATGTATTGCATGGGGAATTAGGGATTTACAGCTAGCAGGGGACATTGAGGCTCAATATTGATGGTTT
+GCTTTTTCCATTTCGGTTGCTAATGTGATTTATTTGCAGTTTTCTTAATCAAGGTGACTGACACAGAAACTATCTAAATT
+TTTTAGCACAGCAGTTAAAACAGTCCACAATACATAATTTGGCTCCTAAATAATTTCTCCCCCAACAATTAGAACTTTCC
+GAAACTGCTTTAAATCGCCTTATGAGGGTATCATGAGTTTGTGGTCATTGGAAGTATTAAAAGAAAAAAGCCTGGAATAT
+CAGTAATGAAATGCATCAAGCAGGAATCATACATACATTAAAATACTGGTTTATAGTTTTGAGGGCGTTTTCTTTTTTTT
+CTGTTGCAGCTATGCGAAGATATGCTTATGTATTAATTGTTTTTCTTGCTAACACTGTATAAGCAAATTCAACTAAATTG
+ATATAAGATTTTGCATACAAGATTAGAAAACCTGATACCTAGAATTATTTTAACACACATTTTTCAAGTTTGCTTATGAC
+TTCACTAGGTTAGAACTTCTTAAAAAGCAATGCTTTAAACATTTTATTCATTCAACCTTTATTGAAAGCCAACATTGTGT
+TGCTGGATATGCAAAAATAAATCATGTATAGTGAGCTATTAAAAATTTTTATCAACTTACAAACTGAGTACAGGTACCTA
+CTCTCACTGTAAAGCCTTAGCAAAAAAAACTGATTAAAATAATGCATAATAGTACATAAAAAATTAAAATCTTCCATTAG
+AGGAACAAAAATTATGAAGAATTTCTAGGAGATAGAAAATAATTATGTACAGAGAAGATAATTCAAAATACATGTTGGAG
+AAAATTACTCTTTCAAAAGTCAGACATGTCCAGAGAAACTGGAAGGGAGGAAGATAGAACTGATGGGAAACATGGCAGAG
+GGGGACACCCCCATCATGTGATGGCAAGAAATGCTGTTTCCACCTGAATGAATTCCCAAAATCTAAAAAACAGGAAAGTG
+AGATCTGAGCTGTATTCATAATTCTTTAACGTTTTCAGTATTCTCTCCAGCACTGCTTACTGAACTTGTTCAGGAGTGAA
+GCCACGTGTCCCCTGGGAGTGTGCTGCACTGGCAAGAAGAAATGGATGACTCACAATTTTGACAGTGACTCTATAGAAAG
+AAAAAAAATCTGTGACTCAGGGCCTTGGGGAGAAGTCATCTCTTCAAAAACTCATCTGATTTTTTTTCATGCCATGTAAT
+TACTCAGAGAACATTAAGAGTCTAAAAAATTTCAACACTGGTGTCTAAGAGATATGGAATTACCTTTTGCCTGAGATGCT
+GGAAGCAGTGGGGGCAAGTTTGTAACTAGAAAGATTTGAGCAAGTCCAGGCTCAGGTATCCAAATTGCCCTCTTTGCTGT
+TGGCATAGGGGATTGAGATTTTCTGGGTCTTAAATAAGGTTTTATTTTTATCTAAGGAACTATAGTTTCTATGCAAATCC
+GAGGGGGCTTAGAGAAACCTAGCGTGGAAAAAAGAAACTGTCAAAGCAGTGTTTGGGGAAATGAGATGGGGAAATTTCAC
+GAATAGTGGGCATAGAGAAACTGACTAGACAAGAGAACGTGAACATTATAATGATATCAAATGGAAAATTTGAAAGTTTA
+TTTTGAAGGAGATGGCTAAAAACAATTTTATTCTGAATGAGAATGTGATTGCCAAGAATCAGTCTTCAAGCAATCCAGGA
+TTTAAATTTCCTTGTTCTAGGCTCAGAATTTAAATAGGCAAAATGCAGGAGTTCGAGGCTGTATGAGCTATGATCATGCC
+TCTGCACTCCAGCATGGGTACAACAGCGAGACTGTATCTAAAAGAATAAAAATAAATAAAATAAAAAATAAATGGGCAAC
+AGGAAAGACTTTAAGACCACTCTCCCCTCGCTGTTCACATGGCTGGGTTCTTCCCATTATCCAGGTTTCAATTTTAAGAT
+CACATCCTTACGTATAGTTTTCACTGAATACTTAGCAATTTGATCAGCAATTTGATCTTATATAACACATTGAAATGCAA
+CACGTGTGTTTCAAAGTCAAGTGATTAGAAGACTGTGGGCCCTAACTGCTGTTTATGGAAGTATGCATCCAAAGCAGATG
+CTGTCAGAGGTCCCATGTACACCCAATTCCATTAACAGTCTCTTCTGATTCTTTTTCTCACTGACAGCCTCTCAGCAAGT
+ACTGGTGCTCTCCCTGAGGATTTTCTCTGGCTGCAGGAGCGTGTTTCAGCGTGCAGGGCAGACTGGAAGAATCTGTGAGT
+TGATGCCTCAGGAAGCAGCCCTCAACAGACATCAGACAGGAAATGCTGGATAAATATCCCAACTTCTCACCTCTTGGGTA
+GGACAACATTGAGGAATGTCCTGCAAAGTCTCCCTAGAGTCCCAAAGGGCCTGAGCCCCAGTTACCCACAGCAGCAATTC
+TGCACATTAGCACACTCCTTTAGTAATTTTCCTCCTTTCCTGTCTTATTTCCTCACTCCTGTCCATTGTTTCCTGGGATC
+ACTCCCGAAGCAAATCACTTGCACTCACATCCACATCTCTACTTCTGGGGAGTCCCAAACTCACAAGACATTTTTCTACA
+AAGGAAAATCAAATATAAAAGAAAGTAGAGTAAGAAGAGTTGGGACCAACAATGACTGATGATTAAGCAGCAAGAATAGC
+GGCAATTAAAAACTGTAGATAACACTATGGATGCACTGATGACACAGTGTCTTGGCCAAGCCCTGTGGGAATTCATACTG
+TTTGCCTGTGTGCAGCAGCGGTGCAAGTTTTGCAGTGTTGTGAAATATTTAGTGGACGCTGACTTTCACAGAATCCTGAA
+TTACTGTTCTCAATAAGCTCCAGTATTGTGCTATTTCAAATGTAGAAATACCAAAGTTTACTAATTGTGAGCTCTTAATA
+TAGTAACTTACTAGCAGGTTATATAATGAAATGATATCATATGGCATATCATTTCCCATAATAAAGAAAAAGCTCTCAGT
+GTTCTACCAGTTTGGGCCCTTAAGACTCCTGATAGGCAGGTAGATGCTGGACCACACTCTCTTCCTGCACTAAATAGAGG
+TTGCAGACACTGGGTAAAACCCCAAGAACCACAAGCAGATGCCCACTGCTTTCCTGGCAGCCTGACCCTTGCACCAAAAA
+TACTTTAAACAATAAGTTCCCAGTTCTTAAAAGAGCAAACATACCTTTAAAGGAAAAAGAAAAAATACACAAAAGTCTTA
+CCGACCTTGGGTTTAATATTTACTCTTCTCTTTCTTAGGTCAAAGCTACGGAGTATAAACTGGGCTCCTGCCAGGTTCTT
+ACCATATTCCTTCAATAGTTGTTTTTCAGGAAGCTCTGTTTTCTGGAAGAGATAAGAAAATAGTACCTGGGAGGAAATGA
+GCCATGCTTGTGTTGCTGAGTGATTCTAACATGTTTGGTGGGGGAGAGGCACATAGATGTTAAATCATTCACCCTTTGCA
+GAGAACATCCTTTGGGTAGAGTGTTTCTTATCCTCCTCCACCCAGAAGCCTTGTTTCTAGTTTCCTGTGCTTGCAAGCTT
+GTCTCTCCCAACTAGGCTGTAAATGAATTGAGGGTAAGGATCTTGTCTAATTTACCTCTACATCCCCAGGACAATAAGCA
+AGGGTGTAAATGCCTAGTTCCCTTGCCCAGGTAAAGAGAAAAGCGCACCTGAGGCAAGATTCCACTGCAGCGCCCCCTGC
+AGGCTCCACAGACGTGGTCTGAATTCACCGCCAGCTTCACTTCCTTCCATTCCCTGTCCTACTTCCCCCATTCCTTTACT
+GTCTCTGTGGAGCCTTCCTTAGTAAACCACTTGCACACAATTCTCAAGGTTTGCTTATGAAGATCAACAGCTATGACATC
+AACCCATAGGACAAAAAAACAGAGCAATAGAAAAATTAGTAGGCAATAGAAAAATTAATGACTGCCATGTCATCTTAGAG
+AAGGATCCTCGATTTCCACTCAAATTAGCTTGAGTGGAAAGTTTACAGAGGACTCTACATTGCTAGAAGTACTGGACGGG
+CTTGGGGGGATTCGCAAGTCACCTTTACTTGCTAGGGCCACATGGTGTCTCTCCTCTGTTTCTCTTAGCACTTGTACTCG
+TCTTTTTTTGCTGTAGTCCCATATTGCCTATGATGTTTTACTCTAATGTAATGTGTTTGCACATGACTGTGGCTTCCTAA
+GGCAAAGAAGCTCTTGCCCTGGCTCTATAGAGAACCAGCTTATCCAGTCCCTTAATTCTGTAGGTCCAAATGCCAGACAA
+AAACAAAATAAAATAAAAAAACAGACTCTGATGAGCCCATCCAAGGTCAAGTCCCTGCCCCATCCAAATCAGATTCAACA
+GTGGAAACAGAGTCATCTGCCATGACACAACCACTTCTTTGACGGTACTGTGGGCATGAGTGGACTTTTTCAGGAGGAAT
+TATGCACTCAGGGTAATGACTGACATCTGTAGGGCAATAACAAGCCAACACTGCTTGTTAGATTCCCAGTCCTCTTGAAC
+ATGTTCCATCAACTCCATTCTCATGGGCTTGCTCACTGTCTACCAACATACTGGGCTAAATTTGCCTCCAGGCTTCCTCT
+CATTGTCCCTCAAATTATATAGGCGTTGGCTCTTTTCTGAAAACAATATCCTTCATTTCCTACTTTCCCCATGAAGCCTT
+TCCCAGAATTCCAATTGTCCCCTAATGCCTAAATCAACATTTTATTTTGTCCATTTACCCACTGGTGCATGGTAGCACCA
+GGGTTGTCTTCCTTACTTAGAGAATAAGTTCCTCCAAATCAGGGAGGGACCATGCCTCCTATTTCTTCTGTTTCACTTAT
+ATCATTTGGCACGATTCTGAGCACGTAGCCAGCAAAACATAGCTGCTTCTCCCTAATGTACTGAATGAAGTAGATAAAGC
+TTTTCTACAGCACCTATTTCAGTGCCTTAATCATAGGCTGGATAAGAAACTTTTTGTACTACTGCCCCTTGGGCCTCACC
+ACTACAGCATGAGCTTATTCCACTTCCAACTGCCAGGATCTGTATCTCTTTGCCAGAGGGCTTTTTCCCAAGCAGCAGGA
+AGCCTTTCTGGCCTTGGGCATAGTAGACTGGATAGTCTGGAATATTAATTGCTGGCAGCACTCAATTCAGGAGCTGGTGG
+AAGTAACCCAGCTCCCTCACCCCTCCATGGGTGTCAAGACTTGGTTCCTTCCAGTGGGTTCATGGTCTCGCTGACTTCAA
+GAATGGAGCTGCGGACCTTCGCAGTGAGTGTTACAGCTCTTAAAGATGACACGGACCCAAAGAGTGAGCAGTAGCAAGGT
+TTATTGTGAAGAGCGAAAGGACAAAGCTTCCACACAGCGTGGAAGGGGACCCAAGCAGGTTGCCACTGCTGGGTGGGGTG
+GCCAGCTTTTATTCCCTTATTTGCCCCTCCTGTGTTCCATTTCTGTCCTATCAGAGTGCCCTTTCTTCAATCCTCCCCAT
+GATTGGCTACTTTTAGAATCCTGCTGATTGGTGCATTTTTACAGAGTGCAGATTGGTGCATTTTATAATCCCTTTGCTAG
+CTACAGAGCACTGACTGGTGCATTTTACAATCCTAGCTACAGAGTGCTGATTGGTGCATTTTACAATCCTCTTGTAAGAC
+AGAAAAGTTCTCCAAGTTCCCACTTGACCCGGGAAGTCCAGCTGGCTTCACTTCTCAGTAGGTTAACTCTGAGTGTTCTA
+CACTGTTTCCCAGAGTTCCCCAGTGGGCTTGAGCCCCAGCTGCCCACAGTGGTGACTTACTTGATAATGGCCTCCTGTGT
+CTGCCTTTCCTTCCTGTCTCACTTCTACACTTCCCTGTTAGTGTTTCCTGGGATCACCTCCCAAGTAAACCACTTACACT
+TGAATTCTTGTCTCCGAGTCTGCTTTTAAGGAAACCTAAACTAAGACAATGTTCAATAAATGCCAATGGAAAGGTTGAAA
+AGGTAATTTTCACCTTCCCCCTCACAGGTAGAGAAGCTAGGCCATTGACAGGGTTGGATTTCTCAAGACTGGCAGGATAG
+AGGACTCCACTCCTATGGAAGCACCTTTCAAGAAGAGACGGTATGGGAATCAGTTTAAAACACAAAGCACTGCATCCTTG
+GGGGCATCTGTTAAGTTCATTGCACCCAACCAGACCTCCTAGAGAGGGCAGGTGCTGACAACATAACTGCCAACAATTTA
+CTGGTCCAATTTGTGATAGGCAATATATAACTCGGCAGCCCAGAAGCCACATCCCCATTTGTCTTGGTTTTCATTAATTC
+AAAAAAGAGGAAGAAACCAATAATGATAATAATAATAACAATAACAACAACTATTAGACTTTCTGCTAACCTGGGTGAGC
+AGCATTTGACTTGAGCTATGAAATCAGAAAAATGTCCCTTTCTTGCATCTTAATTTTGCAACAAATTCAACCTGTTAATA
+TAGTCCTTCAGGAATTAAACAGACAAAACCATACAAATGCTTGGAGGATGATGGGAAAAACCAGTTACTTCTGACTAACT
+GGAAGGGCAGCTTGATCATTTCTGATGGTTGATTGCATAATTTCAGTCCTTGAACATTTAGTGTGCCATCCCCTTGGTTG
+CTGGAGGAATGCAATTGCTCTATGAAATGTGAAGAGACTTGACAAGAACTATTTTCATGGTTATGTGCCAAGTTCTTGCT
+TTAACTTGCTTGCTTAATCAAATGTCTACTCATGATATCTTTGGGAGTTTCTAAGTTTCATTAGCTCTTTCCTTAAAGAG
+AAATTAATCTACTTGGCAAGGAAGGTTTATTCATCATAGGTGAGCAAACATTAGCACTTTCTAATAATTAGTTCCAAGAT
+CCAGTTTCTAGATACCTACCAAGTTCAGCCCTGGTTCACAAGAAATTGACCTATTTTCAGTGGAAGCACTGTAACCAGTG
+CAATAAACTCTATCTCATAGAACAGTGTTAATCTGTCATCACAGCCAAAATGATTTGTTATAGCAAAGCAACAATCAGAG
+TAAATGATGCCTGTCCCTTCATTAAGGAAATCTAGAAGAATGCTCTGTCATGAGTTTAATCTGAGATTTGCCTGGCTATC
+CTGATGAAAACCATCTAATGAATATGTAAATTTATCATTGTCTTCTATTGTAAAAATGCACAGCAAATTTTCTGCTTGGC
+TATGCATAATCAGAAGAGAAATCCTCACTGCTGTGAATTTGAACTTGAATAAAAATTGTCTTCTCAGGAAAAGAGCATGC
+TAGATGGAGGAGGGGTTAACAAGAGCCTTCAGTTTACAGATGTAGGATGTCTTCCTGCTGTTAATTATTGTGGTAGGCAG
+TGTAATGGCCTTCTCCCCAAAGACATCCACATTCTAATCCTTGGAAACTGAATATGTTATCTCACACAGCATGGGGCCCA
+CTGGCACTGGACCCCTCCAGTTGGCAGGTCCACTGAGGCTTGGGTACATGGGTAGACATTCACCCTCATTTACCATTCTG
+CTGAAACTGTATCTCTAGGTTTCTGAATGGGAAGAAGGAGCCTACTGGAAGGAAGAGTCATGGCTCCTGGAAAGTGGGGG
+AAATGAAGCAAAATGGAGTTCTTTCCAGAGTTGCCAGATTATTTCAATAAAAATATCAGACATGTGTGAGTAAAAAACAT
+GGACAAAATAGCAACAAAAATGCAATATAGAAAGATTTCTGGAAACAATCCATAACGTCTTATATTTTGCACTGAATACA
+ATGACATTTTTTCATTTCTTTATTTTTCAGAAACTCAGGTTTTGTACACTTGAACTAGAAAAGCATCACATAGGTCAAGA
+CAAGAAAAAAAAATTTTCAAACATCAGTTAATGACATGAAAAATAATAAAAACTATTTAGTTAGATTGTTTAGACTTTTA
+TAAGGAAGACTAATATAAATTTACTGACACATTTCTTCAATGTGTTTCCAGAATAAAAGCTCAAACTCTCAATGCTTTCA
+GAATAAAAGACCACACATGATCTTCGGTGTGGTAAACACCCTCCAGAGTACTTCAAATCATGGGTTAGACAGAAACTACA
+GACCAGCAATATCCAATGGAAATATAGCATGAACCATGTTCAAAGAAGTTGAAGGAAACATATGAAATTATTTGTAAGAA
+TCTATCTTATTTAACTTGATATAGCAAAAACATCATTTCAATGTGTAAACAATATAAAAATTAACTAAATATTTTGCTTT
+TTCAAATCTATTCTTGAAATCCGATGTGTAATGTACTTAAACCATATCTCAATTTAGATAGTAAAATTTATCAGAAATAA
+TTAATCTGTAGTTACATTTCATAAAATTTATCATTGGCAAGTAAGATTCACATACTCAAGTTGTTCTAAGCATACATTAA
+AAAGTTTTCAATAACTGAATCAGTTTTTTATTTTAATTTTTAATTACAATTAAATAAAATTAGAAATTCAGTTCTTTAAT
+CACAGTAGGCAGCCGGGCACGGTGGCTCACACCTGTGATCCCAGCACTTTGGGAGGTCGAGATGGGTGGATCACGAGGTC
+AGGAGGTTGAGACCATCCTGGCCAACATGGTGAAACCTTGTCGCTGGTAAAATACACACACAAAAAAATTAGTCTGGCGT
+GGTGGAGCGCAACAGTCGTCCCATCTAGTCAGGAGGCTGAGACAGGGGAATCGCTTGAACCCAGGAGGTAGAGGTTGCAG
+TGAGCCAAGATCGTGCCACTGCACTCCAGCCTAGTGACAGAGTGAGACTCTGTCTCAAAAAAAAAAAAAAAAAAAATCGC
+AGTAGCCACCTTTTATATGCCTAGTTGACACCTGTAGCTTATGGCTACCATATTGAACATCACAACACTAGCTCATACAC
+AATGATTATAAGAATGTGTGAGATTTGTTTCACTTTCATAAATTTTTTGTGCTAATTTTCTATGTGACCCATTTTTTTCT
+CCATGTTTAATCTGAAGCCTTTAAATGAGAGGCCCTTGTGTTCTTGTCAGTGGTGTCAATAAGGAAGTGAAGAAGGTAAG
+AAGGAAAGGGGAGAAGTTTCTATGAAAGAATGGAGGATGTTGGCAGAAGCTTCAGAGAAAGGAAATCAGCCTAACTCTCC
+AAGATCTGTTGGGCAGAAAGAACAGAGGTTCCCAGGGCAGTAGGATGGGCCTGGAAATTGTTCTAGCACACTTTCAGACT
+GGGCTCTTGTTTACAAATGACAGAAACTTGATTCAAACTGGCTTTGACAATGAAAGTACTCTATCAATTCAAATAAATAA
+AACATCTACATGTTCTGTAGTGAGAAGCTCAGGCATGGCTAAATCCAGAGGATCAAATAATGTCATCACAAATTTATTTT
+TCTTAATCTCTCAGCATCAATGTCCCTGTGTTGCTTTAAATCCAATCAGGGTTTCACCATAGTAAAAAGATGGCCACCAG
+CTACCCCAACCTATGTCCTATTCTTAACAACTCTACCAAAAAGAGTACTTCTTTCTGAGAAGCCCCAGCCAATGTCCTGG
+GCTGATACTCCCTGGTCTGGGCATAGATCTTATGCTCATTCCTCAACTAATTCCCAGGCCTGGTGAATTTCCCAGCTTTG
+GAGATGGAGGGTTGGATCTGACCACTCAACCCCCATGAATTAGAAAAGGGAGAGTGATTTGTTCTTCCAAGGGAAAATCA
+GGCTGCTATAGCTAGGAGAAGAGGGAATGGATGCTGGGCAGGAAAAATTTCTTGTGTGTTAAATCAAGACTTTTGACAAT
+GAGGAGGCTAGTCAGTTCCCCCAAAATAATTTATTCAGACTTTAGGTGTTTCATGAAGAAAATTGTATCATCTGCCAAGC
+ACCTAATCTCCTTTCCCCATTAGTCAGTCACATGGGAATTCATACCTTTTTAAAGCTACCTAACCTTGAAAAAGTTGTTT
+AATCTCACCATGATTCAGATTTTCCATCTATAAAATGGAAATGATAATAGGGCCTACCTCATACATTTATAATAAAGATT
+ACATTTAATCTTATATAATAAGATAATATAATAAAGATTATATTTAATCTTTTATAAATGTATGAGGTAGGTCCTATTAT
+CATTCCTATTTTATTTGTTCTTATTTGTGAAGTGCTTGCAATAAAGTCTGGCAGATGACAAGGGTTTAAAAAAACCCCAC
+TAAATAAATACACAAAAGTGTGATATTATACATTTTTCTCAGAAAAGAAGATTCATTGACTTCTCAAATGAATTCATCTA
+GGCTTCTTCCCATCAAGGTGGCCTCCAGAGAGATGAGAAACTACTGGAAATGTTTACTAAATAAATGAGTCTCATTTTCA
+AAGGAAAGGTCATAGACACTTTTTAAGACACACAGATGTCCTTTAATGCAGCTCATCTGTATTGTAATTTCTGTGAGCTT
+GCATTAGAATGTAATTAAATTAACACCAATGCAGTCCTCCTGTCAAGGAAGCTCTGCCTAATGTAATGGCGTCAAGGAAC
+AATCTGAGCATTACACATATCATGAGGAATTGTTTCCATCCCCATTCTATTCTTATTGTCACAATATTTGGTTTAATTAA
+GTTATATAGGATAGTAATTATAGCAAATGTAGTTTGGCTGGGAAAATACTCCTTGTGATATGAAGGTCGCATCTCCAGAC
+ACCACTGCTGGGGAAGGCAGAATTCAGTCCCTACTTAGGACATAAAAACTCTCAGACACATAAGAAGACTGAGAATAGGA
+ATTGCCTGCTCCTGAAAATGCCAAATTGATATATGGTGAGAAGCCGAGTCTTAGAGTCCCTTTATTGTTGGATACATTCA
+ATAAATAAAAGGAAAGAGCCACCTATAAAAGGTAATTCAAGGGACCATATGCTCTACCTTTCCAAAAAGATAAGTCTAAA
+ATTACAGCCCTAGGGTCAGTTCTCTGATTTGGCATTGCCTCAAGTAGGCCCTTGCCTGTTACAGGGACATTTGCTACCTG
+GAGGGAAACACATTAACATTCTAGCACCACCACCACTGTCATGTAGGTAATGTACTAGATTACTCAGCCGGGTGTGGGAA
+TGTTCCCATTCCTCACTCTTCCCCATACCCATGTCCTTGCCCAAGAACTCTCCAGTTCCTTCCTCTAGAAAGTGTGCTTT
+GTCCCATGTGGCTTGCTTTGGCCAGTGACGTATGGGAAAGAGTGAGGATGTACTGTTCTGACCCTACACCCAAAGAAGTG
+CTATCCCTTTCTGCTCACTCCTCTTGGTTTCTGCCACACCATTCTTCTATTCTAGAAGAATGTGAGACATATGGAACAGA
+GGCACCCTGGCCAATCAGCAACTCTGTGAGTGTGATCATCAATGCTTATCATTAAACGCCACTGAGATTTGGGGTGGTTT
+GTTGCGCAGTATGGCTGAGGTAACAGTTGACTAATACACTGGTTATTCAGCTAAGACAGAGAGACGCTCTGAGTGTCTTA
+ACCATAGCAAGTAAAGTTTCCTGTAAGACAAGGCAGAATTTGTAGCCTTCATTGTAGCCATTCCCTGGCCCTTTTTCCTT
+GCTTTGTATAATTATAGAATCTAAGGAGTGAAATACTCAAGTTCCAGAATTTTTTACAGCTAGGGTGGCCATATGATGCA
+TTTGTAGCCAGTGATACATGTTTTTTTTTTTTTTTTTTTTTTTGCAGGGGACTCCTGGAAATACTTTTGCCGTCATGAAA
+AAAGCAGACTTTTGTTACTGCTCTTCTCCCTTCTTCCTGCCTTGAAGGCAGTCATGATTCCTGAAACTAATGCAGCCAAG
+GTGCAGCCAGGAGGGGAGCAGTGTGAGGTTGAAAAATCAACAGCTTAAGGAGGGCAGAGTAGGAAGATGGAAAAGGCATG
+AGTCTTTGATGATGTTGCTGAGTTAGCTAAACCCCAGTAACAAATGCCTCAGTCTTCTTTTTCTTAGAGCTAAAATTAGT
+GTCCATTTTGTTTAAGCCCCTATTAGATGGGTGATATCTAATGTGATATCTAAAATTAAATGAATTTCTAACTGATATGC
+CTAATTTACAGTTTGGCTTGGAACTGGCTTAGTCTGTTTGCATCAGAAAGATGGCATAGTTTCCCCCATTGGGTATTAGA
+GAAACTAAAATAGAAAGAAACTGAAAGTTTTATGGCCTACTGAGGTCAGAAGGAGGGGAGAGCAGGGAAGCAGCAGTCTC
+AACCTTTAACAGCAAATCGAAGTCCCCTAGGAATTGAGATGGGGAAGACTTGGAACACCTCTCTTCTCTGTGTCTTGTCC
+TGTTTACCTATTAAAACCCAACCCATCCATTCCTAGCCTCTTCCATGAAGCTTTCTCAAAATCCAGCCAGAGTGAACCAC
+AGCTTTCTCTGAATATCTAGAGTTGTTTATTGGTTTTGATCTATGTGTGCCCCTCAATTGACACATCACACTTGTCCCTG
+CATGAGTCCAGCTGTGTAATGTGTCTTATCTTCCCTACAAAGATAACATCTTAATCTCTGCAGAATCTCCTGCAGAACCT
+AGTAAATACGCAGCATACATCAAGTCTTTAGTAACCAGTAATAACAATTAATTACATAAAATGAATATAAACTTGCTTTA
+TAACAAGCCTATCATCACACCTCTGCTTATGGGTGTATCTGGAAACTTAATTAGGAGTTGGTAGCAAAAACAGTCAGCTA
+TTTCTCTTCTATTGCAGAGGAAATAGATTAGGAGAGAAATGAGAGAGGACAACAGAGAGGAGAAACAAAGAACAGTTCAA
+AGTCACCTCCAAAGAGACCAGAGTCTCTGTATAATTTATTCCCTCTTGGGAGCATTCCTATCAGTAGTTCTGATGAAAAG
+AGAGTTAGCAATCTGGCACCGTTTTGCATATTTACACTGTTCCTTAAAATATATGTTGAATCATTTAGACTTTTTAGCTT
+CCACTTTAGTAAAAAAAAAAAAAAAAAAAAAAAAAGAGGGAAAGTCTAAGCTGGAGAGTGCTAGAACTCATAATTAAGAA
+GGAGGGGAGTCTTACACTGGTCCCCCAGAAGCAGACACTGAAACAAGGGTTTGAGTCCAGATGGTTTATTTGTGAGGTGA
+CCCCAGGAACCATTGTAGGAAAATGTGGAAATGAGACAGTGAAGGAAAGGAAGCTAACACAGGTGCATTAATGAGTGGGT
+AATTGGAAATCAGTCCCCACGGAAAGCTCTATGTAGAACACAGCTCCAAATTGTTCCCCATGTGTGATGAGGAGCTGGGA
+TATTTATCTACTAACTCCCATTTGCACTTGGGCCCATTGGGCACTCCTAGACTGCCCCACGCATAACTGATCATGCTCCT
+GCAGCCAAAGAAAGTCCTTAAGCAGAGAATCCAAGGTGCTTGCAGTAAGAAACCATTGGCATGTGCAGGAATACTGAGTA
+TTGATGATATGTAAATAGGGCATCAATCGTGTCTGCTATATGGGGAAAAGTAAAGGGTAGCAGAGTAGGGGCTGGGGGAT
+GAGAAAGCAAGGAATGAGGAGCACTCACTTGGGGAGGACTGCCAACTGTGTAGCCAGGTCTCAATGAACAAAAGTAGGCA
+TTCTGTTGAAACTGCTGGGGAAGTCTATAATATAATACCTCTACCATTTATCTTAATTGTCTTTAGATATAGAATGCCTT
+CAAAATCATAAGGGCCAAAACTTATATTTATCAACCTTCCTGTCCCGTATTGAGGGAATAATAGAGTACATCCATATAGC
+AGAATATTCTGAATCTGTTTAAAATTATGATGCATAGTTTCAATAAAAAGAAAAGGCATTTATAATTTAAAGTTAAGTGG
+TTTTTTTAAAAGGCAAGATCTAAAATTGTATATCTGGTAGGATTTCCACACTGTAAAAAACATGTACTGAACCAATATTG
+GAAGAAAATGTTAACTTTTTAAAAGTTTTTGAGTGGTGGGATTGTAGTTACTTTCCTCTGCTTCCTTATAATTTTCTAAA
+GTTTACCTGCTTTCCACAAGGACATGTATTACTTATAACTATAAAGATTTTTTATGAAATCTCTAAATATAAATAAATGT
+GTATATATATATGTATATATATATATATATATATATATATATATAGCCACTCATTGCCACCAATGGTCCTGCAATAACTT
+GGCCCATCTGAGCCTTTGTTGTTAGATTCTGTTTAGGACTGTGCTCAAGGGCACTGGCTCCCAGGAGATTAACATCTTCA
+GCTTTGCTCTCATTTCCCAAGAAGTCGGTGTCACATTTCAGTTCAGCCAACGAACAGGTATTCACTAATTTAATAAATAT
+TGATAGATTACCCAGTGTATGCCAGGCACTAAATAAAACCAAATATAGTTTCCAGCCCTCATGGAGGGAACTGAGTAATA
+AAAGCAAATGAATAAAACTGTCCATCTTTGAGTAGTCCTATGCAGGAGAGATGCCTGGTTGTATTAGAATCTGCAGTAGG
+AGATTTGGATCTAATGGGTTGGGCTGCCCTCAGGAAGTGAGGGTCAACTTAAGATCCCAAGGATGCATGGAAGTTAACCA
+GGCAAATGTTAATTGGGCTTCTAATGTGTGCAAGGGGTCACCATGCTGGAGTCTTGGGACACAAACTTCTGTCCTCAAGG
+ATCTAGCCATCTGATTGCCAAGCCACAATCAGGATTCCTAAAATAAAAGAAAAATCCCCACCGGTATTAGATTAAGGCCC
+CGGATGTCTATAAGAAGTTCAAAGGAGGAGCAATCAATAAGGGCTAAAGTAGTCCAGAAAGACTCCAGAGAAAAAGTAGC
+TTAGGTGCACTTTTTTGTCAGCCAGAAGGACAGGCTTGCCTAGCAAGGGGAACTGCATGAGCAAAAGTCCAGAAGGGTCA
+ATGCCAATAATCTTTATATAAGAGAGTAAGATGAGAGGTTGCAACTGAAACTGTTACTGGAAAGGCATCCCGATCCTGAC
+CCCAAGAGAGGGTTCTTGCATCTCACACAAGAAAAACTTCTAGGCCAATCCATAAAGTGAAAGCAAGTTTACTTAAAAAG
+TAAGGGAATAAAGAATGGCTACTCCATAGGCAGAGCAGCCCTGAGGGCTGCTGGTTGCCCATTTTTATGGTTTTTTCTTG
+ATTATATGCTAAACAAAGGGTGGATTAATAATGAGCTTTCCAGGAAAGGACTGGGCAATTCCTGTAACTAACGGTTCCTC
+AGGCTTTTAGACCATATAGGGTAAATTCCTGACGTTGCCATGCACCTGTACATTGTCATGGTGCTGGTGGGAGTGTCTCT
+TAGCATGCTAAAGCATTATAATTGACCTATAATGAGCAATGAGGATGACCAGAGGTCACTCTCATTGCTCTCTTGGTTTT
+GGTGGGTTTTGGCCATCTTCTTTACTGTAATCTGTTTTATCAGCAAGGTCTTTATGGCCTATATCTTGTACTGACCTCCC
+ATCTCATGCTGTGACTTAGAATGCCTAACCATCTGGGAATGAGGACAAGAGGACAGCTTCAACTTTTCATGACTTCATCT
+CCAACCCAACCAATCAGCACTTTCCACTTCCTGCACCCCCTACCCATCAATTTATCCTTAAAATCCCCAGTCTCCAAATT
+TTTGGTAAGATTGACTTGAGTAGTAATAAAACTCTGGCCTCGCATTCTGCCGGCTCTGCGAGAATTAAACTCTTTATGGC
+AATTTCTCTGTCTTGATAAATCGATTCTATCTGGACAGCTGGCAAAATGAACCTGTTGAGCAGTTACAAGAGTAAAGGAT
+GAGTACTGAGTGGAAAGGATGGCCTGTCCTTGGGAGTCGGGTAAGGACACTTGAACTCATGATAGAAGCTAGGGAGCAAA
+GAGTGATCAGACAAAAGTTGTATCTATAGGAGGATGGGTGTGGATTACTGAGGCCAGTCCCTATGACCTCTCTCTTTCCA
+GGGCCAGGAACTATTACTGTTTTTTGTGACTTTGGTAAGTAAGTGAACCCAGGGCATGGTTTGCAACTGGCTGCGTTGGT
+CATTTTTATGAATCGATTTGTGCAAACCCATGGCAAGCTTTCCTAATATAAACAGGTGGAAAGAGAGAAGTCAGAGGGGC
+TGGCCTCAGTAATCCAGACAGCAAATCTTACAGGGAAGACCCTCCTTATATCCTGGGGAAATTGGTATATGAGAAGTTAA
+CTGGGTTTTTACCCTCTGAAAAGGAAATTTCTATAAGAAAGGGAAGCATCACTGCATTGCTTGATAACATCTGTTTTATC
+TTGCTTGTTTTATCTGACTTTCTCCCATGGCCAAGGAAACATGATGAAAGAACCAATTTCTTTCCTCTTTTAGAAGCTCT
+GATCATGGAGGAGACCATATCTGCAAGTCATCTAGGACTCAGCTCTTTTAGAGCTATCCATGGACTGGGAGACCCTTCTC
+CCACTTCAAGGAGCCCCTCTTTTGTACCCCCCAAACTTCCTGGCTTACAAAATTTGCTGCCAGATGTGGTGGTGTGGGAG
+TGAGGGTGGGTGGCATAGGTCATGTTGGTAAAAGGCCTCCCATAGGCGGGAAAACTGCAGAGCCAGGGGGCCTAGGGGAA
+GGCATCTCTTGCCTAGCAGATGCTGACAGAGGAAATGGCACATTCTGCCCATGCATCTGTCCTCATGATTCACATGCCAG
+CAATTCCTCAGGCTAGAACATACCTTGTAGGTCATTTTCTAGCACATGCCGTAACTAGATAGCCCCTAGGTCTCTCTAGG
+AAAGGTGACTTTCTAAACTTTTTTTGTATTCTGTCTGTAAAGTACTGCATACTTTAAGGACTTTTTAAACAATAATAGAG
+AAATCAGCTTCATCCCTGCTTCAAGACACATCACAAAGCTCCAATCACAAAAACTCTAGAGTTTCTGTAGAGGGATTAAA
+GTGTGGCAGTCTAGTGACCTAAAGGGATTTGACTTGAAGCTGTATTTGCTTAGCACACACGCTGTTTTTATCTTGATTTT
+AAGTTTATTGGTGCAGCACATGGAAATAATGGGCATCAAAGCCCACACCTCCCAAGCTGGTCCTTGATGCCATCACTGAG
+CCCTAAGTGAGAGGCCTGCTTATAATAGTCCTGCATTCTCAGCTTCCCTTAACCCCAAGAATAGAACGGTGTCTTGCATG
+CAGAAATAGGTTATTGGTGAATAAATTGCTTAATGAATTATCTAGTTTCCCTAACAGAGCTGGTTACTTCTTTCTCATGT
+GAAGGTGAGCACCTGGTGAGGTTTTCAGGAATCAGGTTCACCTTGAACACCATACTCAGGCCTTGGCGGCTCTCTGGGCC
+TGCCTGGCTCCTACAAACACCATGCAGTGTTTCTGGATCTCCTAGGCCCTCTAGTGGCTATTCTTAGGCTGAAATCCCTC
+AGAACTCAGCCTCTCAGCTCCTGCTAAAATTTCAACCAGTCCCTGAATTTAGAAATATGTATATATACTTATTTTGCTTC
+TCGTAAACTGCAAAGAAATAAATTTAATTATGTCTAGTTTCTGATCAAGATTTATCTGGTCTCATAGCCTTCCCCTGGTT
+TGTGTTTTAATGAGCTTTTTATTTTTCAGTTGAAAGATTATACCAGATGTCAAGACTTAACAGGAATCAAGACAGCATGG
+ATTGATATAAGGGTAGACATAATTATCAATGGACAGAGTACAGACTCCAGACACAGACCTACACCTATATAAGCAACTGA
+TTTTCAAAAGTATCCTGCTTGCCTGCTTGTACCCCCAAATGGGCTCTCCCTCACCCCAAATGCATCTTTAGGTTTGTTTC
+AGAGAATTTCTGATGGTCTTTCTCGCACAGAAACACACAAGGGGTCTAAATGGGTTCTTTATGCAGAAGAGCGAAGATTT
+CCAGGTCCAGTGGGCCAAAGTGCCTTCCCCTGCAAAGCCTTTGAGTGATTCCAGGCACCAGGGGCATCAAAGCCTTGGTG
+GGACCAGAAACTATTGCTGTTTTTTATGACTTTGGTAAGCACGTGAACTCAGGGCATGGTTTGAAACTGGCTGTGTTGGT
+CATTTTTATGAACTGATTTGTACAAACCCATGGGAAGCTTTCCTAAATAGAAACAGTCTGCATTATGGTGGTATCACCTT
+CATCTGACCCCTGGGAGCTCTAAAGAAAAGCCCATGATACTCACCTGGCCTTCAGCCTGACCAGGGGATTAGGCCACTGG
+CCAGGATGAGACTGAAGGAGGAAAGGAACCTATACAACTACTCTTCCCTTGGTTGAAAGGAAAGTCCAGAGAGAATAGCT
+ATTTGCAAATATCTGGGCTTCTACTGGGTGCTTACTGTTGAGTATCTTATATTGGGACACAGGTATGTTCATCTCCATTT
+TACAGATGAGGAAATCGAGGCTCAGAGTTAAATATATTTCCCAAGGTCAACTAAGTAAGTGGCAGGATTAGGATCCAAAC
+CCAATTCTAACACTCTCTAAAACCCACTCTTTAGCTAGCCCCTGCCTCTTAGCTTTGGGGTTCAGGTTCTTGACTCAGTC
+TATGCAAATGTTGCCTTCTGGAATTTGGCAGTGCACAACCTGCACATCTGTACAGTGTAGCTGAAACCAACTTCTGGGTT
+GAAACCCTTCCATATTCAAGATTCTAAGCCTGGGCTAAAATCTAGAGCCTTGTTACTCAAAGTGGTCCCTGAACCTGCAA
+CATCAGCCGTGTTTGGGAGCTTTTTAGAAATGTAAATTTCTGCTCTCCTGCTGCCACTCCCCCCAACCCCTTTCTATTGG
+ATTACACTCTCCAAGAATGGAGGCTGCACCCAGGATTTCTCAGCTGTTATGTGCATTCAAATCACTGGGAATCTTGTTAA
+ATGGAGATTCTGGCACAGGAGTTCTAAAGAGGTAGTGCTAAGCTTTTGCTTCTTCGGGCAGCTCCCAGATGATGCCGATG
+ATGTGAGCCACACTTGGAATAACACAGCTTTAGATCAGTAAGTCTCAAACTTGAACCTGTATTCAAATCACCTGAAGAGC
+TCGTTAATACACAAATTGCTGGGCCCTACTCTCACACTCTCTTACAATAGCTGTGCACCTTCACCTTTACTTTCACCCCA
+AGAATAAAACAGTTTCTTGCACACAGAAATAGGTTATCCATGAATAAGTTGGTTAATTAATTATCTACCTTTGCTAATAG
+AGCTGGTTACCTCTTTCTCATGTGAACATGAGTACCTGCTGAGAGAGTGCTTCAGAGGGTCTGGAGTGGAGCCTGAAAAT
+TTGCATTTCTAACAAGTTCTGGGCCCAGCATGGTGGCTCATGCCTGTAATCCCAGTGCTTTGGTAAGCCGAGGCAGGAGT
+ATCACTCGAGGCCAGGAGTTTGAGACCAGCCTGGGCAATATAGTGAGACCCCATCTCTACAAAAACTTTTTAAAAATTAG
+CCAGGCATTGTGGTGTGAGCCTGTTGTCCCATCTACATGGGAGGAAGATCCCTTCAGCTGAGAAGTTCGAGGCTGCAGTG
+AGCTGTGATCTAACAAGTTCCTAGGTGATGTTGCGATGCTGCAGGTTTAGGGACCCCACTTTGAGAACCAGTGTTCTAGC
+CTATCAGAACTTCCTCTAATTAGACAACTAACTCCATGCTTTCTGGCATTTCATTTTTATAAAGCCCTATTCGGTCCTTG
+AATTCTAGCATTTTAGAGCTGAAAGGGACTTCAGAGGCTACCTTGTCTCAATTAAAGAAATGCAGGTCAAGGGTCCCACT
+CAAGGCCATACAATTAGCCAAGAGGCAAAGTCAGGATTAAGCATCCTGACTCACTCTATTTCTATGCTGTTTACTCTGCA
+TCCTGCTGGCTCCAAAGCTCAATAAATTCCCTGGCATCCAGCAACTGCAGTATTTTTGGTGAGCTCTCCATCCCAGGCCA
+GCTTTGAGGTGCTAGAATCCCTGTCAGACACATATGTCATCCGCACAATCATGGAAACGCAGTGGCCTCCTACCTAACAG
+CTCTGTGAAAGAGGGCTTTGGAAAAAAACAGCAGCTGTGCTGGCCACTGTAGCAGAGAGAAAATGCAGCAGCTAGCAAGC
+AAACCCAGACAAGGTGAGTCAGCATAGAGGCCCTGGGGCTAAGGAATATACCAGGATACTGCCAACAACTGGCACCCAGA
+TTCTCCTGAGCAAATCTGAGAAGGTCAATCAACTTCTCTGTGCCTGAAAAGATGCTTTTCTTTAGAGAAAACAATACTCT
+TATTCAAGGGAATGAACACACAAGAGGATATCAAAAATGTGCTTGGGGTGGGAGTTGTATCAGTCCTGCTGCATGACAAA
+TCACCCAAAACTTAGTTGCTTTAAACGACAACCATATTCTTGCTCACAAGTCTCTGAGAAAAAGGAGAAATGCTTAAGAT
+CCCTTGAAGCCTAGACTTGGAACTGGCATATCATCACTTCCTCCATACTCTACTGGCCAAAGCAAGACCCAAGGTGAGCC
+CACTTCTTAATGAAAGGAACTGTAACTTTACAGTATAAGGATGTAGAGAGGCGGAAGGTTTGTGGCCATTTTTGCAATCT
+ACGACAGAGAGGTCTTCTTAGTTTAGGAAAGTCTGAGACAAAGATTCCAAAGCAAATGGTTTACTTGGAAAGGAAAGGAA
+ATACCGGTAGGGGAGAGAAAAATGGAGACACAGAGAAGCAGCAGCCAGTAAAGGGTGCAAAGCCAAGTCGGCTGGCGCTC
+TGGGCAGCTGGAGTATAATTCCACTGGAGAACTCTGGAAGCTGGTGCAGGACACAGGCCTCATCATCATCCTACCTAAAG
+GAGGACGCCAGGATGTTTATACATCAATCCATGATATGGTTTGGATCTGGGTCCCTGCCCAAAACTCACATTGAATTGCA
+ATCCCCGATGATGGAGATGGATCCTGGATCCTGGTGGGATGTGATGGGATCACAGTGGTGGATTCCTCATGAATGGCTTA
+GCATCCCCGTGGTGCTGCTCCCATGATAATGAGTGAGTTCTAATGAGATCTAGTTGTTTAAAAGTATATAGCACCTCCCT
+CACTCCAACTTTCTCTTGCTCCTACTCCTGCCGTGTGAGACATCTTATACCTCCTTCCTCTTCCACCGTGATTGGAAACT
+TCCTGAGGTGTCCTCAGAAGCAGAAGCCATTATGCTTGCTGTACAGCCCGCAGAACTGTGAATCCATTAAACCTCTTTTC
+TTTACAAATTACCCAGTCTCAGATATTTCTCAATGCACGAACACACTAATACAATCCGTATCAGTCATTTACTGAGGCTG
+CTCTCAGAGCACAGAAATTCCCTGTAATTTCAGCCCTCCACTCTGGGCAATGTGGCTTAAGAGGTCAGAGCAAGACCTTG
+ACAAAAAAATAAAAAAAGAAAAGCAGATGCAGCTAGCTTAGCTGGACCGTTGTGCCCTGCAGCGGTTAGACAAAGAGGAT
+AAAAGGGAAGGAGGATATGAGTAGGGCACCACCTACAACATATCCTTCACTGTGATGGTGATTAAAAAGAAAAAAAAAAA
+CTCAAGTTTCAGATTTATGTTATTTTAGTCAAAACATCTTTTTTTCTTTTTTTAAGACTGTTTACTGTATTTTGGATGAT
+GCCTCAGTTCTCTTGCTGGCAGAGGTTGGAACACTAGATGGCATTGTATTAAGTTGTCCTATAGCTCAGGAAAAAAAAAA
+ATGCAAAGGGATGTTCCTAGTTGTCTTTGGAATTTAACAATTATCAGCATGAGTTGAGAGCAAACAACTGTCCCAGCCAA
+AGTGATGATGGGTCGCTTCAGCTCTACCAAGGCCACCACTCCATCTTGATCTCACACCTTGGCAGCAACATCACAAACCA
+TGGGGGCCAAAGCACAAAGGGAGCAGGCTAAGATAGGAGGCATTTTCTAAAAACTAGGATTCGCAAAACCCATGGGAGGA
+GAAGCAGTAAGGAGAACGAAGCCTTCTGCCTCAAGAAAGGTCTTTGAGAGGCACAGAAAAATGGACTGCAAGAAAAAAGC
+AGCAAACTTATAGTAAAGATTTATTCTTACTTTAGTAATAGTAAAGATTTATTCTTTGCTGTAAGTTAGGTGTCTTGGTG
+TTGCCTGAAAAGAGATACTGCCTTTCTCCCTTGAATGTAAATGAGAGAGAACTCTTGATGTCTTTTACCAACTTAGAGAA
+AGCCCATACCCAGCCTTCACAAAATGCCATGAGAAAAAGAGGGGTCAGGTGTGGTGGCTCACGCCTGTAATCGCAGCACT
+TTGGGAGGCCAAAGCGGGCAGATCACCTGAGGAGATCAGGAGTTCAAGACCAGGCTGGCCAACATGGTGAAACCCCGTCT
+CTGCTAAAAGTATGAAAATAAAAAAAATGAAGAATTTACTCTTGGGGGCAGGAATGGAAATGAGAGTGGCAATAGGATGG
+GGTTGAAAGTGAGGGTCAAAGGCATTTAACATTTTCTTTAAAATATTTTAAACAAGTGAGATAAAATGTCAACAATGATC
+AATTCCTCATAATGGATATTAGGTGGATGTCATCTTATTATCTTTATCTTAAATTTCAAAAAAGAAGAGAAAAGAATATG
+ATGTTCTCTTGTAATTAGCAAGGGCTGATAGGGTTCAAGAGGTGACACTGATATGGATCTTGAGTAACTGTTGACAAATA
+CTTCCCTTTACTGAGCTTCCATTTTCTTGTTTATAAACTGAGGATAATGATAGTAGTGATGTCAGGGAATTGTCATAAAC
+CTCGACTAAAATAATGTACTCGGAACAGTGCCTAAAGCAAGGTGGGAACAGTGCCCAAAGCAAGGGGGTCCCTCAAAAAA
+TGCTTGCTTGCCCAGGACCCTCACCAGCCACCACCACCAATGTGAGTACAAGGATTAGTTAGGCTCATCTTTCTCACTGA
+GTCAAACCATACGATTGGCTGGTTGTCTGGATGTGATTAGAAACAGAAAAATCTTAAATAAAGCCTCATCCTGAATCCTC
+TCAATCCAGAGACAGAGTTGACCTAAGACCAAAAAAAAAAAAAAAAAATAGAAAATAGTTTTTGGAAATATGGGTGAAGA
+GACATCTCCTCTTATGGAAAAAGGGATTCTAGAATTTAACAATAAATATTCCCAACTTTCCCCAAGGCTTTAAAATCTAC
+CTTGAAGGAGCAGCTGATGTATTTCTAGAACAGACTTAGGTGTCTTGGTGTGGCCTGTAAAGAGATACTGTCTTTCTCTT
+TTGAGTGTAAGAGAGAAAGGACAGTCTACTCAATAAAGAGTGCTGGGAAAACTGAATATCCACACACAGAATAATAAAAC
+TAGATCCTATCTCTCACCATATACAAAGATCAACTCAAAACAAATTAAAGACCTAAATGTAAGACAAGAAATTATAAAAC
+TACTAGAAAAAAACACAAGGGAAATGCTTCAGGACATTGGCCTGGGCAAAGATTTTATAAATAAGACCTCAATAGCACAA
+ACCACAAAAGTAAAAATAGGCAAATAGGATTATATCAAACTAAAAAGCTTCTGCACAGCAAAGAAAACAATCAACAGAGT
+AAAGAGACAACCTGAAGAATGAGAGAAAATATTTGCAAAGTATTCATCTGACAAGGGACTAACAACCAAAATATACAAGT
+AACTCAACAGCAAACCATCGTCGTCATCATCATCATCATCATCCAATTTTTATAATGGACAAATGGTCTGAATAGACATT
+TCTCAAAAGGAGACACACAAATGGCCAACAAGTATAAGAAAGAATGCTCAACACCACTGATCATCAGGAAAATGCAAATC
+AAAACCACAGTGAGATATCATGTCATCCCAGTTACGATAGCTAATTATTTTTTTCTTTTTTCTTTTTTATTATTTCAATA
+GTTTGGGGGGAGCAGGTGGTATTTGGTTACATGAATAAGTTCTTTTGTGGTGATTTCTGATATTTTGGTGTACCCATCAC
+CTGAGCAGTGTACACTGTACTCAATGTGTAGTACCACCCCTGAGTCCCCAGAGTTCATTATATCATTCTTACACCTTTGC
+ATCCTCATAGCTTAGCTCCCATTTATAAGTGAGAACATATAAGGTTTAGTTTTCCATTCCTGAGTTACTTCACTTAGAAA
+AATGGGCTCCAACTTCATCCAGGTTGCTGTGAATGCCATTATTTCATTCCTTTTTATGGCTGAGTAGTATTCCATGGTAT
+ATATATACCACATTTTCTTTATCCACTCATTGGTCAATGGTCCTTTAGGCTGGTTCCATATTTTGCAATTGCAAATTGTG
+CTTCTATAAATATGTGTGTGCTGCTATAAAAACATGTATACTTTTCATACAATGATTTCTTTTCATCTGAGTAGATCTAG
+TAGTGGGATTGCTGGATCAAATGGCAGTTCTACTTTTAGTTCTTTAAGGAGTCTCCATACAGTTTTCCACAGTGGTTGGA
+CTAGTTTACATTCCCACCAGCAGAAGTAAGGTGGTATCACATTTTGGTTTTAATTTGCATTTCCCTGATAATTAGTGATG
+TTGAGCATTTTCTCATATGTTTATTGGCCATTTGTGTATCTTCTTTTGAGAATTGTCTATTCATGTCTTTAGCCCACTTT
+TTGATGGGATTATTTGTTTGTTCTTGCTGATTTGTTTGAGTTCCTTGTAGATTATGGATATTAGTCCTTTGTTGGATGCA
+TAGTTTGTGAATATTTTCTCCCACTTTGTGGGTTGTCTGTTAACTCTGTGGATTATTTCTTTTGCTGTGCAAGAGCTTTT
+TAGTTTAATTAAGTCCCATCTGTTTATCTTTGTTTTTGTTGCATTTGCTTTTGGGTTCTTGGTCATGAACTCTTTGCCTA
+AGCCAATGCCTAGAAGAGTTTTGGGGATGTTACCCTCTAAAATTTTTATAGTTTCAAGGCTTAGATTTAAGTCTTTGATC
+CATAAGATGGCTAATTATAAAAAAAAGATAAAAAATAGCAAATGTTGGCAAAGATGTAGAGAAAAGGGGAACTCTTGTAC
+ACTGTTGGTGGGAATATAAATTAGTACAGACATTATGAGAAACAGTAGGGAGTTTTCTCAAAAAACTAAAAATAGAACTA
+TCATATGATACAGCAATTCCACTACTGAATATATATTTCAAGGAAAGGATATCAGTATGTCAAAGAGATATGTGCACTTC
+TATGTTCATTATTTATAAATAAATAAATAAATTATTTATTTATAAATGAACTCCAATGTTCATTTATTCACAATGGCCAA
+GATACAGATCAACCTAAGTATTCATCAACAGATGAATGGAAATGAATGTTTGGTAACAAAAAAAAAAGATGAATGGATAA
+AGAAAATGTGATATATATGTATGCAATGGGATACTATTCAGCCATATAAAAAATAAAATCCTGCCATTTGTGGCAACATG
+GATGAGCCTGGAGGACATTATGTTAAGTGAAATACCACATGTTCTCACTTATGTGTGGAAGCTAAAAAAGGTGAGGTATG
+GAAGTAGAGAGTAAAACAGTGGTTACTAGAGGCTGGTAAGGGAAGGGGGAAGGAGGAAATAGGGAGAGGTTGGTTAACAG
+ATACAAAATTACAGCTAGGTAAGGTAAATAAGTTCTAGTGTTTTTCAGCACTCTAAGGTTACTATAGTTAGCAATATTTT
+ATATTTCAAAATAACTAGAAGTAAGAATTTTAAACATTCCTAACACAAAGAAATGATAAATGTTTGAGAAAATAGGTATG
+TTAATTACCTTGATTTGATCATTACACATTGTATATAGGTGTCAAAATATCATACTATACTGCATAAATATGTATAATTA
+TTGTGTCAATTAAAACATGTGTAGGACACAGCTAAAGTAGTACTGAGAGTAAAACGTATATCACTAAGTGCATACATTGC
+AAAAGGAGAAAATCCTCAAATAAAAATTCTAACCTCCCACCTCAAAGAGAAGGGAGGGCATGACAAAAATAAGAGCAAAA
+AAATCAACAAAATTGAAAACTTAAAATCAATAAAGAGCTGGTTCCTTGAAAACATCAATGATATTTACAAACATCAAAGC
+AGATAAAAAGAATAAAGGTCGGTATCAGGAATGAAACAGGGAATATCACTACAGATGTTTTCAACAAAAGGTGATGGCAC
+AATTAAACATCCATAAACTATAAAATGAACTCTAAGTTTCACATCTTATACAAAAATTAACTAAAATTTATTATCCACTT
+GTGATATGGTTTGGCTGTGTCACCACCCAAATCTCACCTTGAATTGTAGCTTCCATAATTGCCAGGTGTTGTGGGAGGGA
+CCCAGTGGGAGGTAATTGAATTATGGGGGTGGGTTTTTCCATGCTGTTCTCATGATAGTGAATAAGTCTCACAAGATCTG
+ATGGTTTTATAAAGGGCAGCTCCCCTGCACATGGTCTTTTGCCTGCCACCATGTAAGACATGCCTTTGCTCCTCCTTCGT
+CTTCCACTATGATTGTGAGGCCTCCCTAGCCATGCGGAACTGATGAGTCTATTAAACCTCTTTTTTCTTTATAAATCACC
+CAGTCTTGGGTATTTTTTCATAGCAGTATGAAAATGGGCTACTACAACTTGTAAACCTATAAAACCTTTAGAAAAAAAAA
+AACACAAGAAAAACTCTTTGGTATATAAAACTAGGCAAAGGGTTCTTAGACACCAAAAATGTGATTCATAAAAAGAAAAA
+TTGAAAAAGTGAACTCCACCAAAGTTAGAAAATTTTGCTCTGTGAAAGAACCTATTAAGGAGATAAAAAGACAAGGTACA
+AACTAGAAGAAAATATTTGCAAACCACATACATGTCAAAGGACTAGTACCTAGGATATATAAAAAGCTCTTAAATGAAAC
+AGTAAACAAACAATTCAATTAGAAAATAGGCAAAAGAAAGAGATATTTCACTGAAGATGTTATATTGATGGAAATTAAAC
+ACATAAAAAGAAGTTCAACAACATTAGCCACTTGGAAAAAGCAAATTAAAACCACATTGAGATATCATTACACATCTATC
+AGAATAGCTAAAATTAAAAAAGAATTAACAAAACCAAATATTGGAAAGGATGCAGAGATATCATATCACTCAAATATTGC
+TGCTGGGAATATAAAGTGGTGTGGCTATTCTGGAAACAGTCCCACAGTTTCTTAAAAAAGGAATAAACATGCACCTGTGA
+GGCTGGGTTCTCATGCCTGTAATCCCAGCACTTTGGGAGGCCGAGGCGGGCAGATCACAAGGTCAGGAGTTCAAGACCAG
+CCTGGCCAACATGGTGAAACCCCATCTCTACTAAAAATGCAATAAATTAACTGGACGTGGTGGTGGGTGCCTGTAATCCC
+AGCTACTCAGGAGGCTGAGGCAGGAGAATCGCTTGAACGTGGGAGGTGGAGGTTGCAGTGAGCCAAGATCATGCCACTGC
+AGTCCAGCCTGGGCAACACTGTGAGACTTGTTCTCAAAAAAAGAAAAAAAAATGCACTTGCTATATGGCCCAGCAATTGC
+TCTCCTGAGCATTTATCTCAGATAAATAAAAATTTATGTTCCCACAAAAACCTGTACATGAATACTTATAGCAGATTTAT
+TCATAATAGCACAAAACTACAAGGCACCCAGATGTTCTTCAGATGATGCATAGTTAAACAAACCATTCCATACAATGAAA
+TATTACTCAGCAAGACAATGGAAGGAGTTATTGATACATGTAATAATCTGAATGAATCTCCAGGGAATTATACTGAGTGA
+AAAGAAAAGCCAATCCCCGAAGATTTCACACGATAAGCCTTTCTTTATATAGCATTCTTAAAATGACAAACTTAAAGAAA
+TGAAAAACAGATTGCTAGTTGCCAGGGGTTAAGGACCAGGCAAGGGTGGAAGGGAAGTTGGTTTGACTATAAAAGTATAG
+CATGAATGATCCTTGTGGTGATGGAAATGTTCTGTATCCTGGCTGCATCAATATCAATATCCTTGTTGTGATATTACACT
+ACAGTGTTGTAAGATGTCACCACTGGAGGACACTGGGTAATTGGTATACAAGATTCCTTTGTATCATTTTTTATAAATTC
+ATGTGAATCTACAATTATCTCAAAAAGTTTATTTTAAAAAAATGTTAAGGCTAAAGTGAATTTCATATATGAAAGAGGTC
+AAGTGGAGAAATAATTGGAAAGAGCACTGGACGTTGGGTCAGGAGTTTTAGATTCAGGTTTTGATTCAATCAACTTAAGT
+AAATAACTCCTCTTGTCTAAAATTATTTGTATAATGCAATTGACAAGACCTGCTACAGTTATCTCCCAGAGTTGATAATA
+GGTAAAAATTATATTAATTAAGAGAAAAACTTTAAAAACATAAGAACCAAGGCTCTATATAAATATTTGGTATTTAAAAG
+CTTGCATTATATAAATGATGCCTCAAAGCCTATTTCTCCTGGTAGTTTAGAAATATAATTACCTGGATAAGACCACCAAC
+TAATTTCACTTTCACCGTCATTCAGTAAATCTCAGAAATATAAGCAAAGAACAATCTTGGACAAGAGAAAAGAAGAACCT
+GATCTCTTTTCCAGCCCTATGACTCACTGAAGAAACCAGGAATATGCCACGTGTTCTCTTTCTGCTGCAAGGGTTGCTGT
+GAAATAACCTCATTTAAGCTGTGTTGTACAGAATGGACCACCTTAATGCTGAGATTCCAACATTTTCTATTTCAGGACTG
+GTTTGGGGAGATTGCACAGAGTTCATCCTATGATCCTTCATGTGAGAAGGAAGCCATTCGGCCATTTATAGCCTAAAGAA
+GACAGAGCTAGAAGGGATAAAGCCAATCTTGGCTATTGCTGCTATGGGAACAGAGGGTAGAGATCATTTGCTCCATTGTA
+GAAATTTGGAACTACAGTTGACCCTCGAACAACATGGATTTGAACTGTTTGGGTCACATACATGTGGACTTTTTTCAATA
+AAAGCACCTGTCCCTCTTGCCTCCCTTTCCACCTCCTCCACCTCTTCTACCTCTGCCACCCCTGAGACAGCAAGACCACC
+CCCTCCTCTTCCTCCTCATCCTACTCAAAGTGAGAAGAGGATGAAGGCCTTTATGATGATTCACTTCTTAATGAATAGTA
+ATGATTTTCTTAATAACATTTCCTTTTCTCTAGGTTACTTTAGAATACAGTTACTTTAGAATACAGTGTATAATATATAT
+ATAATATATAAAATATGTGATAATTAGCTGTTTATGTTATCAGGAAGGCTTCTGGTCCAGAGTTGGCTGCTACTAGTTAG
+GTTTTGGAGGAGTCAAAAGTTAGACGTGGATTTTTGGCTGCACAGGATGTCTACACTCCTAACCCCAACCTTGTTCAAGG
+GTCAACTCTAATGGGAAGCTTTTGCAGCCATTAACTTTCAGTGATTTCAGTACACGCTAAACTGGCTTTTTAAAATGAGG
+GAATATATAAGCTCATTTAACAGGAAATTTAATAAAAGCTGGTTGATCCAGTACCCCAGTGTTATTACCTAGGACTTAGG
+TTCTCTACATTTCCTGTCTTTGCTATACACATGGTTGACTTCATTTTTAAGGCTGGTTCCCACCAGAGGACAATTGTGGA
+TTCATCCACTTTCATTCAAATCTGGGGAGAGAGCGGGAGAATAGCTTGTTCTTTAGAGCAAGGCAGAACTTTCCAAGAAG
+TCTTCCACGAACCTCCCTTCACATATCATTGGCCAGAACTGAGTCACAGGACTTTTTATAAACGAATCACAGACAAGGGG
+GTATGAGCTAACCCTTAACCCAATCAGAGTTACTCCTGAAGCTGAGGGTATTCCTTGGAGTCACATGGACTACATGAAGA
+CAGGGCACTTGAACAAAATTGAGGTTCTATTAGGAAGAAAAACTGAGGTTGTAAGTGCTATGTAAGTAGTTGGTGCTCAG
+TAATGCTGCCATCCTGTCTTAACTTAGCTAACAGGACTAAGTGACATATTTCTGAGGACTCTGATACTAAATGGTGTAAA
+TATAGGGTAGAGTTTGATTTTTTTAGTTTTGACTCTTTCTTTAAACTGACTGTGAAACTTCAGGAAGTCAAGTTATTGAG
+CCCTTGTTTTCTTACTTCTGAAAGGAAATAAAACTGTATAACTTCGGGCAAGTTACTTACCTTTACAATGTGTCAGCTTT
+CTCTTTTGTGGATTCCTTATCTAAAGTTTAATAGTACTTTACTCATAAAGTTGTATTAGTTTTCTAGGTTTTCCATAACA
+AAGTACCACAGACTTTGTGGCTTAAACAACAGAAATTTGTTTTCTCATATTTGGAAAGCTAGAAGGCCAAAATCAAGGTG
+TCGACAGGGTTGATTTATTCTGAGGACCTTTCTCCTTGGCTCGTAGATGACCATCTTTTCTCTATATCTTCGCCTGATCT
+TGCTTCTGTGTGTGTCTGTGTCCTATCTCCTCTTCTTATAAGGACACCAGTCATATTGGATTAAGGACTACTTTAATGGC
+TTCATTTCACATCTTTAAAGACCATATCTCCAAACAGGTCACATTGTGAATACTAAAGGTGAGGACTTCAACATATGAAT
+CTTGGGGTGATACAATTCCATGCTTTTGATGTAAGAATTACGTGATTTAATACATGTTAAGGATGTAAATCTATATAGTA
+GATGCATACTCAACATTAGTTATAAAATTATCTTATTTTCAGTAGTAGCAACAGATATAAAAACAATAGTATTTGAAGTA
+GTAATAGCATCACTAGCAGGGATAATAGCAGAACTGTCTGAGAGAAAAGGAGAAAGAATCCCTATGCAAGGAAGGGGGAA
+GATATACTTTATAGAGAAATGAGACACAAGAGAATAGTATTTGTTTCACACTCTTCCAGCTACTATGGCTGTGTAACATA
+GTATGTCAAAACTTACTGGCTTAAGTCAACAACACTTATTTTCTTCATGAAATCTGCAGTTTTGACAGGCCCAACAGGGA
+TAGCTCGCTCAAACTGTACTTGTCATCAGCTGGAACGTTCAGAGTCTCACTCACATGTCAATGCCAGCTGTCAGCTGGGA
+CCTCAGTTGGACACGTATGTGGGGCCTCTCCACATGGCCTGGGTTTCTGCACACATTGTTGCTGGTCTCCAAGGGTGAGC
+ATCCTGAAAGAGAAAAAGTGGAATTGTAGTGCTTTTTATGACCTAACTTCAAAAGTCACTAAAAATTCCTTTGGTTGCAT
+TCTACTCATTTGAGGCATTTATAAAGACTGTCAAAGTTCAAGAGATGGGGAAATAGACTCTTCCTTTTGATGGAGAGTGT
+CAAGGTCTGGAAGAGAATATGGGACTGCATTTACTGCTGTAGTCATGTTTGGAGAGTATAATCCTCCATTCATACCTAAC
+TTCCCACACGAAGCTTCTCTTTCTGGAGCTTACTTTCAAACACTTGGGTTTGTGATTTTGTTCCAGATCAGAATTGGATC
+CTTATGCTATGCTACCCAACTTCCAGAGATTTGAGGATAAAATGGCACATTTGGCAGAAAACTTTTGAGAAGATCTTTAT
+CAAAGAGGGAACTCTCTAAATAAAGGTTCAGTGTGCCTCATAGTTCTCTTTCAAACAGGAACATACAGAAAGGGGGTTGA
+CATTTTGATTTCCCAGATAATCTTTTGTTTTTATGATTTTGGATATAAACATGTCTCAGACCTGAACAAGGAACTCAGAT
+TTTTATCCTTAAAATTAAATCTGAAGAATAAGGAGAACCACATGCCCTTATCAGCTTTAATATTCTAAGAAAATGAGAAA
+AAAAATGATTTAAGAACATTTAAAAGTTGTGCACTGGGTTCATCAAAGATTCTCCCATTGAAGAGAAAATCTGAGCTGTT
+TCTTGCTTTATATTTGAATGCTGTGCATGGTTTTCATGATGGCAATAATAATTTACTGCAAAATGCTTTTATTGAGTTGC
+CTTTACTGAAGAGGCCCTGCATGCCTTTTCACCAGCTTGCTACAATTAAAAGCAGCAGTAAACAGAGCCTTTCTATTTAG
+ACTGAAAAGCAATGATTAAGCTTACCCAGCTATAAATAGGAGGCAGCTTCACATCTATGGCCCCAACTGCACACATTAAA
+GACTCCAGCTCCAAATGCCCCCAATTTATTTCTACAGGACAAACCAATAAATTGTCAAACTCTGACCTTAAAAGTGACAA
+TTTCTTAAAAAATGAAAGTAGGTAAATTATGCATAATTATAGATAATTACTGAGATGTCTATGAGGCCAGGAAATCAGTA
+CACATGCTGGAAATCTAAATTTGCACGCTGATAGCTTCTCCTTTTACCATTATCAAACCATGGTTTGCCTTGAGCAGCTG
+TCAGAGATGGCTTTTCTTTGCTTGGGGGCACAGAAGAAAATAAGGGACTTAAGAGAGAAGATGGAAAATACAAAGTTATT
+TTTTAAGATTCCCGTTAGTTCAAGTGATTCAGCAAGGAAACCAGAATATAGGCAGTTACTGTTAAACGTTTTTGAATAAA
+AATGTTTAAATGCAAGCCATTTATCCCACCTATAAGCTGTAGCTATCTGAGTGTTTAGAGATTTGAAGGCATTTGATTCA
+ACCTCCTCACTTTCTGCAGCAGGAAACTGGAGGCTAAGAAAGCCCTGTGAATTGTCCAAGGTCACATGGCTTGACAGCAG
+CCGTGCTGGAACTCAGTAGCTTTTCTGACTGTCACTGCAGCATCCACTGAATGGAAAAATTTGTTCCCTTTTTATTATTT
+TTAGTATAAAACCCTAGATTAATGAGATCTAAGTCATAAAAGTCCATAGGGTGCTGTATTTCTTTGAGTGCAAGACAAGC
+TTGCAAAGGTGACAGGATTTTCCCACTTGAAGAAACGTGGTCCCCTCTGCCTCAGCATCCTCCATCTGAGCCAAGAGTTG
+CCAATTTCCAAAAGCCCAAGAAAGTAAGTTTCCAAAATATGTCTTGGGTGATCGTATCTTTTTTAAACTTGTGAGTCCTC
+TTCATTTCGTGTTTGCATGTGGCCTTTGTCTCCCAGTGATTTTCCAACTGCCCATGACTCCCCTTGGGTCATGCGCTGGA
+TATTCTCTGTTGCCTTCTGTCACCTTCTCAACTCTGTGCCCAGGAGACTGGCATTTATGTATGTATCAATGTCACCCTAG
+TCCTTTGGCTATAATCAGGCTGAGCTATCAAGGATACTAACAGAAGACCGGATAGCAGGGAGATAAAGAAGCCACAGTTT
+CTATTCCCCCAGTTCACTTCCTGCAGAATAAAGCACAGTTGCCCAGCTGAGCCCAGCCCACATTACCAAACCCCAGAAAA
+ATTAGTGAGAATTAACAACTGCTGTACTAAACTACTCCATTTTTAGGAGTTTTTTTAAAATTCAGCATTATCTGACTGAT
+ACAAAATATGACACCTGCAAAGAATTTAAGACAACATAACTAACTTGAAATAGAGATCAATAGTTCTGAGACCCAGAAAT
+GGAAGTGAACTCAGACTGGGATTGAGTTTGCAGATTCACTTGACTCCACGTCCAGAAGCAAATGGAGACATCTGGGGCCC
+TGTGGGGTAGCAAAACTAAAAGTATCCCATGGGAAGTAAGAGATTCACTCTTCAAACTCATTGCTTGGAACTAGTAACAA
+GTTTGAGGTTCTCTCATGCATTAAAGGGGGCTGAAATAAGCTCTTAAAACCACCGTCTGAACTATAAAACTGGATAGCAG
+GGAAGCCGTAGAAAATAAAGTCTTTTATCCTGAACTTAGGCTTAACTGCTGACTTGGTAGCTGAATGTGCGAAAACCTCA
+ATTTCAGTGCATTTTGGGAGCCTTGAGCTACCAATATAAGACCTGGTTTTTGACTAAATGTTATTGAGAAATGGTGGAAG
+AAGAAATTAAATATAAATCTGCAAGACTGTGAGACATAAAATGGCGGAGAGAGAAAGAGACAGAGAGAGAGAGAGAGAGA
+GAAAGAGAGAGAGAGAGACCTCCATTCAATAATAAAATTCCAAAACACATAAGGAAGTAATGCTAAGAAAGACAGCAACA
+AAATAAACTAATAGAACTGAGTTCACTCCAGATGAAATGGAAATAATAAAATAGACTATTACCCACTAGTCTTCACTAAA
+ATAACTGCTGAAGAATGTATTTCAGGCAGAAGAAAACACCCTAAGAGATATAGAAGAATGGAGCGGAATGCAGAAACAAC
+AGTACAGAATTTCGTAAAATATGATGATAAATTTAATTAACTATTGACTCTTAAAAAGAATGGTATTTTTCACATCATGA
+GATAAAATTCAAATAACACCAAGAAAGACATAGATGGGAAAGATATTATGTGTGTAATAAAAGCATACTTAGTTATTGGG
+TTATTTGGGAGAACAGCAGAGATACTAAATCACTTAAATTCTGTTCAGAATTTTTTTTACTTAAAAATGTTAAAATGTTA
+AAACTATCAATCAAACAGGAAGAAAGAAAAGAGGTAAAATAAGCAAAAAAAAAGAGGATGGTAAACATAAAATATATAAT
+AACTAAGTCCAAATACAGCAGTAATCAAAATAAATGTAAAGGAACCAACTTACTTATAAGAGCAAGATTATCAGGCTAGG
+TATTTAAAAATCCAACCATATGTAGTTTATAATAAACACACTTAAAACATAATGGTCCACAAAGGTTGAAAATAAAGAAA
+TGGAAAAAAACAAGTAATTCAATCACTAACCAAAAGAAAGCTGATATGGCAATATGAATGTCAGATGGCACTGGATCTAG
+GCCAAAAAGCATAAAAAGAGACATTATGTAGTGATAAAAAGAACAGTCCTGGCCAGGCGCGGTGGCTCACGCCTGTAATC
+CTAGCACTTTGGGAGGACGAGAAGGGCAGATCACGAGGTCAGGAGATCAAGACCATCCTGGCTAACATGGTGAAACCCCG
+TCTCTACTAAAAATACAAAAAAGTAGCCTGGCGTAGTGGCGGGCACCTGTAATCCCAGCTACTCGGGAGGCTGAGACAGG
+AGAATGGCGTGAACCCGGGAGGCGGAGCCTGCAGTGAGCCGAGATAGCGCCCCTGCACTCCAGCCTGGATGACTGAACGA
+GACCGTCTCAAAAAAAAAAATAAAATAAAAACCAGTCCTGTATTTGAATGGCTAAGATTATGGCTTGAAATATAAAAGAA
+AAATGCTGACAAAATTACAAAACAAAATTGAAAAATCTACTATCTTGTCTGAGATTTTAAAACACATATCAAAAGCCAGC
+GGAGTCAGCAGAAAACACCAACTTGTATTAAAAAGCTGGAAAATAATAAACCCACTTTATAGATGTAGAAACTGGGGCCT
+AGGCAAGTTGAGTGATTCATGCAAGATCACAAAGTTTGTATGAATTCAAGCCAGGACTAAAACTCACACTTTCTAAATAG
+TAATCCTATGCCCTCTCCATTTTATAACATTAACACATGTAGAATAAACTGCGTATCTGCAGAAATGATTAGCCTTTTCT
+ATGTCCTTAAGAGAATCCTTAGTGTGGTAGCATATCTGTCAATGTTCCACTCGGTTTACCTTGAATTTCTTTTACTTTTT
+CAGTGTGCCTTTTCTTACCTCCTACATATTATGTTTCTAATTATCCATCTCTGTGATTCTCTATGGAGAACCACCCTTAG
+GCCACCGGGGTGTCTACTGTGAAACAGAAGTTCCTGGGATGTTATGCCCTCAATCACGGCCCTTATTCAATGACTGACCG
+GTGTGGGAAAATGGAAGCCCAGCTCCTTTGCCTTAGAGTTTGAACAAATCTAAGGTATAATTTTCACTCCGAAGCTCCCC
+CGTGGGATCAGGCTGAGGCTGAATCTCTGCTTGGCTTCTTCCCTTATCTTTCCTGCTTTTTACACCCAGTTCTACCTCGA
+AAATTTTCCTCAATAAATTGAGTTGCATGTGATTTCTCATCTCAAGGTCTGCTTCTGGGAACCCTTTCCTAAAATGAGGA
+CCATACAGCTCTGTACTAGGAGTAAAAAGAGAGTAGAAAGTGGTTCTACAGAAAACCAAATACAGCATGTTCTCATAAGT
+GGGAGCTAAATGATGAGAACACAGGGACACAAAGAGGGAAATAACAGACACTGGGGCCTATTGGAAGGTGGAGGGTGGGA
+GGAGGGAGAGGATCAGGAAAAATAACTAATGGGTACTAGGTTTAATACCTGGGTGATGAAGTAATCTATACAACAAACTT
+CTGTGACACAAGTTTACCTATATAACAAACCTACACAAGTAGCCCTGAACTTAAAAACAAACAAACAAACAGGATTATTC
+CATAAAAGTATGTTCTCTATTTAAAAAAATTTGAAGTAAATGAAAGTCACCCATAATCCCACTATTTGTAAGGCTATTTT
+GGTCATTTACCTAAGTATATATACTGTACAAGTAAAAGTGTGATAATGCCAAAAAAAAAAAAAAAAGTAGTTCTAGGGAA
+AGAATATTTGAGTGGTTAAGGTTTCCTGCTGCTTTTATCTTTGGGAAATTCCAGTTTGATTTGGGCACTAGGAACCTGGC
+ATAGTGAATGTAGTGTCTGCCAAGCTAGAAGAACACTGCTGCAACTTCAGACTTCAGGTCAATTACTATTGACTGGCCAA
+CTTTCTGTCCCGAGCATACCCTCTTGGGTCCAATTCCAGATTAATAATGGCCCAATTCCAAATCTGGAGGATTCCTGGCC
+TCTCTCTCTCTCTCTCTTTCAATGCAGTTGCTTACTTCCCTTAAGACTGGCTCTGAATATACAAGAAGGCAACTCCGGGA
+TTCTGGCCTTTAAGATGCAAGACCATGGGTAACCATACAAGCAACATCATGGGCTTTCCAGTTGGTTGAGCCATTATTTC
+AGCCTCTACTTTGTATCTCATAGCTTCACATTTCTTTGCTTTCCAAGTCTGGATTTAGAGGTGGGCCTTGTCTAGAAAGC
+CAGGTCTACCTTTTTTCAGGCAAAAAGTTTCATTAACCACTGTGGCTTAGAATAATATTTATGTAATAAAATATTTACTA
+GGTATTGAATGCATGCTATATGCCACTATACAGATGAAGAAATGGAAGTTTAGATCTGAAGATTAACTTGCCCAGATTTA
+TGTGGCCAGCTCTTGCCACAAACCAACCAAGGATTTAGTGAGTCAGACATGAGCCTGACTCTGCACGTATCCTACAAGTC
+ATAGCATTCCCTACCCTGGTTTTGCTGACCCCAGGCTCTAGCACTGGATTTATCCTTGGTTCTCTTACTGAGTGCTCTAG
+TACTTTCCTAGTTGAGCCCCTGACACCCATCCTCTGTCAACTCATTTTCCCCAGGAGCTGACACCTGCTCCTCAAAGTAT
+GTCTTCAACTTCTACCTGCCATTTTTCACAGAGACACATGGAGAATTTGCCCCAAATACACATCTCTAGGGACTTTCTGC
+TTCAATGCCCTGCCCTCCTCCTGCCAGACTGAACCATGGCAATGGGGCCTAAAATTCTCAATGCTATCTTAGGTTGGGTT
+CCCCAGAAGCAGAACCCAAGAAAGGGAAAATCAGGCACACATTGCTTCAGTCCTCTTGAGGAAAGACTCTCAAAAGAAGC
+ATCTAAAAGAAGAAAAGCAGACAGGACAACTAGGGTAAGAAGCCAAGCAAAGATGCGTGCTCAGGAAAAATCTAAACCTG
+GCCTGATATGAGAGAGGATCTGGAGCATAAATTACAACACTGACATGATGGTCCTCATCATTAAGACAAGGTGGTTCGCC
+TTTTGTACTCCTGTATCAGCCAGTCATTGCCATAGACTTCTCCTGATAAAACCATTTAGGGAGTGTAACTTTCTGGGCAT
+TAACTTAGGGTGATACTTGGGGAAGAGGACCTCTGTTAGCTTTTAGCAGTCAATACAGCAGCTGGAGGCTAGAGCCCCTG
+CTCATCCACCAGCATCTACTACAAGTGCCTGGGCCTTGGGCAATGAACGTGGGACTTCAGCAATCAGATCCCTTTTCTCA
+TCCCACCTTCCTGCTCCTGGCCTCCTCCTACTCCCCATCATCCTGTGTGGCTGGGGCCCCCCCATGCCATGTGCCTATCC
+ACAGGCTCCTGGATTTTAGTACCTGCTCAGAGTCCACTGCATTGGCGCCTGACACTTTATTAGTGTTCTAAAGCTTTGTT
+TACAGTTACCAGGACTCTCTAGATAGCAGCAAAAAGTCAACAAGGAAATAAAACTCCACCAGCTGGTGTATTTGTTGGGG
+ATAAACTCACCACACAGGAAACAGCTGGGTACATGCTCACAAACGTCAGTACCTTGCTGGTCCCAAAGGCTCATCTGAAG
+TTGCAAGTTTGAATCCCCCTTTCCAAGCTGGTCTGTTTATACGCACTCCTCCACACCTTCAGTTCCTTTACACCATGTGA
+GCCTCAGGAAGCTGAGGCAGTAATGAGCTTTGAAGCTGTTCTACTGCCTGTGACAGCCCACAGGCCAGTCATTTGATCCA
+ATTGCTTGATGCTACCTCCAGCAGCCTCTTGGCAAAATCTCTGCCATTATATACCATTGTTTCCTTACTCTAAAGCTCAC
+ATCTTCTCATGCTGAAGTCTATATGCTTCCTAGTGTATGAAAGTAACACTTTAATGTAGCCCTGAAGAAAACTGTCCAGC
+CCATCTGGACATCTGCCAGTCACTAGGAAGAGAATAGATTTTTCAGGCACAAATGTTAGCATACTAAGGAGTAGAAGGCT
+GAAGGCCATCTTCACTGACTTTATAGTCAGTGCGTTCTTTGTTCAGCAAAACTTAGCTCCTAGTCCTGAGCCAATTCCAA
+AATGCTGAATTTCATCCTTCCACTACTTTGAAAGCATTTAAGGTCTAATGAAAGGGATTAGGCCTGGAATTGGTGCCAGA
+AATTAAAATGAAGGCATAATTTGCATGTTTTGACAATACCTTCACACTTACACCCATGCCATGCTTTCTTAAACACTCTT
+GAAAATATGTGAGAATCAAGATTTCATAATCCACTTCTTCTGGAATGCCATGTAGAAACATTCTCATTTTGCTTTCTCCC
+TGCCTTCTGATTTTACTCACAGGCTTAATAATCACTGTAAGGATGGGGTAGCCATAAACAGCAGAGCATTGAGCTTTAGT
+GCACTCAGATGTTTGTGTATTTCCCCATTTACCTTGTATATAAAGCCATGCTAAGATTCGCAGTTAATGCTGGAGCTCAC
+CTTGAAAATAGCACCATCTTTCTCCTGAAGATGCTCAAACTCCAGGTAGTAACCTCAGGTTGTATCATAGCTCCTTCAGG
+GTACCTCACTGTTTTTAATAAGCACATGATAAAGATAATGTCTTCCTCGATTCTAGTGGTATTTCTTAACAAAAGCCTAA
+GCTGCACCAGGATGAGGTGTTGTAATATTTTCACAAGTGTTAAAAAGGATTACTGCTCTTCCTAATATTGGTCAATGTGC
+CCAGATGGAGACTATTGCTTTTGCGCATAACGTCTAAGCTTCTTTGCCAGCGCACAATTGTTTAAGAAACCACTTTTCTT
+GCTATGCCCTGATAATCTTGCCTCTTGAATAACTCATTTCCATCAAGAGCAGGACAGAATCAGAACAGCATGGACAAGGA
+TAATCTCTAAAATGTGGTTTCAAATGGAGAAGTATGATATGGTTTGTCAGAGGCCTGGAAGGTGCACATTGTTTAGTAGG
+TTGACCCAAAGCCATCCTCAAGAGGTAGGATGGGTTTTTTTGTGTGTGGTTTTTTTTTTTTTGGTTTGTTTGTTTTGTTT
+TGTTTTGTTTTGTCTTTTGAGACAGAGTCTCACTCTGTCACCAGGCTGGAGTGCAGTGGAGCAATCTCAGCTCACTGCAA
+CCTCCGCCTCCCGGGTTCAAGTGATTCTCCTGCCTTAGCCTCCCGAGTAGCTTGGACTACAGGCTCACGCCACCACATCC
+AGCTAATTTTTGTATTTTTAGTAGAGATGGGGTTTCACCATGTTGGCCAGGATGGTCTTGATCTCTTGACCTCATGATCC
+ACTCGCCTCGGCCTCCTAAAGTGCTGGGATTACAGGCATGAGCCATTGCACACGGCCTAGAAGTAGGATGGTTTTAAGAG
+CACTGCACTTTCTTTGACACTTCTACCCCTGAGAGTAGGGTCTATGTTCCCACCCCTTGAACCTGGGTGGGCTTTGTTAC
+TGCCTTGATCAATAACAAAGTGCAGCAGAAGTGACACTATAATTTCCAAGGCTGGGTCTGAAGAAAGTCATGTAGCTTTT
+TCCTTGTTCTCCTGGAACACATACTCTTTGTATCTTCAGCCTCCATGTAAGAAATTTTACTGTCCCGAGGCTGCCCTGCT
+GGACATACTAGGTGGAGAGACCACTAAGAGAGAGAGAGATGATCAACAAGCCTTAGCTATTCAGGCATCATACATGGGAG
+CGACAAAAGCTTCAAGATGGCACTAGCCCTAGCTACTGCTCAACTGTAATCACAGAGAACCCAAGGCCAGAAATGCCCAG
+AGGAGCCTAGTGGACCCCAGAACTGTGAGAGATACTAATAAATTGTTATTGATTTAAGCTACTGTGTTTTGAAGATGTTG
+GTTCCACTGCAATACATAATGGGAACAGGAGGTGCATTACACATAATATAATGTACAATTCAACCCAAAGGACTCTCCTA
+CCCACCACTGGATACATGTCTATGAGAGTTGGCACTGCTGTGGGTGCACAGAGCCAAGGCCATGCCTCACAAGACAGTAG
+AGTGGGTGGATGTCTGCATGCTATGATTTGAGAATGACTCAAAGTAATCTCCCTCAGGGGGCTGCTCAGGTCCCTGACCT
+CTTCCACAGTAATTGCCTTGAGAGGATATGCAATCTTGGAAACTTGCCAAATGTTAGAAATCCCCTTGTTCAGTGGGTTT
+CAAACTAAGAACACTCAGGTTTTGAGAAGGGATTCCAAGGAAGTACAGTGAAAGCAAGCCTTCTACAGACACAGTATTCT
+CTCTCTCTGGCCTCAGAGGGAGGATTTCTTCCCAGCCTCAGGGGTGGAACTGACCCAAGGTGCCCATCAGTGCAACACAT
+TCTTCTTGCCATTGTGATTGGTATAGGGCTGGACACGTAGTCCACATCAGGCCAATCAAAGTGAGTATCAGGACAGTCCC
+AGGGAGTTCTGGGAAAACAATGTTCTCCCTTTTTCACTAGATTGGAATTTAGAAGTATAGTACTTTGAGAGCTGCTAGCA
+ACCATAGGAAAAGTGGCAGAGAATCCTAGGAATGGAGTGGAAAGAGAGGAAGTAGATCTCAGAAGAGACAGAAAAAAAAA
+GTTGATCCTAACGGCTTTCAACCTGATATTCAGCTGTTCTTGAAGCAGATCTCCCCTGGACTATTTTGTTATGTGAGCTC
+ACAAATTTATTTTGTTTAGTGATTTAAGCAACTAGAATTTTAAAAATCAATGTGTACATATATGCTTAGCTTCTCTTGGA
+GTTTGTGTTTTAGCAGTAGAAGAAAATGATTTCTGCTGCTAAAAACATGTTTGGAAATAGCTGATCTTATCCTAGTGATC
+TAGTCCTTTTTGTTGTGCAAAAGGAAGTGCTAAAATCAGAATGAGAACCATGGTCACCTGACATAGACACAAGTGATGAT
+GATGATGATGATGATGATGATGATGATGATGATGATGATATCCATGTTCAAGTACTAATTCTGGGCAAGACACTGTTCTA
+AGTGCTATGAATATATTACCTCATTTAATCATCTCCAAAACATTATGAGATATTCTGTTACTATCACCATTTACAAATGA
+TGGGATTGAAGTACAGAGAGGATAAATAGCATCCTAAGATCACACAGCTAGTAAGTTGTGATGCTAGGATTGAACACAGC
+TGTTCTCTTAGCCATACCCTACACCCCTCTGACAAGGTAGAGTGCCTTCTCATTGGCCCCTAGGTTCTGGGCTCTCTTTC
+CACGATGCCTCTATTCTTAAAGCATGCAACCCAGAAGGTTCCTGTTAAATGTAAGTGAGAGAATTTCTATCACTGTGGAC
+CTGCTTCCATCATCTGGTTTGTGGTTTTATTCTAACCACAAAGTATATGACTTACTCATGTCTGCTTGGTTTTGACTCTT
+TACTAAGAGTAAGGGAATGCAGTGATCAAAGGGAAGGAAGATTTTTAACTTAAGTTGTCAGCTTGCTTCTGCTTCCAAAA
+AAGTTTCATGCTTAAAAATATGGCACCATGCTTTATTCTTCAAAAGAAGAAAAGTCAAATTGTAGCTGACTCCAACTCCT
+TGTTTTACTTAGGTGGAGGAAGATTCCATCTCAGTGAGTATGATTTGTTCCTGCCCACCCCTCAGCTCCCAAAATGGCCC
+AGGAAGTTGTATGCAGCATAGAATTTGATGTCACTGTTATTTGGTCAACTTGTATGATCATTGTTCAGATTTGTGTAGTC
+CCTGGAAAGAAGGTGCCTCTGACAGTTCTCTGTCATTTTGGGGTACAGGGCACCTTGTTATATGGCTGCTTCCCCGGGGC
+CTAGTGTCAAGCTTCTCTTCGTGGGTGTTATATATATATCCTATTCTAAGAAGATGCTAAGGTGTCAGATTGGAAGCCAC
+CATCCCTCTCTTTGTCTCCATTAATCTCCACCTCTCTGCCTTTGCTATTTCCGCCCATCCCTTCCTTTCAGTGAAGAACA
+CAGATCCCCTCACTGCTCTGCTCCATTCTTTATAGCTCTTTGCATATTGCATGGGTTTTGCCTAAATTAGAATGGACCTT
+AATAGAACTGTGGGGCTTTTAAACATTCAAGCATCACTTATTTTAATATCCCCCTAAATACCTTTTGAAAAATCTCTTAG
+AAGTCCAACCGATTTAAGGGTTAGTGTGCATTTTAATTGACCATTCAATTTCAAATAAATGTGGAGGAAATTACAACCCT
+CTTAACAGAGAAACGACAATTTAAGGAGAATGAGACCTTGATTGAAACAATTAGGAACTTAGAAACCAGACTCTGATGTG
+GTCAAAGAAAACAGCCCAGGAATTTATAAAGCCTGCCTTATTTTGCTTTGAAGAAAAACTTGAAAACCATAAAACAAGGT
+GAGATGTTATCCAAAATTTGGAAGCTAATTTAATAAACAGATTAATGCAGTTAGTTTTGTTTGGCATTGAGAAACATTGG
+TTAACCAAAATTTTATTACAAAAATAAAATAAAATAGTTAGTTCTCACCCAGCCTATGGACTTGAGGCTTAGAAGAAAGA
+CATTTTAATCTCCTGTCATCTATTAACATATTTTTTATCAAATCACATAATCAGATAAATTCTTTTTTTTTTCTTTTTGA
+GATGGAATTTCGCTCTGTCGCTAGGCTGGAGTGCAGTGGCATGATCTCAACTCACTGCAACCTCCCCCTCCTGGGTTCAA
+GTGATTCCCCTGCCTCAGCCTCCCAAATAGCTGGGATTACAGGCACACACTACTACGCCCAGCTAATTTTTGTATTTTAG
+TAGAGACGGGGTCTCACCATGTTGGCCAAGATGGTCTCGATCTCCTGACCTCATGACCTACCTGCCTTGGCCTCCCAAAG
+TGCTTGGATTACAGGCATGAGCCACCGCACCTGGCCTCAGACAAATTCTTGATCAGATGGGCTCCCTAAGAACACTCTTT
+TCTTTCTTTCTTTCTTTCTTTCTTTCTTTCTTTCTTTCTTTCTTTCTTTCTTTTTTCACATTTTTTTCCCGTAGGTTATT
+GGGGTACAGGTGGTATTTGGTTACATGAGTAAGTTCTTTAGTGGTTATTTGTGAGCTCCTGGTGCACCCATCACCTGAGC
+AGTATACACTGAACCCTGTTTGTAGCCTTTTATCCCTCATCCCCCTCCCACCCTTCTGCTAAAGTCTCCAAAGTCCATTG
+TATCATTCTTTACTTCGATTTTTTGTTTGCTTGCTTTGAGACAGAGTCTCACTCTGTCGTCCACGCTGGAGTGCAGTGGT
+GCAATCTCAGCTCACTGCAACCTCTGCCTTCCAGGTTCAAATGATTCTCCTGCCTCAGCCTCCTGAGTAGCTGGGACTAC
+AGGCATGTGCCACCACTCCGGGCTAATTTTTGTGTTTTCAGTAGATACAGGGTTTTTACCACGTTGGCCAGGCTGGTCTT
+GAACTCCTGACCTCAGCTGATCCACCTGCCTCAGCCTCCCAAAATGCTGGGATTACAGGCATGAGCCACTGTACCTGGCC
+CATTGTATCATTCTTATGCTTTTGCATCCTCATAGTTTAGCTCCCACATCTCAGTGAGAACATAGCAATGTTTGGTTTTC
+TATTCCTGAGTTACTTCACTTAGAATAATAGTCTCCAGTCTCATCCAGGTTGCTGCAAATGCCATTAATTCATTCCTTTT
+TATGGCTGAGTGGTATTCCATTATATATATATATAAAAAACTGGGTATTTGTGTGTGTGTATATATATTATATATATAAT
+ATATATAATATATATGTAATATATATAATATATATATTAAATATATATAATATGTATAATTTATATGTAATATATATAAT
+ATATATATTAAATATATATAATATATATAATTTATATGTAATATATATAATATATAATATATATAATATATTATATATAA
+TGTGTATATATATATAATATATATAATATTATATATATTATATATAATGTTTTATATATATATAATATATATAATATATA
+TATAATATATATAATATATAATGTGTGTATATATAATATAATATATAATATATAATGTGTATATATAATATATATTATAT
+ATATAATGTGTATATATAATATATATATTATATATAATGTGTATATATAATATATATTATATATAATGTGTATATATAAT
+ATATATTATATATAATGTGTGTATATATAATATATATTATATATAATGTGTATATATATAATATATATATTATATATATA
+ATGTGTATATATATATATATTATATATATAATGTGTATATATATATATATAATATATATATATATACACTGAGATTTTTT
+GTTTTGTTTTGTTTTTTGAGATGAAGTTTCACTCTTGTTCCCCAGGCTGGAGTGCAATGGTGTGATCTCGGCTCACTGCA
+ACCTCCCCCACCGGGTTCAAGTGATTCTCCTGCCTCAGCCTCCCGAGTAGCTGGGATTACAGGTGTCTGCCACCACACCC
+GGCTAATTTTTGTATTTTTAGTAAAGACGAGGTTTCACCATGTTGGCCAGGCTGGTCTCGAACTCCTGATCTTCCGGTGA
+TCTGACTGCCTCGGCTTCCCAAAGTGCTGAGATTACAGGCGTGAGCCACCACACCCAGCCTGGTTTTTTTGTTTTGTTTG
+TTTGTTTGTTTTTTGTTTTTGGGGGATGAAATTTCACTCTTGTTGCCCAGGATGGAGTGCAATGGTGAGAGCTCAGCTCA
+CTGCAACCTCCGCCTCCTGGATTCAAGCAATTCTTCTTTCTCAGTCTCCCGAGTAGCTGGGATTACAGGCACCTGCCACG
+ACACCTGGCTAATTTTTTATATTTTAAGTAGAGATGGGGTTTCATCATTTTGGCCAGGCTGATCTTGAACTCCTAACCTT
+CAGGTGATCCACCCACCTCGGCCTCCCAAAGTGCTGGGATTACAGGCATGAACCATGGCACCCAGCCTATACCAGCTTTT
+TATCCGCTCGTTGATTGATGGGCATTTGGGTTGAGAACACCCTTTTCTATCCGTCCATTTCATTTATTTCCCATTTCAAA
+AATTACTCCATAAACGTGATCATCCCCATAAACACAGGATAAACACTTGACAAAATTCAGTGCCTTTTCATGATAACAAA
+TAGTAAAGAAGGAAATTTTCTTAACTAATAAAAGTCAATACACAAAATTCACAGGTAACATCATACTACACAAAATTGAA
+AAGATTCCCCACTAAACCACGAACAAGACAAGGAGGGCCACTCTTACCATTTCTATTAAACATTATACTGGAGCTCCTAG
+CTGGGGAATTAAGCAAAAAGAAGTAAGAAAAAATGGAAATAAGTAAATGACATCCAGATTGGGAAGGAAGAAATAAAATC
+ATGTCTATTTGCAGATGGCATGATCTTCTATGTAGAAAATCCTAAAAAATCCATGCATACACACAAAAAGCTATTATAAC
+TAATAAATGAATTCAGCAAGACTACAGGATAAAACATCAAAAATCAAAAATCAATTCTCCTTTTGTACACTAGCAATGAA
+CAACCCAAAAATGGAATTAAGAAAGACATTTACTTTATAATAACACCAAAAAGAATAAAATACTTAGGAAAAAATGTAAG
+CAAAGAAGTGTGTGGTCATTTATTTTTTATGTCCTCTGGACTAGGCCTTGGGCTACCCAGACATTTGTTCAAGCATTATT
+CTAGGCATGTCTAGGCATGTTTGTGAGAGTGTCTCTGGAGGAAATCAACATTTGAACCAGTAGGCAGAGTAAAGCAGATT
+GCTCTCCCTAACGTGTGTTGGTTTCAACGCATCAACTGAAGACCTCAACAGAACAAAAAAGCTGAGTAAAAGGAAATTCC
+TGCCTGACTACTTGAGCTCGTCTTTTCTGGCTTTTGGACTCAAACTGAAACATCAGCTCTCCTGGGTTCTCCAGCTTGCT
+GACTGAAAATCTTGGAACTTCTCAGCCTCCATAATTCCATGAGCGAATTCCTTATAATCGATTATTCTTTCTTACATAGG
+TACATATGGGGTGTGTATATATATAAATATATGTATATATATACACACATATATGTATACTTCTAGTTCTCTTTCTCTGG
+AGAACTCTAATGCAAGATTATAAAAAAAATTACTGAAAGAATTCTTAAAATACATACATAAATTGAAAGTCATTTTATGT
+TCATGGATTGGAAGACAATATTGTTAAGCTAGCAACACTCCCTACATTAATTTACAGATTAAACCCAAGTCTTATCAAAA
+TCTCAACTGCCTTGCTTACAGAATTTGACAAACCTATCCTCAAATTCACATGGAAATGTAGTAAACTCAGAATAGCCAAA
+ATAGTCCTGAAAAAGAGGAATAAAGTTGGAAGACTTACACTCCCTGATTTCAAAACTTACTACGTAGCTACAGGAATCAT
+CCTGAATACTTTGTACTGGCATAAGGATAGTTATATAGATCTATAGAACAAAATTGAGAGTCTAGAAATAAACCCTTACA
+TTTATAATCAATTGATTTTGGACAAAGATGTCAACAAAATTCAGTGAGGAAAGAATAGTCTGGTGCTGGGGTGACTGGAT
+ATCCTCATGCAAAAGAATAAAGTTGGACCCTACCTCATATCATATACAAAAATTAACTCAAAATGTAGGAGAGATCTGAA
+TGTAAGAACTAAAATATAAAAATCTTAGCAGAAAACCTAGGTATAAATATTCGTGATCTCATATTAAGTGATACTTTCAT
+ATGTATGACACCAAAAGCACAAATCAAAAACTTTTGTGTGTCAGAGAACATCATGAAATAAGTGAAAATAAAAAAACCAC
+AGAATGGGAGAAAATACTTGCAAGCCATATATCTGATAAAACACTTGCCGTATAAAGAACTCTTCAAAGTCAACAAGAAA
+ATGACAAACAACTCAATTAACAAATGGGGAAATGATCTGAATGAACATTTTTCCAAAGAAGATATTCAGATGGCTAATAA
+GCACATGTAAATATCTTCAACATCATTAGCCACTATGCAATTATAAATTAAAATTACAATGATATATCACTTTGTATCCC
+TTAGGATGATTAAAATAAAAGACAGACAGTAACAAACATTGGCAAGGGTAGAGAATTGGAACCCTCATATATTGCTGATG
+GTATTGTAAAATGGTGTAGCCATTTCTGAGTAACAGTCTGGCAGTTCTTTAAAAGTTAAAAATAGAGTTACTGTTTGTGT
+GTGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTATTTGAGACAGGGTCTCACTCTGTTGCCCAGGCTGGAGTGCAGT
+GGCGCGATCTCAGCCCACTGCAATCTCCACCTCCCAGGTTCAAGCAATTCTCCCACCTCAGCCTTAGAATTACTGTTTGA
+CTCAGCAATTCCACATCTAGGTAATAATCACCAAAAAAGTAGAAACAACCCCAATGTCCATCAACTGATTAACAGATACA
+CAAAATGTGGTATACTCCATACGGATTATCATTTGGCCATAAAAAGGAATGAAGTACTGGCTTATACATTCTAAACATGA
+ATGAACCTTGAAAACATTTTGCTAAGTGAAAGAAGCCCATCACAAAAGACCTTATATTTTATGATTCATTTATATGCAAT
+ATCCAGAATAGGCAACATCATAGAGACAGCAAGTAGATTAGTGGTTGCCAGGCAATCGGAGGGGAGAGAAGGGAGATAGC
+AAATGACTGTTAGTGGGTACGGAGTTTCTTTTGGGGGTAATGAAAATGTTCGGGAATTAGTGGTGATGGTTGCACAACTC
+TGTGACTGTACTAAACAGTATTGAATTACACACCTTAATATATTTTATAATATATGAAGTACATCTTGATAAAGCTGTTC
+CAAACACTTTATATAATATATGCTAAATATATATTATATAACAATATAATCAATATAATATATATCTCATATTTATTATT
+GTGTATTATATATTGTATAAATGTATATATAACTATATAGTATAAAATTATAACAATTGGACATGTTATAATTGTTATGA
+GCCCTATATATATCTCCTGTTGCAGCTTAAAAGCTGGAAGTCCCTGAATTCATTTGAAACCTAAAATAATTTCCTTCTTA
+CTTAAATATGCTGCCTTAACATCCTTTAAAGGTCTGAGAATTAAAACAAAACAATACCTATCAAAAACTTCCTTGTGACT
+AGTTTATCCCTAAGTCAAGTTGTTTTCTATTTATTTGTTTTCTTTTGGTTTGGATCAGATTGGTTTGTACATTATGTGGT
+TAAGAGCGTCTGCTGGGCTTCTAGTACTGGCCATCACCACTAGTAGCTATGATACAGGGAAAGTTATGTCCCTCTCTTTG
+CCCAGGTTACCCATCTGTAAAATGGGAAAATTTTTGAGGCCATCACTAAATATCAGTAGACTAAAGGACTAAATGACCTC
+TTAAAAATTCAGGGTTCAAGACAACCTATAGAATGGGAGAAAAATTTTGCAATCTATCCACCTGACAAACACCTAATATC
+CAGAATCTACAAGGAATTTAAACAAATTTACAAGAAAAACACAAATAACTGCATTAAAAAGTGGGCAAAGGACATTAACA
+GACACTTCTTTGAAGAAGGCATTTATGTGGCCAACAAACATGAAAAAACGTTCAACATCACTTATCGTTAGAGAAATACA
+AATCAAAACCACAATGAGATACCATCTCATGCCAGTCAGAATGGCGATTATTAAAAAGTCAAGAAACAACAGATGCTGGC
+AAAGTTGCTGAGAAAAAGAAATGCTTTTACACTGTTGGTGGGAATGTAAATTAGTTCAACCATTGTAGAAGACAGTATGG
+TGATTCCTCAAAGACCTAGTACCAGAAATACCATTTGACCGAGCAATCCCATTGCTGGGTATACATCCAAAGGAATATGA
+ATCATTCTATTATAAAGATACATGCCCACGTATGTTTACTGCAGCACCATTCACAATAGCAAAGACAGGGAATCAACCCA
+AATGCCCATCAATGACAGATGGGATAAAGAAAATGTGGTACATATACACCATGGAATACTATGCAGCCACAAAAAGAAAT
+GAGATCATGTCCTTTGCAGGGACATGGATGCATTTGGAAGCCATTATCCTCAGCAAACTAACACAGGAACAGAAAACCAA
+ATATCGCATGTTCTCACTTATAAGTGGGAGCTGAACAATGAGAGCACATGAATACAGGGAGGGGAACAACACACACTGGG
+GCCTGTCAGGGGGCCGGGAGAAGGGAGATTATCAGGATAAATAGCTAATGCATGCAGGGCTTAATACAGAGGTAATAGGT
+TGATAAGCACAGCAAACCACCATGGCACACATTTACACATGTAACTGTATTAGTCTGTTTTCATGCTGTTGATAAAGACA
+TACCCAAGATGGGGCAATTTATAAAAGAAAGAGGTTTATTGGAATTATAGTTCCACATGGCTGGGGAGGTCCCACAATCA
+TAGTCGAAGGCAAGGAGGAGCAAGTCACACCTTACGTGGATGGCAACAGGGAAAAAGAGAGCTTGTGCAGGGAAACTCCT
+GTTTTTAAAACCATCAGACCTCATGAGACCCATTCACTATCACAAGAAAAGCATGGGAAAGACCCGCCCCCATAATTCAA
+TCATCTCCCACTGGGTCCCTCCCACAACATGTGGGAATTATGGGAGCTACAAGATAAGATTTGGGTTGGGACACAGAGCC
+AAACCATATCATTCTGCCCCTGGCCCCTCCCAAATCTCATGTTCTCACATTTCAAAACCAGTCATGCCTTCCCAACAGTC
+CCCCAAAGTCTTAACTCATTTCAGCATTAACTCAAAAGTTCACAGTCCAAAGTCTCATCCGAGACAAGGTAAGTCCCTTC
+TGCCTATGAGCCTGTAAAACCAAAAGCAAGCTAGTTACTTCCTAGATACAATGGGGGTACAGGCATTGGGTAAATACAGC
+CATTTCCAATGGAAGAAATTGGTCAAAACAAAGGAGCCACAGTCCCCATGCAAGTTCAAAATCCAGCAGAGCAGTCAATG
+TTAAAGCTCCAAATGATCTCCTTTGACTCCATGTCTCACATCCAGGTCATGCTGATGCAAGAAGTGGGCTTCCATGGTCT
+TGGGCAGCTCCACTCCTTTAGCTTCGCAGGATAGAGGCTCCCTCCCAGCTGCCTTCACAGGCTGGCATTGAGTGTCTGCA
+GCTTTTCCAGGTGCACAGTGCAAGCTGTCGGTGGATCTACCATTCCAGGGTCTGTAGGATGGTGGCCCTTTTCTCACAGC
+TCCACTAGGCAGAGCCCCCATAAGGACTCTATGTTGGGGATCTGACCCCACATTTCCCTTCTGCATTTCCCTAGCAGAGG
+TTCTCCATGAGGGCACTGCCCCTGCAGCAAACTTCTGCCTGGGCATCCAGGCATTTCCATACATCCTCTGAAATCCAGGC
+AGAGGTTCCCAAACCCCAATTCTTGACTTCTGTGCACTTGCAGGCTCAATACCACATGGAAGCTCCCAAGGCTTGGGGCT
+TGCACCCTCTGAAACCACAGCCCAACCTCTACATTGGCTCCTTTCAGCCATGGCTGGAGCAAACGGAATGCAGGGCACCA
+AGTCCCTAGGCTGGACACAGCATGGGGACCCTGGGCCGGGCCCACAAAACCACTTATTCCTCCTAGGCCTCTGGGCTTGT
+GATGGGAGGGGCTGCCATGAAGCCCTCTGACATGCCCTGTAGACATTTTCCCCATTGTCTTGGGGATTAACATTAGGCTC
+CTCATTACTTATGCAAATTTCTGCAGCAGGATTGAATTTCTTCTCAGAAAATGGGATTTTCTTTTCTAATGCATTGTCAG
+GCTGCAATTTTTCCAAACTTTTATGGTCTGTTTCCCTTTTAAAACTGAATTCCTTTCACAGCACCCAAGTCACCTCTCAA
+ATACTTTGCTGCTTAGAAATTTCTTCTACCAGATACCCTAAATCATCTCTCTCAACCTCAAATTTCCACAAATCTCTAGG
+GCAGGGGCAAAATGACACCAGTCTCTTTGCTAAAACATAACCAGACTCACCTTTGCTCCAGTTTCCAACAAATTCCTCAT
+TTCCATCTGAGACCACTTCAGCCTGGACTTTATTGTCCATATTGCTATTAGCATTTTGGGCAAAGCCATTCAACAAGTCT
+CTAGGAAATTCCGAACTTTCCCAGATTTTCCTGTCTTCTTCTGAGCTCTCCAAACTGTTCCAACCTCTGCCTATTACCCA
+GTTCCAAAGTTGCTTCCACATTTTCAGGTATCTTTTCAGTAGCACCCCACTCCTGGTACCAATTTATTGTATTAGTCCAT
+TTTCATGTCGCTGATAAAGATATACCTGAGACTGGGTAATTTATAAAGGAAAGAGGTTTATTGGACTTACATTTCCACAT
+GGTGGGGAGGCCTCGCAATCATGATGGAAGGCAAGTAGGAGCAAGTTACATCTTACATGGATGGCAGCAGGCAAAAAAGA
+GAGCTTGTGCAGGGAAACTCCTGTTTTTAAAACCATGAGATCTTGTAAGACCCATTCACAATTGTGAGAACAGCACAAGA
+AAGACCCACCCCCATGATTTGATCATCTCCTGTCGGGTCCCTCACACAACACATGGAAATTATGGGAGCTACAAGATGAG
+ATTTGGATTGGGACACAGAGCCAAACCATATTAGCAACAAAGCTGCATGCCCTGCACATGTATCCTGGAACTTAAAATAA
+AATATAATACAATTTTTAAAATGAAAAATGAAAAAAATTTTTTAATTCAAGATTCAGTCATTAAAACAGGGACATATGCC
+CAATAAATGTATCATCATTATTTTAATACTTAGCACTCTGTTCTGTGGATTCTTAAAAGTAGCTTATATTTTAAACCACT
+TAGGCCTTATAACTGGGGGAAGATTCCGTCTTATCCCAAATTGGGAGAATGCAGGAAGCATGAAGGACAAAGGGTCAATG
+GAGTTACAACCTGTAGACAGCCTGAAAAACCAAATTTGGGGCATCCCTTTGTGCTGCTCTACAAATAGCCTTCATTTGCT
+TTTCCTATCTGGCTTCAAGATTAATTGCTAAACTTGGAAGGAGGACCTTGAAGTTTGCTCGTAAATTTTCTGTGTTTAAA
+AAGAAGAGTTTTATCATTAATATTCTGGAGTGAGCATGAGTCAGGGAGAGTGACTCAGGCCATGTTCTCAGAGGTGGCTG
+GCTATAAATCCACTTTTGCAGAATATTAATCTCCCCTGGCTCCTTTGAAAAGTTAGCAATTATTAGTAGATGGTGTGAAA
+GAGAGTCTTTAAAAGAGGAATGGTAAGACAATGTCACCTTGAGTTTCTTCCAAACTCATTGCTATTAGAGAAGAGAAAAG
+CTGTTCCCTCAAACAAACTTTGGAGAGGAGTTGTCCTGGCAGGCACAGGCAGCAAAAGCAAGTCACAACTCCACTAGACG
+GAGCCCCCACAGGGACTCTGTGGTGGGGGTCTGACCCCACATTTTCCTTCTGCACTGCCCTAGCAGAGGTTCTCCATGAG
+GGCGCTGCCCCTGCAACAAACTTCTGCCTGGGCATTTAGATGCCTAAAACTTACCGCATATGTGTACTGGGTCCCAAAAA
+AAACCCTAGTACCTTCAAGGAAAGATAAGGTTTCATGAAGCAATTTGACTCAATAATTAATTGTTCCTACCCGGTGATGG
+AATACCTAGGTTGCCTTGAAAATATTAAGCCAATCATTTGGAGAGTTTGAATCTCTTGAATGTAATAGTTAAAGGTAGAT
+AATATTTATGTCTTTGGGCAAGTGTAAACAGGGTCACAGAATTAGGGGATTCAAGAAATGGACATTGAAGTATCACCACC
+CATTCAAAGCCCTTGCCTGAGGAAGGGATACATTGAAACCACAACATACTGGTTTTTTTTTTTTCCTTTCCTGTATATTT
+ATATTAGTTGGCTCTTAAAATGTGGTGCCCAGAAGGCAGCATGGGCATCACCAGGGAACTTGTTAAAAATGCAAATTATC
+AGGCCCCTCCCCAGACTTACTGAATCAGAATCTCTGAGGTTGGGACTCAGAAACCTGTATTTTAACAACCCATCCAGATG
+ATTCTGCTGCTTGCAGAATTACTGCTCTAACTACAGCCAGTAACAGAGCTACTTTTAAAAAGCAAGTGATTATAACAGTT
+GGGACTATTGTAGGCCGAAAAAAGAAAAAGAAAGAAAGAAACTGGACTACAGTGGTCTAACTAAATAAGGGTTTATTTTT
+TTGTCACAAGAAGAATGGAGTTCAGCTACCCTGGGCTGATTCTGCAAAACCAGAATGTCATCGTTGTCCCAGGCTCCTTC
+TACCTTGGTAAGTGGCCTTTTCCCTCAGGTTCAACACTTCAAGGCCATAAGACAGCTGCTTCACCTCTGCCATGTTCAAG
+GCAGGAAGAAGAATGAGAAGTGCAAAAGGTAAAAGGTACATGCCAGCTGGGTCTCTCCACTTCTAAAAGGCTGCCTGGAA
+GCTTCAGCAAGTGACCACCACTCATGTGGCATAACATACAAAAACATTAATCAAATATTTCATCATCAGGGCTGGAAGGG
+ACAATAGGAAAGTACAAAGAGTTTAAAAAGAGTGGTTAAGGCCAGGCATGGTGGCTCATGCCTGTAATCCCAGCTCTTTG
+GGAGGCCGAGGCAGGCAGATCACTTGAGCTCAGGAGTTCGAGACCAGCCTGGGCAATGTGACGAGACCCCATCTCTACAA
+AATACACAAAAAGTTAGCTGGGCATGGTGGTACATGCCTGTATTCCCAGCTACTCAGTGGGGCTGAGGTAGGAGGATCAC
+TTGAGCATGGGAGGTGGAGGTTGAAGTGAGCTGAGATCACGCCACTGCACTCCAGCCTAGGCAACAGAGTAAGACCCTGT
+CTCCTAAAAGTAAAAAATAAAAGTAAAAAGAGTGGTTAGCAATTGTATTGCATAAGGTCTAATTTTGGTAAAGTGTGCTT
+GATTTTATTTTGGAAATTATTCTTCTTAAGATTGAAAATGCAACATTTGCCATTGTATTTACACCTTTCTTTATTATTTC
+TTCATCACTGGATGATTTTTACTCTTGATAAAAGTTTGGAAGTTGAATTGCTAGCACCAACTGTAATATCGGTGTGTAAT
+GCTGGGCCACTGTAGTTGAGGAAGATGGCTCATGTTGTCTTTTCTTGACTGACTTCATTCCACTTAATTATGGAATTTTG
+TTCATATCTTAGTGTTACTTCCTGGCCAGAAATTTTTATTTTGCAAACAAGTCCAAACAGTAAAATCTTTATTACAGAGA
+AATACTCTTAAGTCCATTTACTTTGATATATACATCTTTCATCAGCTACGTTTTTCTTCTTGGCAGCTAAGGTTTACATT
+TAATCAAGAAAATGTCCTTCTTGCACTTAGAGTAGGCACCCTCCTACCCAGCTCTGCCTCTAGTCCCAAAAAAGCCATCT
+GATGCTCACTACTGCCTGTCAAACATCCTAATGGATTATTGCTCCTCTCCATCAACCCCCATCTGAGGACCTAGCCAGCA
+GAATGCTAAAACACCCAATCCACACAGGCAAAATAAAATGATGGGGGGAGCAAAATCTGGAAACTTGGTACTCAGAGTAG
+GGTCTTCCCTCCAGGAGCATCAGCATTTCCTGGACCCTTGTTGGCAATGCAGACTCCCAGTCATCCTAAATCTGCTAAAT
+CAGAACCCACATTTTAAAAAGACCTCCAGTGGTGGTTGTGCATCTTAGTTTGAAAAGCACTGGCCTAAAGCAGTGGTTCT
+TGAAATATTGCTCTAGACATGCAGCATAATCATCTTCTGGGAAACTGTGAGAAATGCAAAATCTCAGTCCCATCCCAGCC
+CTGTTGAATTTGACACTTTAGGGGTGGAGCCCAGTAATCTGTGTTTTATCAAGCCTTCCAGGTGATTCTGACATAAGCCA
+AAATCTGAGAACTTCCCCTATAACAATGGTAGTCAACCTTACTTATACAAAGAAATCACCTAGGGTATTTAAACATTACA
+GCTGTCTCCTATCTTAAATATTTCTGCTCACAGGAGGAGTGTTTTAGGTCAAATTTCCCAGAAGAGCACCAGGAAAGACC
+ACATCTATGTCTAGCCCCACTCTTCTTCCTCCTCTGTCACATAACACTTAACAAGAATCCACATTTCAGACTGTTCCTTA
+TGAGCTCTCCCACACTGCCACACATTTGTTTCACCAGGCAAGATGAAAACAGCCTGCTGAGCCCCAAAGGAATAAAGCAT
+GGAAGACGACTGGAGAACACATTTGACCGCTTTGTAGGGTCAGTATCAGGGGCTGCAGCCTAGGAGGCCTGGATAAATGC
+TGCTTTCTACATAGACAGGTTGTGGAGGGGAAAAGACCTGATTCTCAACACCATTGTCTTGGCGCCCCTGCCACAGGTAG
+TAGATTAATTACAGAAAAATAAATAGTCCTGATTTCCCATTCCTCCTTGTATGCACACTCTTTGCAATGTGATTTTGTGA
+TTCTTCCAATCCAGAAATAGACTCTGTTTCCCTACCAGTTGAATCTGCTCTATGACTTGAATTCGCCATAGAATGTGGTG
+GAAAGGATGGTTTGCCAGTTCCAAGCCTGGCCTCCGCAGGCTTGAACACTCTCCTCTTCCTCTCCGACCCTTAGCCAGCA
+CCAAGTGATGATACCCAGACAAACTAGCGTGAGAGACTTGTGCCCCAGTCATGCCATCCCCCCCAAAACAGCCAGCTGAC
+CACCATACATGTGGGCAAGACCATCCTGGAGCAGCACTCCTCTAGTTGACCAATGACTCCCATATGAGCCCAGCTGAGAA
+CAACAGAGAAGGGCCCAGGGCAAAAGGGTCACACACTGACCTGTAACCTTGAGAACTAATAATGGCTGCTATTTGAAGAC
+ACTGACTTTTGGGGTGCCTTGTTAGGCAGAATTATTGTGACAACAGTTCAATAATACCTCACAACTTTCAGACCTTTCCT
+GGAGAGACCCAAATATGGGACAGAAACATTAAACTACAAACTACAGACAGCTCTGCACACCTGACTCTGGGTGTGAGGAA
+ATGGACCACGGAGAAGGCAGAGGCATTGGTGGTCAGGGAACACAGCACTCGAGACAGAGCAGGCTTAGAGCTCAGAGGGG
+CTCGTGAGCTGCGAACAGCACAGCCTGCAGCCTTCTGTGAAGTCACACGGGGAGGAAGTGATGAGAAGGCAGGTTCTATT
+TCTGTTATGTGATCTAGAAAAAGTACCTGGGAAGCCAGCAGGGAGCCAATCAGGGAAGTTGACTGATGGGGCCAACATGG
+GCTTCCGTTTTTATGAAAGATGTGGAAGAGAAAGAGAGAAGGGCCCAGAAGTGTGAGATTTGGGGGTAAAATTAGAACTA
+GGTTTTTTTTGTAGCTTTAAATTTTTTCTTTATTGAGGTATAACAAAAATACAGTAAATTGGCTGGGTGCAGTGGCTCAC
+GCCTGTAATCCCAACACTTTGGAAGGCCAAGGTGAGAGGATCACTTGAGCCCAGGAGTTCAAGGCGAACCTGGGCAATGT
+AGGGAGACCCTGTCTCTACAAAAAATTTTAAAATTAGCTGGACATGCATACGCCTGTGGACCCAGCTACTCAGGAGGCTG
+AGGTGGGAGGATCCCTTGAGCCCAGGAGTTCGAGGCTGCAGTGAGCTGTGATCATGCCACTGCACTCTAACCTGGACAAC
+AGAGTGAGACCCTGTCTCAAAAAAGCAAAACTAAAAAAAGCAAAACAAATGCAAACGCAGAACAGTACATTGCACAGATC
+TTAAGAGTCAAGCCTGGTGAATTGGTGGTGTAAGCTCCCAGATCAACACGAAAGGCTTTGTAGCACCCGGAAGTCTCCCT
+TGTGCCCGTTCTTGGTCAACTCTTCTTCTTCAGAGGCAACCCCTCTTCTAACTTCTACCACTGCAAACTAGTTTTGCCTG
+TTTTTGAACTTCATATAAATGAAATCATGCAGTATGTGCCCTCAACTCTGTTTTTTTTTTCTGCTCATCATTTTGCCTAT
+AGAATTCTTACATGGAGGTGCATGTTATAGTAAGATGTTCTTTTTTATCATTCTGTAATATTCCATTTTATGAATATACT
+ACAATTTATTCTGGTCTACTGTTATTAGACATTTTGACTTTATGAATAGGGCTGTTATGAACATTCTTGTACATGTCTTT
+TGGTGCACACATGTTCTCTTTCTCTTGCTAAACCTGTGGCTTTTAATCAATCCCTCCTTTGTTCATCAACTGGTTTGAGT
+AATGACCCGACACCCTTGGTCCCCCTTTCCCAGGCTCAGTAAGAATCTGCCCCTGAATTCAAAGCCCACTCTAGGAGGGA
+GGGCAAGAGGAGGAAAAGGGAGAGAAAGAAGAAGGTAGCTCCAGTGACTACAGAAATGATATCAGTTCTTCCTAACTTCA
+CGTTTTGTCTCTAGTGGCTATCTGAAGTGCCTAAAATGACATTCATTGGAAGTAAAGACGTGAAGATTCCTTTAGAGATA
+CAAATATGCCTTTTAGAAGGGTAAATACATAAAAGAATTTAAATGTTACCTTTGCTGGTCCTTCACAATGTTCAGATTAT
+CCTTTATTTATTAGCTCCCCCACTACCCAGAGTGGCCTCCCCACTACTTGCATCCATTACCTAGGAGTTTGTAAGAAATG
+CAGAATCTCGGGCCCCACCCCAGACCTGGAATCAGGATCTGCATTTTTAAAAGATCCCCAAGTAATTCCCGGGCATGTTA
+AAGTTTGTGAAGCCGTGTATTATCCCACTGATACTTTATACAAAACTCTATAAACAGAGAGAGGAGGAATAATTGTTCTC
+ATTTTACAGATAGGGAAATATGACGCGGAAAGGTGAATTTGATGTGCCAAGGGCACCTTGCTGGTTCTCTGCCAGGGGAA
+GGTCTCTAGGTGGAGGCTCTGTTGATTTTCTGAACACAGATATTTCCATTATTTCACACTGATACCCACTGGCACTCCTA
+ACGCTTTATCAGGCCAACCAGCCTGCCTCTCTCTAGAGAGCAGTGTCCTAAGGGCATGCACTGTGTAACTTTGTTTGAAA
+GTTGGCATGAGGAGCCACCAAGGACATAAATGTTGCAGTGTGGTAAGTGGGAGTAAAAGGGGCAGGGTAGAGGAAGGGAA
+AAGGAATGAGGAACCCCAGCCACCACGTGAAATGGCCCAGAGGGGCTTGGGAAAGAGGAATGAAGGGGCTCCAGAGACGC
+ATTTCAAGTTCCTGGCCTTGCTGTCTGCTCCTCCCTTGCCTGCAGACAGGAGGTCTGTGCTGTGACGCAGTCTGTTAGAG
+CCTGTTGGCAGCCAGCCTGGAGTTGGCCTGCACCCCCACTGCAGAGCTCTCAGCTACGCAGGGAGAGTGGAGGCTGCCCA
+GTGCACCTTTGCTGGGTACTGCAGAGCATCCATGCCACCTGCACCAGAGGGAACACAGCTCTGCAGTAATGCCAGTTAGA
+CTGCAGTAACCCGAGATGGGTGTAGCTCATACTGCAGTAACCCGAGATGGGTGTAGCTCCATGCCCTTGCACACCTGCAG
+GGTCCGAGGACCCTGGGTTTTTACCATCTGTTTCTGCACCCCCCTCCAGGAAGCAGCTCCAGGTTGCAGAGTTGCTCTGC
+CTACCCTGAAGATGTCTGGAGGCTCCCCATTGACCCTGAGCCCCACGACGGTCCCACAGCCAGGAAGACGGGCTCAGCAC
+GGCTCATGGTTCAGATCAGTGAAACTAGATTCCATGGCCTCCTTTCAAATCCAGCTGGAGGAGGCTGAAAAATACCCCCA
+GGGCTCTGCCTGCCTGTGTACCGGTATGGGAAGACTCCAAGAAAATGTACAGCCTCCCCCACACCACCCCCACAGGCGCC
+AGACATCAAAGGTATAGTTAGCAGTGGCCTTCCAAAGCCAGTGTCTGATTTGTTTCCTAACCTAAAAGGATGGCTCTAAT
+GGCAGAGGTTTTGGGACAGGCAAAGGGAAGGGAGTACCACTGGCTGGTGAGGGAGCTGTTCGGGGCAGGTAAAGAAGCTT
+GTGGGGAGGTAAAGCTACAATGTGTAGTGAGCTGTCCCCATGGAGTGTGAGCCCGATCAACCCCCTCAGTCTAAAGAATA
+GGACACCTGCCTCTGCCTTGTGCAGCCTCCCCGGTGCTCCCAGCTGCCCAGTTTGCCTCCATAAATGTTACTTTTCTGCC
+GTCCATATTAAATGTTACCCTTCCCCAGTGTTCATAGAGAGAGGAGACAGATGCCTTTCTCTATGAACCGCTGCAGACTG
+TATTCAGTCAGGGCCTAGTTGTGTGGCAAGGGCACTGAATCCAACCTCCCAGCAGACAAAGCCATGCAAGAAGGAGGGTC
+AAGTTTCCAGGCCCAGGTTAAAAATCGTTGTGAGTGTCAGGTGTATGTTGGCAGGTGATCAGAGGCGGGGAAAGGAGGTG
+GAGGTCCCTGAAAAGCATCAGCCAAAACCACTGCTTAGGGATTAACAGCTTTTCAAGCAGCCTGGGATGTCCTTCTCATA
+AGGTTTGCCAGAATGGATGCTACTCACCTTCTTCCCTACTCCCTGCTGTGTATACCTGTACACAACATCCAAAGCAGGAC
+ACATCAGGGAATTCAATCCAAACCCTATCACGACAGTGGAAATTTTAAAAAATCTTCATGACAGAAAAGGTTCAGTGTTG
+GATGTCACATGGCAAGGGTATAAAAGAGTTAATTCAGATACCATCCTATCTGAAATTCCATCGTTATTTTTCCCACCTGT
+TTAACTGAAGCAGACAGGGCTTAACAATCTCGCTCAGATTTCAGTCTTCTCTCACTAGTCACCTAGATACTGACAGTTCA
+CTCATTTATTCGATTAATAATTATTGAGTACCTGTCATGCTTGAGTCACTGTGCTAGTGAGAAATCAGTACACTGTCTAC
+CATATAGGCATTCACTTATTCTACAAATATTTATTGAGCACCACCTGCTTACATAGCACTGTCCAAGGTGCTAGGGAGGG
+TACAAAGGAAGACAGCAGACAAGCCCTCAAGAAGCTTCAGATACATGTGGAGAAACGTGCAAGCCCACAACGCAGCCCAT
+TTCTACTGTCAAAGAAATGGACCCTTGCTACAGAACTGTGGATCTCCTGAGAGCTTGTTAGAGATAGAGTCTCACACCCC
+ACCTGAGACCTGCTAAATCCGAACCTGCCTTTTAAGAAAATTCCCCAGGAGGTTCCACGCACATTAAAGTTTGAAATCTA
+CTGATCTAGACAAAAGAAGCTCTAAAAATTCAGGGGAGAGAGAGATTATTGTGCACATCAGTGGTCAAAAAAAAAAAATC
+CATGAAGGAGTTTGCCTTTAAAGGATGTCTGGGCTTTGGCCAAGTATAGAGAGCTCAAGGAATAGTAATTTCCCACCTCC
+AATGAGTTAAAAAAAAATATGTCTGCAAACTGCGGAAGGCAGTGACATTTTTCAGACACATTTTCCACAACAGCAAAAGC
+TTTGACTCAGGTAAGATGAGTCTGCAAATTCTTGTTGCTGATGCTATCAGGATCCCCAAATTGCAACTCGCCCAGTTTGG
+ACAGCTTCAAGGTCAGATTACCTGCTGTCCTGAAATGGACAGGCCCAGGAAAGATTCCTGGCCTTGTCCAACCCACCTGT
+CAGTCAAGTAGATCAGCCAGTGAGAAACTGGCTCATTATTTTCCAGCAGTTACTCAGCCAATTGGCAGTCCCCAGCAAAT
+TGAATTCTGGAGTGGCATCGTGTGAACATCGGTGTGGTGGCTGCTTTATCAAGCCAGTCCAACAGAGCAGATAAATCACT
+GAGAAGTTTTGATTAGAGGCTGTTGGCACCAAGACTATTGTTTCATAGTAGATCAGACTGAGCAGGTGTCTCAGTTTTTC
+TTACACTCATCCTTCCTGCATTCCATTTAAGAATTAAGATTTTTGCTTTCAATACAAAAATTGAAATAAATGAATAAACA
+GATCTAAACATGACTCTACTGTTATCTCTTGTTTCTAACTACCACCCACAATGTCTGTTTGTCCAACAATTGAATCCAAA
+AAAGATTGTACAGAACTTCACAATTGCATAGTTATATAGAAATTCTCATTCACCATTGAATGAACTGCTTACAATTCAGG
+CCTGTAGTCTATGGTGATAAAATCTGCCCATTGCTTCAGAAGTCATGCCAGGGAGGATTTGTCAGGTAAGCTCTAAAGAC
+AACACAGCCCAACTCATTTACCATTGGGCCATTATTTTCCAGAAGCAGGGATGGGTTTGTGATGAGCCATCTATACTCTC
+TAACCTCCGTGGCCATGGCTTATGTTTATAAGGCAGCTGGCAGGGATAGGGAGGTTGGGTTGAAGCTGAGTAGCAGGACC
+TGGCTGGCTCACATGAAGTGGATCCTAGGACTTTGACTCATCAGCTCTGTCTTAGTCCTGTTAATTTGATGCTTAATTAA
+TAACAGGGCATTAACATCTGCTGTGAACATACTGCTACTCATCACCCTCCCAACCCTCCCTGCCTTCTCCTCTCATACCT
+GTCTCCAGGAAGCTGGACCCACTTATCAGCCAACTTAGTCTCTCAATTATTACCAAAAATGTGTTTTCAACTTGGTGCAC
+ACTGGGGGTGTGCCTGAGCAGGATGGCGAGAAGGGGCAGGCACACACTGAGAAGCGTAGACATACTCCCCTTTATCTAAG
+GGCTCTGCTATTTAGGAAAGGGCTTTTGTGTGAGGAGGTTCGAGCCTCAAAGGCTCATCTTGTCATTTTTCCTGTGCCTT
+TCAGAGATGGAGCATATTGTGCTGTGTTTTTGAAATGATGAAATGAAAGTGTGAGGAATCTGAAAGCCATATAGATAAGG
+AGTGGTTGAAGGAACTGATGGAGCTGAGATGAAACGTTATTTCAAACGTTGGAGGCGTCGTCGTGGGAAGAGGCAGCAGG
+TGTGTTCTTTGTGGTTCCCGCAGGTGGGATATAGGTTCGGTAGAAGGAGGCACTTTCTCACAAAGAAAAACCACCCAACA
+ATGGGAGGCCAGGCCCTGCTTCCTGAAGAAGGAGCCCCCCCATGATGACAGGTACTTAAGCTGAAGCTCCACGACTATGT
+ATAGGAGTCACCTGCATTATAAGCAAAAGTTGGAATAGAACTCAGCATTCTTTTAACTCTGAAAGCCTACAACACATGCA
+AAGCTAACCTTCACAGAGTAGCTCCTATGGTCTCACTTTACAGGTATTATCCCACTTAACGCCCACAGCAGTCCAATAAG
+CTAGGTACAGTTATTATGTTTTGTTTTGTCTCTTTTTGTCCTCACTTCGGCTTGGAATGCTCTTGCCTCAAATATCTCCC
+TCCTTTCCCATTATTCTGATCTTCTCAGGGTACTATTTCCTCCCCAGTCCTTCCCAGTCTTCCCAAAGAGGGGATAATAT
+AACCACCCCTCCCCCATCTCTCCATCCCTTACACTGTTTGATTTGTCTTTATGGCATTTAGCACTGCCTAAAATAATGCT
+GCAATTGTATCTGTTTATGACCTGTCTCCTCTGGGAGAACAGAAGGACTTGGTTGGCTTTGCTCAGTGCTCCATTTCAGC
+ACAGAGAATCAAGGCCTGGCATAGCGTAGGTGCTCAGGACGTATGCATGGAAGGAATAAATCAGTCAGTTTTACAGATGA
+GGAAACCTAGGCATTAAAGAGGTAAAGCAACATCCAAAGGCCAGACAGTAAGTAAAGAGAAGGACCAGGTTGAGAGCAAT
+TTGACTCCAGAGTTCACATATGTACAGACAACCCCCTACTTGTGATGGTCCAACTTCACGGTTTTCAACTTATGATGGTG
+AAAAAATAATAAGCATTCAGAAGAAACGATACTTCAAGTACCCACACAACCATTCTGTTTTTCACTTTTAGTACAGTATT
+CAATAAATTACATGAGATATGCAACACTTTATATAAATAGGCTTTGTGTTAGATGATTTTGCCCAAATATAGGCTGATGT
+ACGTGTTCTGAGCATATGTAAGGTAGGCTAGGCTATGCTATGATGTTTGGTAGGTTAGGTGTGTTAAATGTGTTTTATAA
+TATTTTCAACTTATGAAGGGTTTCTCGGGACATAGCCCCTTTGTAAGTTGAGGAGCATCTGTACCCCATTTGACCCTTTG
+AAAAAAACATGTATTCTCAAAGAAAAGCTGAGACATCCTTGTTAATTCAGTGGCTGGTGCTGGCTCTTGTTTTATGACTT
+CAAATGACTTGTGCCTATAAGCAGGTGGTCACTGGTTGCCACCAACATGGTGTCTTAAAAAGCCCATTGAGAGAGGTAGA
+GGGCAAAAGGCAGAGGGCACAAAGTTACCTTTTGCCTCAAAGGTCTGACTGCACAGTGATTTGCTGAATAGAGCAGGTGA
+GTCAGAAAGGTACAGAGAACAGCTTGGCAGTCACCTGGTCCTAATCTATCCCTGGGCAGATACATTGAATCTGACACAGC
+TCCCGTGCCTGGAGTCCTCAGAATCAAATTTGCAAACTCAGAGCAGATTTCTTGCGTCTCCAGGCGATTTGAAACCATAT
+GCAATGTCACCCTTTCACCAAAGGAGCTGAATTTATGTTCCTTGTTTTCTCCCTCCCGGTAAGGCTTGGATTTTTCCATG
+ACAAAAATCTTGTTCATTTGAATATAAACAAGTTATAAAATGAAATCCACATTGACTTTTGCAGTTAATTAAAAAGAATA
+TGGACTTGAAAATCAGACTAACTTGAGTTAGAATTCAGGTTACCACTTTGAATCTCCATTTTCCTACCTGAAAAATAGAA
+ACTAACCCTTCTCGCATGGTTTTTGTAAGGATTAGATAAAAAAAATTGAAGGGGGCATCTGCCATTGATTTTCTCTTCTA
+TTGTCAGAGCATTGTTTTCCGGCTAGACGCCTTCTAGTCTTTCTGTAGCTGGGTACAGGAAGAAGCATTGTCTGAAGGAG
+TTATCGGTGTGGAATCCAAAACAGCTAAATTGAATTAGCAGAAAGACAAGGGCATTCCAGTACACTGTGGCCAGGGAAGT
+GCTGTATTCACTCATTGTTTGCACTCCTGAGGTCAGGCTTTCAGGTCAGACCTTGAACCCTCTGCCTGCTTCACATCCTG
+GCTCCTTGGGAAGCCACTCCTGTGGCTTCAAAGGGGCAAAACTAAGAGAATGGTGGAGCTGGAACTTTCATGACCAGAGC
+CTGCCCTGGCCACATCCTCCCTTGGAACCTACAGTGGAAAGTGGGGACAATTTTGCTAGGTCACACCTTTTTGAGGACCT
+TGCTGGGCTTGTGGATCTGCCAGAAAATTCTTTCTTTTTTGAGGGACAAAAGCCTGCTGCTCAGCAGTTTTAGATTAAGC
+CTAATGAGTTCTCTCTTGAGATCACCATTGTTCTAAGCTTAATATCATCCTCATTGACCTAGTGGTATCAAATTCTGTTG
+GCATACCTCTTACTAACAAAACACATCAAATATGTTTTTATAAAACATGATACTAAATTATAGAATATACCAAGAAATAT
+GCTTGTACAGTGAATCTGTGTGCTTCTATTGCTACCATTAAACGTTTGCACCTACTTCTAGCTATCACCAGAATGGGTTC
+CACTAGCTTCTAATATAAAGTCCTACACAGCTGTATCTCGTAATTTATAAGGTAAGGTGGGAAAATGCATCTCAGGTCTC
+AGCTATCTCCAGTGGGAGGCAGGTTCTACCACTTTCAAGGCTCATAACACAGGAAGGGGTGTTCAAAGGTGCAGGGTAAA
+CAGAAAGATGGTGTTCACTGCTAGGCGTCTCAAAATGAATGTGAATGTTCATGCACTCTTATATACAGCATTTGAAACAA
+AACAAAACAAACATAAAATTAAACCTGACATCTTGGGAATGAGAAATTCTAAGGGTGCCCAACATATAGTAAGTGCTCCC
+TAAATAAGCTGAATGGATGAAAGAGTAAATAACTAGGTAGATCATGGAGCCTCTGATATCTGAGTAATACGGAAGTGTTC
+ATAAAAAAGGGAAGCCTCAGTGAGACATATAAATCCAATGAAATATGTTAAGTCTCAAAAAGGGAGCAGATAATTACTTT
+TGCTTTATGTTTTTCAATGGAGTAATAAATAATAGAAGAATTCTGGTCTCTTCTCTGGAAGCTGTCGTTTACTTGGTGAA
+GTTTTCCTGTTTTTTCCTAATAAATGATATCAGTTTTAATGCCGTGCTGCCCAAAGATAACATCTGATATGCCCTTAGAG
+TCAGTTATACAAAGAAATATAGGTTTAGCTCTGGGCTTCTCATCCTTGAAACTGTAGACATTTGGGACTGGATAATTCTT
+TACTGTGAGGAGTCGTGCATTGTAAGATGTTTAGCAGCATTCCCTGGCCTCTAACAACTAGATGCCAGCGGCATCCCTAC
+CCTATGTGATAACAATTAAAAATGTATCCAGACATCACCAAATATCCCTAGGAGGCAAAATTGCCTCTGGTTGAGAATCA
+CAGACTTAGGCCTCAAAAATGGGCAACTCTTAGACAAATTTGATTTCTAGGCTTTTTCAAGTTGTAAACTAACTTTTCTC
+TAAGCACAGCTCTGCCATGGTATTTGCCTTGTATTCAGTTAGGGCCTGAACAAGATAGTGATAGAGACCAGAGATCTCAA
+AATGTGTCTATAAGATAAGCTGTTACACCGAATGATATTATTTCAGTTAACTATTTACATAGTCCTATTGTTAACAAACC
+ACTCTGGAATTCAGTGATTTAAAACAATAATTATTCTTATGTTTCTGGGTTTACTGATTGGCTGGAGCCACTCAGGGTCT
+GTAGGTTGACTGGGGCAATTTTGTTCCATGAATATTATTTTTACACCCAGGTAGAAGCTACTAGGGAGAAGCTCTTCTCA
+GGATGGAGGAAGAAGCTTCCGGAAGCACTAGCAGAAAAATGCACTGTCTCCTGAGGTCAAAGAAGGCACTCGGCACACAC
+TGTCATGTCCGTCCACTACTGCTAGCCTAAGCAAGACACAAGGTCAAGGCCAACATCAGCTGGGCAGGAAAGTACACACT
+TCCCTTGAAGACTGGGGCAAAGGAATTCATGAACGCCAAAGATGACATTTTCAATACAACCAATGAGATGGTGTCATGGG
+TCTTCGGTTCTCACTGGCTCCATTTCTTTAAGAGACAAAGATGGGGAGGTATCACTGGCGTTTGAGTCACGTTTGCAAAA
+CATATTAAGATCCCAGTGGGAAAACGCACAACACAGAGAACAAAATTTCCATAGCTACATTTAAGAAATCTGACTATTGA
+ATCAGTAGGCTGGTGGTTTTATTTGTTTTATTATGTTTTCCGGACCCTTGGTAAATATTGTCTTAATTTGGGTTAATTAC
+TTTTAAAGGGAACCTCAAGAATCAAGGCAAATTATTTGATATTTATTTCGTATTCTCCTTCCTTGTGACTAGAATACTAA
+ATAAATCACTAGCAGCTTGACAACAGCAGCAGCCTACAACAGAGGTATAGAATCGGTACTTTTGTTCTGCCTTCTTATGT
+ACTCTGCATTAGTCATGGCCATGGTAGAGAATTCTGTGGACTGTTTAAGCTTCTGTGTTTTTCTGAAGGTATATAGAGGA
+ATGGAATGGGTCCAACAGCAAAATTATTATTAGAAGGGAATAGAAATTGCCCACTAGTTGAATCTTCAATAAATATCTAC
+CGTTGATTTCATTAACTGATACATGTGAAAGCTAAAAGATAGTTCAAATATTCCTGATACATCTTCCCAGGGACCTGACT
+GAGGTCAACCCCAAAATTTTAAGGATGAGAAACACTGAATTTAAAAAAAAAATCCATACATTGTTCAAGTTTTCCTTTTA
+GCCATAACAGATCCTAAGCTAGATCAGCCCAGGCTTTTTAGCTCCTAATTCCAGAACCTTCCAGCATACCATGCTTGCTT
+AATACAATAAAGGAACTGCAGGTATTTGGCCTAAGGGAGATAATAGTCATTTGGGATTTTAAGTAAATAAGAATCTCTTC
+CATCTGGATGCAGAACGCCGGGGTCTCAAATTGGGCAGGCAAAAGGCTTATTTTGAGGACAATGGAACACTCCCCTGTGG
+TCCCATTCATGGAGATGTGGGATGAGAATGAAAGTGAGGATCTTCAGCCTTCCTGACAGAATGTCCATCCTGTCACCTGA
+ACCTCTGTCTGCCAAGTACAGTGGGGACTTTTCATTCCTGTGCTGATTTATAGCTTTCTGACATCTGACACTGTTGGCTT
+CCTCTTCTTAGAAAAATTCCCTGGGAATTTTTCTGATCCCACTTCTCTGATGGCTGCTTGTTCATTATTTTTGCAGTTTC
+TTTTCTTCTGGACTTGTCCTAATTAATGGTGTTCCCTTTGGTCCTCCCCTTTCCTCACTCCACGTGCTGCCCTTAGGGTC
+TAGAATTGTGGCTCTCAAATCTTCATCCCCAGCCCAAGTTGTCTTTTAAGCCCCAGAGCCACATTCTCAACCTTCTGGTG
+CACATCTCTTGGCTCATGGGAACCTGACTCCACATTCATGTCTCCCTGTGCCTGCCCTTCAAACACCATCTCCTTCTCTG
+GCATCTCCCCACTCAGGGAGCAGCAGCATCTACTCAGATACCTAATCCAGAAACTAGGGACTTAATGGAGATTCCACTGT
+TCCCACCTTGCCTCACATCTAAGCAGCCACAAGCTCTGTAATTCTACAGCCTCAGTATTTCCTAATTATCCCCTTCCCTT
+CATTCGCAGACATTGTCTGAGTTCATTCACATCTTGTCATTTCTTACCTGGAACTGTGCACTGACTTCCTAACCATTCCC
+TAAACCCAACCTCAGCCCTTCGGCCCTCCATCCCTAAACCATTCTCCATGTGCCCACAAGCCATCTTGCAGTATAGAGCA
+CACATTTTTTTCTCCTTTTTTGTTTTTTAAGGGATAGGTTCTTACTATGTTGTCTAGGCTAGTCTCAAACTCCTGGCCTC
+AAGCCATCTTCCCGCCTCATCCTCCTGAGTAGCTGGAACTGCAGGTACATGCCACCATGCCCGGCCCACAAATTCTTCAG
+AGAGTTTCTCTAATAATGAATAAATTCAAGTTTTAGACAGTAATCTGATTTATATCAGCATTTGTACTCTGAGCATGGGA
+CATTTGTTTATTTATTCCACATTATTTATTAAGTGCCTATTCTGAGCCTGACCTTGGCAATAGAATAGCAGTGAATATTT
+TCTTTAAACTCATTCTCATGGAGCTTAAAACTTAGTAGGGGGAGGCACACAATACAAAATTAAATAGATATCTATAATGA
+ATAATATATTAGTTTGGTGCAAAAGTAATGGCAGTTTTTTGCCATTGAAAGTAATGGCAGAACCTACAATTACTTTTGCA
+CCAATCTAATAAATCAATACAATATATATTACGTGAGAAAGTGAAAAGTGCTTTTGAGAAAAGCAAATCAGAAAAGGGGA
+GTTGGGAGCGTTGTGGGGGAGGGTTGAAATGCTAGGCAGGGTCTGAAGAAGACGACATTTAAAGACCACAAGTGGGAAAG
+GAGTGAGCTATGCGGATAACCTGGAGGAGGGGGATTCTGGGTGGGGAACAGCAAGTACAAAGGCCCTGAGTCAGGAGTGC
+ACTTGTCAAGTAGAGGAAGTAGCAAAAGAGACCGATGTGGCTGTAAAGTGAGAAAAGTGACAGTAGCCAGCTTGCATAGC
+ATCCCATTGCCAATGGTTGCAGCAGTACCTAGAAAGAAGGCCAGCACTGAAATCTAGTGTTTTTGGCTTTCTCTGGTATC
+TCTGAGAAAGTGTCAGTCTGGCATACCCATTGTCAGAACAATCCACTTAAATAATATGTGTCCTAGTCAGTTTTTGCTGC
+GCAGCAAACAACTTTAAGTCTCAGTTACTTACAACTACAAGCATTTGCTTTTCTCACTTATGGATCTGTGGGTTGGGTGA
+TCTTGGCTGGGCTCAAGATCTTGGGTGATCTTGGCTTGACTGCAGGCTGTAGGTTGGGCTCATGTCTGCCCACCTTGCCT
+CTGAGTTTTTCCTGAACCGAAAGACACATTCTATGGTGATCACAGAAGAAGAGGCCCAGCCAAACGATGAAGCACTTTAA
+AACCCCTGCTCATTCACATGTCATTGGTCAAAGCAAGTCACATGACCAAGACCAACATCAAATAGATGGAGAATTATATT
+CTACATGCCCTAGGGGGAGGCACTGCAAACCCACAGCAAAGGATGTGGAAGTATAATTCTGCAATAGTAAATGAGTGAAG
+AAACAAGACCAATCATCCAGTATTTCATTATACTGTAGTCCTGCCTTGAATCTCTACACACTTTTGCAGGTTAGAATGCA
+CTTACACACGCCTTTCTGTGATACCTATTTAAAAGCCTTGTAGATAGTAGGCACTCATAAAACTGCTTCCTTAATAATGA
+AATCTATGGATAAATGCCTTATTTCATATGACCCATGCAACAATTAGTGAATTAAATACAAATATCTTCATTTTACAGAT
+GAGAAAGAATCTAAATGACTTTTCATTCAGGGTTGGCAGACTCAAGGCTCAAGCCCAATTCTTTGCTCTCTAAAGCAAAA
+GACAAAAATTCCAACATTACCTCAAAATTAAGTTTACAGCAACATGAGGCCTATATTTGATTACAGTTGTCTGGAGTTCG
+AAGCACATTTTGTTGTAAATAGTTTTGGTTAATCTGACATTTTGTGAAACACAGTAAGGGCCACAGTGAGTTTCTGCTGG
+GTTTAGGCTTCTTTCTGAGATTGGCCTGGCTGAGCTAGGCAGGCTGACTGCTCATTCACCCATTTATTTTTCATTCAATC
+AACATGAACTGAGTGTTTATAAGAAGCAGTGTTTTTAAAGCTAGGGCTTCCACTGTAAAGAAAACAGACAATCATGGAGC
+TCTTTGAAAGGCAGACGGTAGTATATACGAAGTTGGAAGTGGTAACTACTCTGAAAACAAATAAAACAGAGTTGGGGATA
+GGAGGTTAAGGGGCATGATGACCAAGGTGAAGTAAACGAAAGAGGAGAAAGCTATAGTCCCCATGGCAGAGAAGGATTTG
+TATAGCTGGTAACAAGAAGGATCAACTCAAAGCACCAGGAAAGGAATTGGATTAAATAGACAACTCTTGTCCAGGGTTGG
+TGGCTTAACCTGCACGAGTGTCCCCAGGAATGCAGCCCCCACCCCAAAGGCAACAAAGACCCTGCTTTTAAACTGACTGT
+TGGGGTTTCTTCACTTCCTGAAACTATTTGAAAGAGTCATCATTCTTGATTTTCTTTCTTAGTTAAAATTTGAAATTCAG
+GTTGAATTTTTAAAAACTGAGTTCTGTCCTAGAACTTCAGACAGAGGAGGTCAAAGTATCTATAAGATGCTTACATGCTA
+ATAAAAGATTTCACACCAGTAAAGTGCATTGAAGGGCACTGTTGGTGGGAGTGAGGTTCTGAGTGGTCTTCTCATCTCCC
+CACCAGGAAAATAAAAGAATCTTGAATATGATTTGCATAGAATTCTGCCATAATTCAAGATATGACCTGTAATTTTATTG
+CAAATATTGTTGTTTGGCTCCCACCAAGTGGATAGATGGGAGTAGCACAGAGGAAGGGTGCAGAAAGGAGAGAACCCTCA
+GTAACAAGGAGAGGTTAGGAGAAGAGCCTTTTTATAATCACTCTGTTCTCTCTGGAAGGAAGAACATCTTTGGGAAACTT
+GAGGCAAACAATTTCCTTGCATTTACTTTGACAAGTACCATCTGTTAATGGCAAATGATCCTGGATTTTCATTTACAGTA
+TAAATACAAAGTTTCCTTTTAAAATAACTTTAATTTGTAAAGTTAGTACAGGTGGTATATTCTTGGATTAGTTTTTTCAA
+CCTCCCCTTTTCTTTCCAAGAGGTTGGATGTCTTAGAATTATACTTTCCAGACATCCTTGTGGTTAAGGCTCTGGATGAA
+TTCTGTTGTGCCAATTAAATGCAGTTGCATGAGTTTTGTAAGGCAGAAAGAAAGTGAAGCGAGCCTTGCTGCAGTAATGG
+CAGCTGATTAGTAAGCTTTGTGAGATTTGAGCACATGCAGGAGTCAGACTCATGCTTCTACTCTAGTCGACGGAGGCAGT
+TATTATGGCAGCAAGAGTGGAAAACGTGAAAACTGTGGTTTCTTCCTGTTGACATGGATAGCAAGTTCCACTGGTGCCTC
+TGACTTCCATACTCTTCTCACCATTTTATAAGCATCTATTTCCCTGTATTAAATCACCTTCTACTTGAAATACCTAGAGT
+GGTTTCTCATTATTTTATTAATTTAACACATGTGGCAAAATTGTGATGTGGTAAGCACTGGTCTCATGGTGACATGACTA
+GAAACACACATTCTCAGAATTTATACAAAGGATACCTGAATGGATTTCGAAATATTACAATTTGGAGGCAAAGCAAAGCT
+GGAAATGCCAAGGGAAACTCATTAATTGATGTCCTATTGCCTTAAAAAATCTATTCAATTCCTTTTATGAAATGTGTTTT
+CAATTAATCTAATTGTCTTGGAGTCCTTCAGCTGACTGATTTTTCAACTGGTTTGGAGGAGGTTTTTTGGACATTTTGGT
+GGAATATAAGGAAACAGAGTTTGTCTCTTTGACTACCTTCATCACAGAGTAAAAGCTACATACTTACCTGAAGCCACAAC
+TCCACGTGACTTACTATTTCAGTTGACCTAGCCCTTGAGCTTCTGAGGAAAGCAGATGTTGGGCTCAGTGGGTGGCTCCT
+TCAATAAACATGAAGAATGGCTTCAAAAAAAGCTACGGATAAACTATGAGTGGTTGGGGCTCTATACCAATGGCTGGGGA
+GGGTGTGAGGAGAAGAGAAACATAGCAGTGACTGCTAGCCCTGTGAGATCTTTTGCACCCAAGAAGTCCACTTAATTTAT
+CAGGGAGGTCTTAGGAAAATTTAGCTGCCTCAGAAAGCCTGAAAGAAGACAGGTAGAGCACTTGAAAAAAAAAAAAAAAA
+AAAACAAGAAAATGTCTAGAAGTATCTGCATCATCAGTAATGTGAGCAAAGGAACATTTTGAATACTGAGTTAGCAAGTC
+TTGCTCTAGCGCTGAGGAGCCTTAGGTTTTAGGGGATTGCATAAGCAGAAATTGCCTCTTAAACTACAAGATTCTCTTTT
+CCTTTTTTTGCTTTGGTAGAAGATCTTTGTTGTTTTGCTTGCCACTGTCCACTCTACCAACTTTTGTTAATCATACTCCA
+CTCTTCCTTTAGGAAATACTCCCTTTCCAGCTCTCAGCCCATGTGAAGATGGGCTGACTGGCCTGGCGTGGGCTGACTGA
+GTACATGACCCAGGCCTGGCAAACTAGATCATTCCATCATCTAGACCACTCTGATTGGTTCAGACATGGTCACATGATGT
+AAACCTAGACAACCCCAGACAATGAGCATGAGTCTGAAGACTTTTGATGGCACCATTTGAGAAAAGGATGCTCTTTTTGT
+TGGTGTAAATAAACTGGTGGATGTAGCCTTATAACTGTAGGGAGACCTCCCTGTGAGAAAAGCTTTACCTGAGAATGAAG
+CTAACATGAAGGAAAGATGAACAAATAAATGAAAAAAGAGAGATGAAAACAGAGACAGAGATAGTCCTTTAAATGCCATA
+TGAGCCCTGGATCCAGCCATACCTGAAGTCAGAACTACTCCCTGGACTTAGTTACATGAACCAATGGATGTTCATTAAGG
+CTTAGATCAGTTTGAACTGACTTTCTGTCACTCACAATCCAAGGCACCATGACGAATGCTTTAATTGTTCTTTTTCCACA
+TTAACACAGTGCTAATTAGAGAGCATAGATCTTTATTTTCTTCCTTTTTCTACAACCCCCACCTCCACCACCCAGAAGTC
+TTCTGATGCCTGGTTTTATCCTGTATTCATTCAGCAGACATTTATAGAGCACGCACCATATGCCAAGCATTGTCCACAAC
+AACAGTTTTAGAAAACTAATATGGCATAGACCTTGCCATTAAAAAATTCAGTCAATACTGCTTTCCTAAAATGCATAAAA
+GATCACAAGTTGGTCATTTTGTTTTCTCCAAATTTTCAGCTTTCAAAAAAAAATTTAACTCTCAATTTCAAAATGTTTGT
+AACTGTTACCCATAAATTTACTTTATTGCATATGAATAGTTTAATTTTATAATAGCATCAATTTTCACCAAATTTCTTCA
+TGTTTTTAATGTTGTTATCACATCTTCACCTTAGGAAACTTATTTAAGCTCTCTGAGCCTTGGTTTTCTCATCTGTAAAA
+TAAACGGGTAGAGGGTACTGATATGCCTGGTATCTTCTCAACCTGAAATTCTGACAGTTTGCTCACCTATGTTGACTCCT
+TTCTTCCTTTGAAACAATTTTTTCCTCTGCCCTATAGCAACTTCTATAACACATATCCCCCCCGCCCCCCAAACCCCAGG
+GAATTAAATGCATTTCTCAGAATTCAGGAAGACTGCCCTGCTTTCATTCCTAATGCCTGTAACCTGTTCTCTATGAAGCA
+GCCAAACCGATCATTCCCAACTATATATGAAACCAAACTCCACATTTTCCACTCCTATTTAACATCCAGTGGCATCCTAT
+CACCCTTGCAATAAAATCCAAACTCCTTATCTGATCCCTGTTGATATAGTTTGAATGTGTGTCCCTGCCCAAATCTCATT
+TTGAAATGTCATCCCCAGTGCTGGAGGTGGGGCCTGGTGGGAGGTGATGGGCTCAAGGGGGAGGATTTCTCATGAATAGT
+TTGACACCATCCCCTTGGTGCTGTCCTTGCAGTAGTGAGTGAGTTCCCATGAGATCTGTTCATTTACAAGTGTGTGGCAC
+CTTTCCCCTCTCTCTCTTGCTCTTGCTTTTGCCATGTGACATGCAAGCGCCTGCTTGGCCTTCTGTCATGATTGCAAGTT
+TCCAGAGGCCTCCCCAGAAGCAGATGCCAGTGTTATGCTTCCGCAAGCCAATGAAGCCTCTTTTTTAATAAATTACCCAG
+TCTCGGGTATTTCTTTATAGCAACACAAGAACAGACTAATACACCTGCCTTTCTCTTCCTCTCTCATCTTACTTCCTCTT
+CTCCCTCTTGCTCAATATATTCCAGCCACTCTGCCTTGAATTTGCCAATCTTGTTTCCCTTTCAGAGACTTTGGTCTTGC
+TGCTCCCTCTTTCCAGAATACTTTCTTCCCAGATCTTGACAAGGTTTTCTTCCACACTTCACTCAGGTTGCCCATTCAAA
+GATATTTCCTGACTCTACCCGCCATCTAAAATAACATCTTACTCCCACTCTTTTCCATACTTTGTTTTATTTTCTTCTCA
+GCTCTTATCACTTATTGAAATTATAATGTATCATGTTATAATGATTCTATCTTATATAATTAACATTTAATTTAATATAA
+TTTAATGTAAATTACATTAGCATAGAGAGTAGCAAATAGTAGAGGCTCAGCTGGCTTTGTGGAATGAATAATTTTACTCT
+AATTCTAATAATAGCCATCTGTTTAGCTTTGAGGTATTTATGACAAAAACCCACCTTAATCACAAGCCCTGCTTTTAACA
+TCTTTTGTTCGAATATATGGACAGTTTTTTTATCCAAGTGGATTTTAAGAAACTTGAAGATCCTACAAACAGACATCCCT
+TCTTTTGTGAGTTCTCACAGACTGCCATTAGCTAGTAGAATTGTAATTATATTTTCAGAAGGAACTGTATACTCTACAGA
+TTGAGAAAGGTTTTTCTTGCATTCTTAAAAGATTCAGTTAGGAAATGGCAGAATTGGTTTCTAAGACAACCTGTAATGGG
+TAGCATCTAAAACTTGGTTCACCAGGTGGAATCACAGGCAAGTGCTCAGAAATCGTGACATTAGACCCAGAGTTTAAACC
+AGAATGATGTGTTCTACTGATGAGGTCTTTCTGCTTTTTATCAAACTCATCCTATTTGGCCATTTGATTCTAGAAATATG
+GTCCAGGCAAAGGTCACAGTAAGCCGTACAAAGTGTTCCCAGTTCATTCAGAAAGAAGCTCATGGAAACTATCTCAGAGT
+CTTTAAGTGCCTATGCCCCAGGTTAGGAGATCAAGGACAAGAAATCTGGGTGTAACTCCTGGTGAACAGAATAAGGCTCA
+GAAGAGGAAAGGTCAAGAATCTCCCTCATATACATGTTTACTCAATGGTAACAAGAATGCACCAGATTTTTCACAACAGT
+ACTATTTGTAATAGCCAAAAATGGTGAGAATCCCCATGTTCATCAACAGTAGAATGGATAAATTATGTTGGAATTCATGC
+ATAGATTAGTGTACAAAAAATGAGAATGGATGGTTTACAACTGCATTGAATGATACAGATACATATTGTTGAGAGAAAGA
+AGACAGTCAACCAAACACAAAAGAGTACATACTAAATAATTCAGTCTAATGTTCAAAAAAACAAACAGACTTAATTCATG
+GTGTGAAAAATCAGGATTGTGGTTACCCTTAGGGAGTTAGTGACTAGAAAAGAACATGAAAGAGGCATCTGGGGTACTGG
+TAATGATCTAGTTCTATATTCGGGTCCTGGTTACACAGATGTGTTAACTACCAAAATTCATTCAGTTGTGCACTTATAAT
+ATGTGCATTTTTCTGTATTTATTTCATATTTCAATAAAAAGTTTACTAAAAAAATTTCCTCATGTGTTTTAGAAATTTTA
+AGCCAAGAAAATTAAGAGAGCCCTCTTTTGCAAGCACTCACAGATTGATATACCAACTTTCAAACTCATAAGAAGCATGT
+GTTGAAAAGTCGTTGACTCTTTAGCTGAACCTCACAAAAAGAACTCATTTGATTCTGAAGATGTCACACACCCTGAGTTT
+GTGAAAAGAGAAGGTTTGGCTTTCAGGATCCCAGAGCATATATTTTCCAAAATCTCACACTTTCCCTCTTGGTCATGATC
+ATTCACCAAAACACACACACACACACACACACACACACACGCACACACACACGTGTGTGTGTATGTATGTGTGTGTGTGC
+ATAACATACATATACATACATATATGTATATATATATAATTATACAATATATAATTTAAGCAATTACTGTCTTGTGTGCT
+GTATTCCAAGCTTTTCCTACATTATTAATAAAAAATTGTCCTCCATCTTCTAGAAATTTTCAGTCTACCTGAATGTTTAT
+CATATGTATAATTGAACATTTCCCTAAAGCATAAAGACTCAATCAATGGGCTTCTACTGCTCACTGAGATAATCACCTTT
+AAAAAGACAAATGTTTTTTCTGCTAGATCTCTTTATCTTCACTGTGGGTTCCTCTTATTTTATTTCTACATCAATGTTCA
+TATTTAACTTATTATTTTATCTTATTTTTAAATTTCTTTTATGTTGAGCCTTGATGAAAGCCATAGGTTCTCTCATATAA
+TTGTATGTGTATGTATGTATATGTACATAATATATACATATATGTATATGTATGTGTATGTACATAATATATACGTATAT
+GTATGTGTATGTACATAATATATACGTATATGTATGTGTATGTACATAATATATACGTATATGTATGTGTATGTACATAA
+TATATACGTATATGTATGTGTATGTACATAATATATACGTATATGTATGTGTATGTGTATTACATAATATATACATATAT
+GTATATATTATGTATATGTACATAATATATACATATATGTATATGCATGTATATGTATGTTATACACACACTAATTCCCC
+TACATCTCACCTGTATAAGCCATTTTTTTAGTTGCTATAAAGGAATATCTGAGGATGGATAATTTACTTTTTTAAAAAAA
+GAGGTTTAATTGGCTCACAGTTCTGCAGGCTCTGCAGGAGGCATGGTGCTGGCACCTGTTTGGCTTCTGGCGGCAGTTTC
+AGGAAGCTTAAAATCATGGTGGTGGGAAAAAAGTCCTGGTGGAAGGTGAAGGGGGGCTGATGTATCCTGTGGTGAGAATG
+GGTACAAGAGAGTAGGAGAAGGGAGAGATCCCGGACTTCTAAGGAAACACATCTTGTGTGAACTGAGCAAGAACTCACTT
+ATCACCAAGGGGATGGTGCTATACCATGAGGGATCCGCCCCCACGATCCAATCACCTCCCGCCAGTCCCCACTTCCAACA
+TTGAGTATTACATTTCAACGTGAGATTTGGAGGGGACAAACATCCAAACTATATCATGACCCTTCCATCTACCACCTTTT
+CTACCATCACCACCACCAGAACCCTGAACCCACATGCTATAACTCTGAATTTTTTCCCCACCACCACTATACTTTTTGGC
+AATTTTAATTTGATTTGATTGATAGGAAAAGATTCAGAGATGACAAACTCGGAAGGAAATGGCTATAGGCTTCATTCTAC
+CTACAGATAAATTTTGTTTGGTCTTCACAAGGTATTTGTTAAATTTCTGAATGTGAATGTCTTTAGACTGGGCAAATGGT
+CTCCAGTTAACTGTGGTTCCCCAGCACTTTTTGCTATTTAACTCCCAACATACTACACTTAGGTTTCCCAAAATCTCATA
+CCTTCCCTTTCTTATATTTTCTGCCAAGAATATAAGACATTTTGAGTTTTCCATCTTTGTTGTTGTTGTTTTTACTTTTT
+TATTTCAATAGGTTTTTGGGAAACAGGTGGTGTTTGGTTACATGAATAAGTTCTTCAGTGTTGATTTCTGAGATTTTGGT
+GCACCCATTACCCAAGCAGTGTATGCTGCAACCGATGTGTAGTCTTTTATCCCTCACCCCCCGCTCAACCTTCCCCCTCC
+CCAAGTCCCCAAAGTCTATTGTATCATTCTTATGCCTTCACATCCTCATAGCTTAGTTCTCACTTATGAGTGAGAACATA
+CAATGTTTGGTTTTCCATTCCTAAATTAGTTCCCTTGAAGTAATGGTTTGCAATTCCATTCAGGTTGCTACAAATGTCAT
+TATTTCTTTCCTTTTTATGGCTGAGTAGTAGTAGTCCATGGTATATATACATACATACATAGACACACACACACACACAC
+ACACACACAAACACACACATATATATACACATTTTCTTTATCCACTCGATGATTGATGAGCATGTGGGCTGGTTCTATAT
+TTTTGCAATTGTGAATTGTGCTGCCATAAACATGCATGTGCAAGTATCTTTCTTGTATAATGGCTTCTTTTCCTCTGGGT
+AGATACCCAGGAGTTTTCCACCTTTGAATTAATCCTTAGGAATCAAAAAACATTGCCCTTCCCCACTTGCCTACTGAAGG
+TCAAACAAGCCTCTTCTGGGGAGCAAAAGTCAAATGGAGAATATACATGTTTGTGTGACTGTTGTCCCCATGGTTTAGTG
+CTTCCAAGGTACCAGAGTTGATTGGTTCATTGAGGGGTATGTATGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTGT
+GACAGAGAGAGAGAGAGAGAGAGAGAGAGAGAAAGCTAGATCTCAACCCTACTAGGGAAAGGATCAGATATGCTTACAAT
+GCTCCACCCCTGTAACTAAAATTGTCTTTGCATAAAAAGTCACAATTTAGAACTCCAAACTACTCATACCACAACATACA
+GAAGATTTACTCACCAAAAATCTCATGATACTTAAGAAAATATTATCTCTAAACATGAGCCAAGTTGGTGATGAAAAAGA
+AAAAAAAAACACAAAAAAACACAAAAAAACAAAAAACTCCCTGCTAAAAATCCAAACACTTTTGAGATTAGCAAGCGCCT
+TTAGAGTAGACCACCAGCATTGCCTTTATGGAAAACTCCTGGAAATGAAAACAACAGTCCCTTCATGAGATAATTAGGTC
+CTCCTTCCTCAACTCAATCTGAAAATATTCATTTAGAGACCAGACAGCCTGGTATCAGAAATTAAACCCCTGATTTCTGT
+GGCGTCTGATGGTTCAGAGTTCATGATTCTTGGTTAGCTGCATCAGTAACTGCTGTATGGACCTGTATAAAACCACTTGC
+CTACTCTAAGCCTTGGTCTTTTCATCTGTAAAAGGTTTGCAAACCAACTGATTCTATTTCTAAAAGTTACTTTTGGTTCT
+AAAATCCCATATTACAAAATGTGATATTCTATTTCTTAACCTGGGTAGTGATTGCACAAGTATTTGCTTTATAATTATTC
+TTTAAACATATATGTTTTATACAGCCTTCTAAATAAGTAATGTTTGACAATGTGTTGAAAGGGAAAAGCATTCTATCAGA
+AGAAGCCAGTCTAGTACCATTCTTGAATTCATAAAAGTAGGTGTGAAAATTAAATGTCAGAGGCAAAAAGCCAAGAGCAG
+ATTGAAGCTTGTAAAACTCCTGAGGCTTCAAACCATTTCAGTGATAAGCTAGAACCAAAGAAAGTAGAATGAATGGGAAC
+TTGCAGGGCAAGAAGGACTATTATAGGCTCATACAAATCCCCACAATGGTCCTAGGAGGAAATTGAAATCTAAAGGCTGT
+TACTTAATGGCCAAATTCTTAGCGAAATGTGACCATGGACATAGGAGACACTTTCTTCTGGTTTACAATAAGGAGCATCT
+TAACATTCTCAAAATGACTTGATTGTAAAACTTTTTCCTTCATTGCATTTATTTCTTTTGCCAAGCACCGTGCTAGGCCA
+CAGCAATATAGAGAAAACTAAAGTATGACTGCTTTCAAGGAACTTGCAGACTTAGAAGCCTCTATGTCATTTAGAAAAAA
+TTTCAAGCTTATTGCTTCTCCTGAGACCTCTGATCTCTTCTAAAAACATCCTACTCCCCCACTGCTGGACTGGCTCTCTG
+ATGTGCTTCAGAAAGCTCCCGCCTCAGGCCAGGCATGGTGGCTAACACCTGTAATCCCAGCACTTTGGGAGGCCAAGGGG
+GGCAGATCACGAAGTCAGGAGACTGAGACCATCCTGGCCAACACGGTGAAACCCCATGTCTACCAAAAATACAAAAAATT
+AGCTGAGCATGGTGGCATGTGCCTGTAATCCCATACTCAGGAGGCTGAGGCAGGAGAATTGCTTGAACCAGGGAGTTGGA
+GGTTGCAGCCGAGATCACGCCACTGCACTCCAGCCTGGCAACTGAACAAGACTCTATTTCAAAAAGAAAGAAAGAAGGAA
+AGAAAGGAAGAAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGA
+AAGCTCCCACCTTAGGGCCCTTGTACTTGCTATATCCTCTGCCACACAGTCTCTTCTCCCACCTTTACTTCTTTACTTGG
+CCCACTCCTTCTCACCATTCAGACCTCAACTTAAATGTCAGAGATGATTTTCTTGACTTCCCAGTTAAAAAAAATTTCCT
+CCTCCCCCACCTCAGTCATTCTCTACTTCATTTCTCAAAGTAATATTCTTCCAAATACTCCATATTTGAAAGTATCTTTT
+GCCGTAGATGTTTTTTAAAATGTATTTCTAGTCTTTTTTCTCTCAATATGAAGAACTTCTAGAAGACCAAAGGCAATGCT
+TTGTCCACTGCTACATCCCAGGGCTCATATCAGTGCCTGGCTCATAGTAGTTGCACACTAAATATTTTTAAATTTTATTA
+TTATTTATAAGTGAGTGAATAATATGAGGCAAACTCAAAAAAAGTTTTCCAAATGACTTTTAAGGTTCTAGTTAGTCTTC
+AGATTTATGTGGTCCTAAGTAGTGTCAAAAGATAAAAGTTTCAGGGAGCTGGGAGGTAAGAAAGAAGAGATGAATCCTGA
+TAGCATTCGTGCCTTCTTTGATGGTTGGGATCATAGAGGGATTCACAGAAGGTGTCATTAGTGGGAGTAGGAAAGATGCA
+TATAGTTATTCAATATACAGAAAGAGAGGAAGGCATTCCAGGTCAAGTGAACAACATAAGCAAAGGCACAAAGAGATAAG
+AATGCACAAAAAGTTGGTAGAACAGAAGACAAATGGAAGGGTAGCGTAATTTATGCAATGGTCATGAAAAGCTTTGGACT
+TTATTCTAAAGAAGACAAAGATTCGAAAAGTAAACCAGCCTCACTCATCTCATAGAACAATTAGACTGACAGCAGTGTGA
+GGAATGGGTGTGTCCATGAACACTTTGAAATTTTGGCCAAAATTGTACATGTATTTGTGAGCGTGTGATATTTGGGCATT
+TTACTGGGGAGAGAGAACCTATGGCTTTCATCAATGCTCAACATAAAAGAAATTTAAAAATAAGATAAAATAATAAGTTA
+AATATGAACATTGATGTAGAAATAAAATAAGAGGAACCCACAGTGAAGATAAAGAGATCTAGCAGAAAAACATTTGTCTT
+TTTAAAGGTGATTATCTCAGTGAGCAGTAGAAGCCCATTGATTGAGTCTTTATGCTTTAGGGAAATGTTCAATTATACAT
+ATGATAAACATTCAGGTAGACTGAAAATTTCTAGAAGATGGACAATTTTTTATTAATAATGTGGGAAAAGCTTGGAATAC
+AGCACACAAGACAGTAATTGCTTAAAGACAGGAAAGATAGGCAAATTCAGGTCCTAATGAGCTAAGAACGGATAGAAAAA
+GAGAAAAGTAGAAACCATACTTAGGTATGTTCTATGTGATTCCCACCTCTGCCTCAATAATCCATTCTTTCGTGGTGGGT
+TTCTCTTTCCTGCTAAGCAGTTCCTCCCACACCCCTATAGATGAACAGTCAATTCTTTGGTCATACTTGAGCCAGATCTC
+TGGTGATATAATTGCCAAAATGGCTTGACATTTAAAATATCAGAGATGACCCAGCTGCCCTAAGTGATAATAATGCTATT
+ATAGTGGGGGTAGAAACAGCCCCAAAGGAGAGAAGAGTGGGATACTAATGATAAAGTTCAGAAAAATAAGAATTAAGATG
+AGATAATGAACAACAGGGAGACTCGATCCAAGTCAAGTCAATGGTCATCTCACCATGGGGACCGATTGGAGAAAAACTGA
+AGTTAAATAAGGACGCTGGCAAGAGAAGACAACAGGCAGTGGAGTCAGATGGCCTCATCCTTGTAGAAGCTATGCACACC
+CATTTGAAGAGGAGGTAGTTGGATTAAAGGAATTGGCTCTAATTCAGTAGGAAAAAGTTGCAGGAGCCAGGAGGCTGAAA
+GACGCCTGCCATCTGGAGAAACTGGGGAAGGAGAATTGGAAGTCAGGCAGATCAGGGGTGGGGAAACGGTAAGGAAGAAA
+GGCTATTTGGCAGGAATACTTTGGGCGGGGTCAGCTTCTCCCACGATGCTCTTAATTCAGGGATGTAACTTGCACACTCT
+TCAGTAGAAGCATCAGCAATAAAAGAAAATAATTTTATTTCCAAATCCTAATAAATCTATATTTTGAACTGATATTTAGG
+AAAACTGTCTTTTAATATGTAAATGTGTTTCACATGCAATCCGTTTGGAAAAACACCAAATCTCCAAAATCTTTAACAGC
+AATTCTACAAAGTATAATATATTTATAATTTTTTGGTTGGGGTAAAATAAATTATACCAGATTTTTCAAGATATAGAATT
+TCTTATAGGAGAAGCAGAATGCACTAGAAGTCATTGAAACACTGGCAGGACTAAGTAGAAAAAGTTAAGGTTAGTAATGG
+TTACAGGTACAAGCTCTTAAATGAGAACACCCAAGATCACATCCTGGCTCCACTAGTTTACTAATTGTGTGATCTCTGGC
+AAGTTCATAAACTCTATAAACTTCAATTTCCTCCACCATACAATGGGATTAGTGCCCATGCAAGGAGTTATTTTTTTTTT
+AACTTTTATTTTAGATTCAGGGAGAACATGGGCAGGTTTGTTACAAAGGTATATTGCGTGACATTGAGGTTTGGAGTAAA
+ATTGAACCAGTCACCCAGGTAGTGAGCATAGTAACTGATAGGTAGTTTTTCAACCCCTGCCCCCTCCCTACATTTCCCCT
+CTTATATTCTCCAGTGTCTGTTGTTCCCATCTTTATACCCACGTGTACCCAAAGTTTAGCTCACACTTACAAGTGAGAAC
+ATGTGGGATTTGGTTTTCCATTTCTGCATTAATTCACTTAGGGCAATGGTCTCCAGCTGCGTCCATGTCACCTCAAAGGC
+CATGATTTTATTCTTTTTTGTGGCAGCATAGTATTCCATCATATATATGTACCACAGTTTCTTTATCCAATCTATCATTG
+ATGGACACCTAGATTGATTCTGTCTTTGCTACTGTGAATAGTGCTGCAATAAACATACAGGTGCACGTTTTTGGTAGAAT
+GATTTATTTTCTTTGGGGCATATACCCAGTAATGGGATTGCTTGGTTAAGTGGTAGTTAAACTGTTAGTTCTTTGAGAGC
+TCTCAAAGAGTTATTATAAGGATTAAAATTAAAAATAATAATGCTTGTAAATTATTTAACACAGTGCCTCATATATTTTG
+GGAGCTAAATAATTATTATCTGTCATTTCCATCATCATTATTATCAGTTAATTTGAGAGCCTTCCAGAATTATTTGCGGA
+CAGTACTGATCAAGATTGTGGTTTTTCATCTGCTCCTGACTTGCTATATAAACTTCACCAAATGATTCAGCTACAAAAGA
+GGAACAGGGTCTATGTCCATGTTGAATCTCAAGAATTTAGATCATAGACATTATTGTTATTATTAACACTCTACTTGCAA
+ATAGGAGCGTTGGTCTCTTGTGTCAACTCTACAACAAACTCACCACAGGGCTAGACAAGCCACACATCTGTTCTAGGTCT
+CTGTTTCTTCATCAGCCAAGATGGATAAATTACACAATCTTCAACGTTCATCTGAGTTCTAACATTCTATGATTCTGCAA
+CTGCTGTTTCAAATGTGGCACTTGTGAGTTATAAACAACATAACTTCCCTTCACCTGCATTGGACTACACCTATGGGGAC
+TGAAAGTGCAGTATGTGGTGGGTATGTGACCTTCCACAGGTAAGAGATGAGTTGCCCTTCTCTGCCTGCATCCTCACAAG
+AGCCATTTTGTTCTGCTCCTGTCTGACTGGCTCTCCAGTAATTTTCTGCAGCAAGCTCTATCCTTATCCCAGCCAGTGAG
+AGGAAAATGAATTCACCCAGCAGTCCTATGCATCCCTTCAGATGTTGCAGGGAGAAAGCACGCCAGGCATTTCACTGTTA
+ATTATCAGCAGCCTGGCTGCTATGGCACCTTGCTGTGCCAATTTGAGAGATGAGATTCTTTACACTGCTTATTAGGCTCA
+GCTAAATAAATATTACCCAGCATTAATGTTGTCAGAGATACAGTCTGAATTCAGCTCCCTTGAGTGGCTGTTGGTACCCA
+AGCTTTTCACCATCAATTTTTCTTCCTGTACCTCCTTCCCTTACCAATCCACCACTAATCTCTTTCTCTCCTCCCTGGAT
+GTTCAGCTGGATTCTGACATCAGAGGACTGAAATAATATCACCGATTTAGCTCCTTTGCTTAAGGTTATCAAATACTTGT
+GCTGCTACTAGATTTAAGGGCCCCATTATTAGTTACTTTGTAGTCTTGTCGTAACAGATTTCAAATTTCTGAAGCATGTA
+TTTTGAGTTAGAGCCCGCCCTCTCCCCCAGCAAAGTGAGTTTTGAAAGACTGAAGCCCTCTGGCTACAGCAATTGGTTTA
+AAGGTGAACCCTAATAAGGCCAATCATGCCCCCAAAAGAATGAATTTCAGGATTTATTTTAGCTATTTAAAAAACAGAAG
+TTTTCTTCTTGGCTGGATGTTGAATCTAGAAAATATAGCCCCAAGGAACCACTGTTAGCTATTTTGCAACCATGAAGGTA
+AATGCTACCCATAATGGAGACGACATATAGTAAGTGGTGGAAAGAATTAGAGAAAAGAAACCCACACCTGTTAAGAGCCT
+AGATCAAGCTTTACCTAAAGCCAAATCAATCTTTGGACATTTCAGTTTGAATCCTTATGGCTCTCATATTATTTAATTCT
+GAGTTGGAAATTCTAATACTTTCAGCTACATAGATGTGTTTGTTACAAGGTCTCCTGGATGTGTCTATTTGGAGCAAGTG
+TAGGTGAAGCCCTGCCCACATCACTTCAGATCTTACCACTATAATGCTCTCCTACCTGAATTCCCACTCCCGAGGGCTAA
+CTGTAGTGATTTTGCTTCAGCACAAGATAGTACAGAAGTGGTGGGTAATAAAACACCACACCGAGGCAGCCCTTAATAAA
+TGATTGGCAGCAGTTGGTGAGTAAATATGCCAGCTTCTTTGCTCATTTTGTGTAATGCTGAGACACATGTTCTATACAAG
+CTCTCAGAGTTCCTCAGGGGGATTGACCCTAGTTTGTTTGATAACAACCCTTCATTGGCTGCCTTCTTTTCTCTGTCCCA
+TTTCTCTATTGGTTTGTTGGTATCACCTTCTGAGTCACTTGTACTTGGGTCCACAATGAATTTATACAAAGTAACATCTG
+CTTATACCTTTTAAAATGCACCCTACAGGGAGTCACTTCCAAGATGGTTGAATAGGAAGAGCTCCAGTCTGCAGCTCCCA
+GCGAGATCGACACAGAAGATGGGTGATTTCTGCATTTCCAACTGAGGTACTTGGTTCATCTCAATGGGACTGGTTGGACA
+GTGGATGCAGCCCACAGAGGGTGCGCTGAGCAGGGTGGGGCATCGCCTCACCTGGGAAGCACAAGGGCCAGGAGATTTTC
+CCTTTCCTAGCTAAGGGAAGCCATGACAGGCTGTAGTTGGAGAAATGGTACACTCCTGACCAAATACTGTGCTTTTTCCA
+CAGTCTTAGCAACTGGCAGACCAGGAGATACCCTCCCGTGCCTGGCTCGGCAGGTCCCATGCCAACAGAGCCTTGCTCAC
+TGCTCATGCCGAAGTCCAAGATTGACCTGCAATGCTGCAGCTTGATGGGGGGAGTGGCATCTGCCATTGCAGAGGCTTGA
+GTAGCTCACAGTGTAAACAAAGAGCCCAGGAAGCATGAACTGAGCAGAGCCCACCAAAGCTCAGCAAGGCCTACTGCCTC
+TACAGATTCCATCTCTGGGGGCAGGACATAGTAGAACAAAAAGCAGCAGACAGCTTCTGCAGACTTAAACATCCCTGTCT
+GACAGCTCTGAAGAGAAGAGTGGTTCTCTCAACATGGCGTTCGAGCTCCAAGAACAGACAGACTGCCTCCTCAAGTGTGT
+CCCTGACCCCCGTGTAGCCTGACTGGGAACACCTCCCAGTAGGGGCCAACAGACACCTCAAAAAGGCAGGTACCCCTCTG
+AGATGAAGCTTTCAGACGAAGGATCAGGCAGTACTATTTGCTGTTCTGCAGCCTCCGCTGGTGATACTCAGGCAAACAGG
+GTCTGGAGTGGACCTCCAGCAAACTCCAACAGACCTGCAGCTGAGGGGTCTGACTGTCAGAAGGAAAACTAACAAACAGA
+AAGGAATAGCATCAACATCAACAAAAAGGACATCCACACCAAAACTCCATCTGTAGGTCACCAACATCAAAGACCAAAGG
+TAGATAAAATCACAAAGATGGGGAGACACCAGAACAGAAAAGCTGAAAATTCCAAAACACAGAGTGCCTCTTCTCCAAAG
+GATCGCAGCTCCTCGCCATCAAGGGAACAAAACTGGATGGAGAATGAGTTTGACATGTTGATGGAAGCAGGCTTCAGAAG
+GTCGGTAATAACAAACTTCTCCAAGCTAAAGGAGCATGTTCTAACCCACCACAAAGAAGCTAAAAACTTTGAAAAAAGTT
+AGACAAATGGCTAACTGAAATAAACAGTGTAGAGAAGACCTTAAATGACCTGATGGAGCTGAAAACCATGGCACAAGAAC
+TTTGTAATGCATGCACAAGCTTCAATAGCCAATTCAATCAAATAGAAGAAAGGATATCATAATTGAAGATCAAATTAATG
+AAATAAAGCAAGTAGACAACATTAGAGAAAAAAGAGTGAAAAGAAACAAAAAAAGCCTCCAAGAAATATGGGACTATGTG
+AAAAGACCAAATATACGTTTGACTGGTGTACCGGAAAGTGATGGGGAGAATGAAACCAAGCTAGAAAACCCTCTTCAGGA
+TATTATCCAGGAGAACTTCCCCCTAACCTAGCAAGGCAGGCCAACATTCACATTCAGGAAATACAGAGAACACCACAAAG
+ATACTCCTAGAGAAGAGCAACCTCAAGACACATAATTGTCAGATTCTCCAAGGTTGAAATAAAGGAAAAAATGTTAAGGG
+TGGCCAGAGAGAAAGGTCAGGTTACCCACAAAGGGAAGCCCATCAGACTAACAGCGGATCTCTCGGCAGAAACCCTACAA
+GCCAGAATAGAGTACGGGCCAATATTCAACATTCTTAAAGAAAATAATTTTCAACCCAGAATCCCATATCCAGCCAAACT
+AAGCTTCATAAGTGAAAGAGAAATAAAATCCTTCACAGGCAAGCAAATGTTGAGAGATTTTGTCACACCAGGCCTGCCTT
+AAAAGAGCTCCTAAAGGAAGCACTAAACATGGAATGGAACAACAGGTACCAGCCACTGCAAAAACACGCCAAATGGTAAA
+GACCATTGATGCTAGGAAGAAACTGCATCAATTAACTAGCAAAATAACCAGCTAATGTAACATTATAATGACAGGATCAA
+ATTCAAACATAACAATATTAACCTTAAATGTAAATGGGCTAAATGCCCCATTTAAAAGACACAGACTGGTAAACTGGATA
+AAGAGTCAAGACATCAGTGTGCTGTATTCAGGAGACCCATCTCACGTGCAAAGACACATATAGTCTCAAAATAAAGGGAT
+GGAGGAAGATCTACCAAGCAAAAGCAAAGCAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGTAGGGGTTGCTCTGATACA
+ACAGACTTTAAACCAACAAAGATCAAAAGAGACAAAGAAGGCCACTACATAATGGTAAAGGAATCAATTCACAAAGAAGA
+GTTAACTATCCTAAATATATATGCACCCAATACAGGAGCACCCAGATTCATAAAGCAAGTCCTTAGAGACCTACAAAGAG
+ACTTAGACTCCCACACAATAATAATGGGAGACTTTAACACCCCACTGTCAATATTAGACAGATCAATGAGACAGAAGGTT
+AATAAGGATATCCAGGACTTGAACTCAGCTCTGGACCAAGCAGACCTAATAGACATCTACAGAACTCTCCACCCCAAATC
+AACAGAATATACATTCTTCTCAGCACCACATCGCACTCATTCTAAAACTGACCACATAGTTGGCAGTAAAACACTCCTCA
+GCAAATGTAAAAGAACAGAAATCACAACAAACTGTCTCTCAGACCACAGTGCAATCAAATTAGAACTCAGGATTAATAAA
+CTCACTCAAAACCACACAACTACACGGAAACTGAACAACCTACTCCTGAATGACTACTGGGTAAATAACGAAATGAAGGC
+AGAAATAAAGATGTTCTTTGAAACCAATGAGAACAAAGACGCAACATAGCAGAATCTCTGGGACACGTTTAAATCAGTGT
+GTACGGGGAAATTTATATCACTAAATGCCCATAAGAGGAAGCAGGGAAGATCTAAAATTGACACCCTAACATCACAATTA
+AAAGAACTAGAGAAGCAAGAGCAAACAAATTCAAAAGCTAGCAGAAGGCAAGAAATGACTAAGATCAGAGCAGAACTGAA
+GGAGATAGAGACACAAAAAACCCTTCAAAAAAATCAATGAATCCAGGAGCTGGTTTTTTGAAAACAAAATAGATAGAGCA
+CTGGCCAGACTAATAAAGAAGAAAAGAGAGAAGAATCAAATAGACACAATAAAAAATGATAAAGGGGATATCATCACCGA
+TCCCACAGAAATACAAACTACCATCAGAGAATGCTATAAACACCTCTATGCAAATAAACTAGAAAATCTAGAAGAAATGG
+ATAAATTCCTGGACACATACACCCGCCCAAGACTGAACCAGGAAGAAGTTGAATCTCTGAAAAGACCAATAGCAGGTTCT
+GAAATTGAGGCAATAATTAATAGCCTACCAACCAAAAAAAGGCCAGGACCAGACGGATTCACAGCCGAATTCTACCAGAG
+GTACAAAGAGGAGCTGGTACCATTCCTTCTGAAACTATTTCAATCAGTAGAAAAAGAGAGAATCCTCCCTAACTCATTTT
+ATGAGGTTAGCATCATCCTGATACCAAAGCCTGGTAGAGACACCACAAAAAAAGAGAATTTTAGGCCAATATCCCTGATG
+AACATTGATGCAAAAATCCTCAATACAATACTGGCAAACAGAATCCAGCACATCAAAAAGTTTATCCACCATGATCAAAT
+CAGCTTCATCCCTGAGATGCAAGGCTGGTTCAACATACACAAATCAATAAACGTAACCCATCACATAAACAGAACCAACA
+ACAAAAACCACATGATTATCTCAATAGATGCAGAAAAGGCCCTTGACAAAATTCAACAGCGCTTCATGGTAAAAACTCTC
+AATAAACTAGATATTGATGGAATATATCTCAAAATAATAAGAGCTATTTATGACAAACCCACAGCCAATATCATACTGAA
+TGGGCAAAAACTGGAAGCATTCCCTTTGAAAACCAGCACAAGACAAGGATGCCCTCTCTCACTACTCCTATTCAACATAG
+TATTGGAAGTTCTCACTAGGGCAATCAGGCAAGAGAAAGAAATAAAGGGTATTCAATTAGGAAAACAGGAAGTCAAATTG
+TTTCTGTTTGCAGATGACATGATTGTATATTTAGAAAACCCTGGCTGGGCACGGTGGCTCATGCCTGTAATCCCAGAACT
+TTGGGAGGCCAAGGCAGGTGGATCACGAGGTCAGGAGATTGAGACCATCCTGGCTAACACGGTGAAACCCCGTCTCTACT
+AAAAATACAAAAAAAATTAGCCAGGCATGGTGGCAGGTGCCTGTAGTCCCAGCTACTCGGGAGGCTGAGGCAGGAGAATG
+GCGTGAACCCGGGAGGCGGAGCTTGCAGTGAGCGGAGATCGCACCACTGCATTCCAGCCTGGGTGACAGAGCGAGACTCC
+TTCTCAAAAAAAAAAAAAAAAAAAAAAGGGAAAAGAAAAGAAAACAAAACTCCATCGTCTCAGCCCAAAATCTCCTTAAG
+CTGATAAGCAACTTCAGTAAAGTCTCAGGATACAAAATCAATGTGCAAAAATCACAAGCATTCCTATACACCAATAACAG
+ACAAACAGAGAGCCAAATCATGAGTGAACTCCCGTTCACAATTACTACAAAGAGAATAAAATACCTAGGAATCCAACTTA
+CAAGGGATGTGAAGGACTTCTTCAAGAAGAACTACAAACCACTGCTCAACGAAATAAAAGAGGACACAAGCAAATGGAAG
+AACACTCCATGCTCATGGATGGGAAGAATCAGTATCGTGAAAATGGCCATACTGCCCAAGGTAATTTATAGATTCAATGC
+TATCCTCATCAAGCTACCACTGACTTTCTCCACAGAAATGGAAAAAAACTACTTTAAGGTTCATATGGAGCCAAAAAAGA
+GCCCACATAGCCAAGACAATCCTAAGCAAAAAGAACAAAGCTGGAGGCATCATGCTACCTGACTTCAAACAATACTACAA
+GGCTACAGTAACCAAAACAGCGTGGTACTGGTACCAAAACAAATATATAGACCAATGGAATAGAACAGAGGCCTCAGAAA
+TAACACCACACATCTAAAACCATCTGATCTTTGACAAATCTGACAAAAACAAGCAATGGGGAAATGATTCCCTATTTAGT
+AAATGGTGCTGGGAAAGCTGGCTAGCCGTACATAGAAAGCTGAAACTGGATCCCTTCCTTACACCATATACAAAAATTAA
+CTCAAGATGGAGTAAAGACTTAAATGTAAGACCTAACACCATAAAAACCCTAGAAGAAAACCTAGGTAATACCATTTAGG
+ACATAGGCATGGGCAAAGACTTCATGACTAAAACACCAAAAGCAATGGCAACAAAAGTCAAAATAGACAAATGGGATCTA
+ATTAAACTAAATAACTTCTGCACAGCAAAAGAAACTATCATCAGGGTGAACAGGCAACCTACAGAATGGGAGAAAATCTT
+TGCAATCTACCCATCTGACAAAGGGCTAATATCCACAATCTACAAAGAACTTAAACAAATTTACCAGAAAAAAATAAACA
+ACCTCATCAAAAAGTGGGCAAAGGATATGAACAGACACTTCTCAAAAGAAGACATCTATGCAGCCAACAGACAGGAAAAA
+ATGCTCATCATCACTGGTCATCAGAGAAATGCAAATCAAAACCATAATGAGATACATCTCATGCCAGTTAGAATGGCGAT
+CATTAAAAAGTCAGGAAACAACAGATGCTGGAGAGGATGTGGAGAAATAGGAACACTTTTACACTGTTGGTGGGAGTGTA
+AATTAGTTCTACCATTGTGGAAGACAGTGTGGCGATTCCTCAAGGATCTAGAACTAGAAATGCCATTTGACCCAGCCATC
+CCATTACTGGGTATATACCCAAAAGATTATAAATCATGCTACTATAAAGACATATGCACACCTATGTTTATTGAGGCACT
+ATTCTCAATAGCAAAGACTTGGAACCAACCCAAATGTCCATCAATGATAGATTGGATTAAGAAAATGTGGCACATATCCA
+CCATGGCATACTATGGAACCATAAAAATGGATGAGTTCGTGTCCTTTGGAGGGACATGGATGAAGATGGAAACCATCATT
+CTCAGCAAACTATCACAAGGACAGAAAACCAAACACCGCATGTTCTCACTCATAGGTGGGAGTTGAACAACGAGAACACA
+TAGACACAGGGCGGGGAACATCACAAACTGGAGCCTGTCGGGGGGTGGGGGGCTGGGGGAGAGATAGCATTAGGAGAAAT
+ACCTAATGTAAATGACGAGTTGATGGGTGCAGCAAACCAACATGGCACATGTATACCTATGTAACAAACGTGCATGTTGT
+GCACATGTACCCTAGAACTTAAAGTATAATAAAAAATAAATTAATTTTAAAAAATGCACCCTACAAGTTGTGGCTTACAT
+GATTTCCACCGTTCCACCCCCAAATAGACTGATGCATACCCAAACCATTTGACATAAACTCAGCCTAGAAAAAAGCTATG
+AATTCAAATCCAAGTAAGCCAAAGTGTTCTTTCCTGTCAGTGCCAGCCAAGCAGAGGGTTATGGACTAGACGGAAGATTT
+TAGGAGAGAAAGGGAAGCAAAAGCATGAACAAGCCAAGGTATGGAGGATGCTCATTCATCCTCAGGCTCCAGAAGTGGCA
+GCGAGCTGTCTGGATACTCCACTGAAAGTCACAGTCAATGAGGTTATTAATCTGGTGGTCCTAAAAGATGAGCAGCACAG
+ACACCTTGAATCTCTGGATACACTTCCCCCTCTGCATAGCCTCTCTCTCAGTTCCTAAACCCTCCATGCTAAGAAAAACA
+TCACCAGGCATGGGGCTTTGCCAGAAACGTTCACTCGAAACAGTACCTGGCAAATGAAAAATGCACTTTTAAATTATGTA
+CTTTCCGACTAGTCTTCCTAATGGTATGTTGACTATTATTTACATGAAAAATGTCAAAATGTGGATTCCTTTGTTTTATT
+TTTATTTGGGGCATTTTAAACAAAACATTCACTTTAATACTCATGGAGAAAGGTCTACTTCTCTTTCATCTTCCTCTCTG
+TCTTCCAACCAAAACTCCTTGCCAAAGAACACGGATGGCCAACAGGACTTGCACTGACCACATCGCACTCCTACTAGAAA
+ATTACGGATGGAGGAACCAGCAATTTCTAGATGAAATGGCCCTATTTCCTCATTTTACCACTAAGGATAAAGAAGGATCA
+TGCCTGTAATCCCAGCACTTTGGGAGGCTGAGGCAGGCGGATCACCTGAGGTTGGGTGTTTGAGACCAGCCTGACCAACA
+TGGAGAAAGCCTGTCTCTACTAAAAATACAAAAAATTAGCCAGGCATGATGGCACATGCCTGTAATCCTAGCTGAGGCTG
+AGGTAGGAGAAGCACTCGATCCCGGGAGGTGGAGATTGCAGTGAGCCATGATCACACCATTGCACTCCAGCATGAACAAC
+AAGAGCGAAACTCCGTCTCAAAAAAAAAAAGAGGGCAAAAAGTCTTCGATGACAAGCCTGGACAGGACTTTAACAGTATA
+TGCTCCAATTTATCCCTGTCACAGATGAGGAAACTGAGGCACAAATCAGTGAAATGCTTTGTTCAAGACTTTACAGCTAG
+TTAGTGACAAAGTCGGGGCCAGAACTAAGGAATCTTGATTCCATGCCCTCTTCATACTATCCTCGTCAATTCTGGTTTAA
+GTTTTTCACTTGATTTGTATGTTTGCTTTATCTTAAGCTCATATTTCAACCCTATAACCTATCTTGCATATGATGTGGGT
+TGTTTATTCTTAGAAAAATCTAAGATTATTCCACTGTATTAGTGGCCTTGCCTAAATAGATGCATTGGGGAGAAATCCTC
+CCACATAATGAATTTTGTCACTGAGACATTGCAAGAAGTAGCCCATAGCCTTTTCTTATCCTTTTAGAAACAGCCTGAAA
+AGTTCGATTATCCTCCAGATTCAACCTTAGTCCATTTGGTTTCATGCTCATTCCTAATCCCAGACCAATTCCTGGCCCTG
+AGCCCAGGTACTGGTATGGCCTGGAAAATGCCAGAAGCCATATGGACTCCCCAGGCTACCTCCTGGATGAGGATGGATGG
+CACACACCGAGGAACCAACTCTTTCCAAGAGATAGAACAGATGGCATTCCAGATATGAATATGTATAATGTAGACTGCTA
+AGTTGTTAAATAAGTATGTATTTTTAGGTGGATTCATGAAAATCAAGAATTTTATAACTAAAATGAAAGCCAAGCAAATG
+AAACCACAAACACCATTGAAAGCTGTGCATTACTCCATGTTTGTTGGTATTGGCCTCCATAGCATGCTCCAACCTCTAGA
+GAAACCACTCGAGAAGAACCAGGAAGAAATAATTCAAAACTGCGGCAAAGGATGTTAACTTCAAGCTTCCATGCTCTAAG
+ACCCTAGGACGTCCATGGATGGCAGAATTAGTATTCAGAAGAGCAAGGAGTGGGCTGGATTTTTCCTTGGCTTTACCACC
+TTTCTCTATGTCCCTTTATTGAAAATTCCACTCTATCACACCCCCAACTCAAATTCCTTATGCTTTAATTTGGCTGCCCC
+AAGAACAAGCCTAAGAGGCATAAGAATAATGCAAGTCATTGAATCAGTAGAATCAGTCAAGAAGATCAGCTTCCACAAGC
+TTCCAGTACAAATAGTACCCTTCAGAAGACTTTAAGCTCTTTGCTCCGTAATAATACTACATTACAGGTATACTTATTTC
+CAGTTTACAGAAAATTTTAACAATCCCATGTGAAAAAATAGAACATGAATTGTTACCCCCATTTTTACAGAGAAGAAAAT
+GAAGTCTTGGAGAGGTTAAACAACCACTCAAGAATGCACCAATGGTAAATGTGCCACTGGGACTTGAAGCTAAGACTCTG
+ATTCTGTCCAGTAATGCATCTGATTTGGCATTCACCAATGAACCTACAGCCAAACGGGCCATCACTGGAGAAGTCCAACT
+GCTACTTGAAAAGCCTAATCCATGGATTAACAGATTCCAATGAGTTAGAACATCAAAGGGTAGAATAGGATATATTTCCA
+CAATCTGGGAACATTCCAAATAGAGAAACTGGATTCTTATTTATCCATTATTGATGCAAGTCCTTAACTCCTTCCTGTAT
+CCATGGGACCTTTTGCAATCCTTCACACTAGAGACAGAATACACTCTCCTGCTCCATTAATGATAGGCAATGGAAAGGTG
+ATCTGCTTCATCCAATGAAATGTTAGCAAGTGTGATGCAAGCAAAGACTAGAAATTGACTTACTTGGTTGTGCTTGTCCT
+CTTGTGCTCCTATTATCTGCCCACAGTAGCTCACTGAACCAAGGGAAATGAGAAACATGTGCATAGATCTAAACTTAACT
+CACTGCTTTGAGACAAGCACATCTGAGCCGAGCCTAGATCATCCAGCATGCAGCCAACTGCTGACACATGAGCAAGAAAG
+AACATATATTGTTATTTGAAGCTATTGAGTTTGGGGCTGACTTGTTTTACAGCAATAGCTGACTAATACACTTGCTATAT
+TTCTTTCATCTTCCTACTTCCACTTCAATGCAAATGGACTCTGCCCCTTGTTATTATTACTGTCTGTTTCTTTCCAGGCA
+GCAAATCACCACCTCTTTTGATGTCCTAAATTATCAGCAGTCTCAATATCAAGATCTACATAGTCATTAATTTACAAAAA
+GCTATTATTAGGAAGTGAGATGGTAGTAAAAATAAGGTAGGTGAAGTTATACCCACAAAATAGAATATCGGGGTGCCAGA
+TCAATATTTAATAAATGGTTGAAAACTTCAAACATTTAAAGATTATTTTGTGGTTAACAAGTTAGAGAAAACACATTGCC
+ATTAGACGACATTTCTTATGTGCTTCTTTACATAGTGACCTGGTGCTTGATGTGCTGTGAAGACAGGATGCTCATCGCCT
+TATAGTTTATCGATAAACACAGGCAATACAGAGGCAGGCAGCTAGTAGAAAATATAGATAGGTGTTGGATAAATTGATAA
+TAAAAGGGTAAGATTAAGTGTTTTGTTTGGTCTTATTTTACCATTTCAGATTTGCTTGATTATTAATTTCCAAAGCCCCA
+GAAAACCCAATTCTCCTGCATTCCTAGAAATCGTGGCAGGTTTCCATCTATAGAAAATTACATGCTTTTCTAGGTGTTTC
+AGAGGTTTAAGTTGCTTAGTTCTTTAAGAAACTGCTCCAAGCTGGGGTGAGGGGAGAAGAGTGTACCAGTATCAACCCAC
+TTACACAAAGAAGGACTTGACTATTCATCAGGTATCTAGGGACAATTTCCTGAAGATGATGGAAAGAAGACAGTGCCTAA
+CACTTGGATGGTAAGCACTTCAACAGTATTTACTAAATTAAAGAAGGGAGGAAAATAGGAACTAGTCCTGCCTTACCCAT
+CTTTCCATTCCTTCCGCTTTGTGTTTTCATCTCAGTTTTGTCAGATCCATTTCTCTGGCCTCTCCAACTCTCAAAGTTCC
+AGCCCTGTGATTCAGCCATCGCAACTCTCCCAAAGTGTTCATTGATCACATTTCCTCCCTAGTCATTGGCACAGTGCTTC
+TTTCATATTTGATTCTTGACACCTGCAGGTTTTCCATTTTGGCTCTCAACTTTTTTTATTATACCCCAGTGGGCTTTGTT
+TTGTACATATCCCCCACCCTGAGTGTAGCCTCAGAAAGGTGGACTTTGGTTCTCAATTATAAGAATATATATATTCTTTC
+TAATAGCATCATGCTGGGCACTGTAGTATCCAATCCTATAGTGCTTACAAGGCACTTTATTCCACACTAACTCATCTGAC
+CCTCACAATGGTGTTCTCAGTTGGATAGGATGACCCTAACTGTTCAAATGGGGACTCAGAGTACAAAGGAGGTCACAGAG
+TTGGTATATAGAGAAACCAAGACACAAACCCAGTTTTCCTGTCTCTAACTCTGGGAATTATTTCTTTGCATCATGCTATT
+TTAAAAAGTACTAGCACTGCTGAAATTCTTGGACTTTCTTTATAATACACAACCTTAGAAGAGAAAGTAAATCATTGCTA
+AGGAAAGGAACTTTTCTTAGCTCAAAAAACAGCCTCAAGAGGACTGAACCTCTTAGAATGCTCGTCTTTGCCCAGATGAT
+CACCAGCCACACTCCAACATATTGATACAGATTTTATGATTTTATAATTTTGGGTTCAGTTGGCAGGACTCACTCTTCCA
+AAAATAGCTTTGGCCTTAATGGGTAGGATGAGCTGCATTACAAAGGCCCTCAACTCCATAAAATTTGGCATCCAGCCAAG
+CCTTCAAAGGTGACTAAAGAAGAAATATCAAGAAAGACCAGAGGTTGGTACTGCCTTGTGAACCTCTCAACATCTATTGG
+AACTTATCAATTTTATAGATATTCACATTCCAAAGCAAGGAGCATTTGAGCTTTTAGAGAGATTAGAAGACCCTAAAAAG
+TGCATCTCGCTCAAAAGGTGTCCAGCTTGTCCAGTGGTCATTGTCCAATCTGATTCCATTTCAAAATCATTTCCATATAA
+CCAGCTTGATGGTGAGATTCTTGAAAAGTTCTAGACCAGGAGGAGGAAATGAGGGTTTGGTTCCTGGCTGTGCCACTAAC
+CGACTGCATGAAGTTAAGCAAATTATGGGGCCTATTCCAACCTCGTTCTTTTTACAGGTCTGACACCTACAACGTGTCAG
+ACACTGGGAATAGAACAGTCCACAAGACAAAGTCCTTGCTCTAAGGGAACTTACCTTCTAGGGAAGATATATGATAAACC
+AATACATAAATATATAATAAGGACATGAAAGCAATAGAGAACAAAAGTGCTGTGAAAAAGATGCTGTTTTGGATAGGATG
+GTCTTTTGAGCAGTGAAATAAAATAAATATCTGAGGGAATAGAGTTCCAGGTGGAGGGGACAGCAGGAAGGCCATGAAGT
+TGGGGTGCACCTGGCATCTTCAAGGAACAGGGAGAAGGCCAGTACTTCTGGAGCAGAGGGAACAGCCGGGAGAATGGAAG
+GCAAGGAGGTGAGCGATAACAGGGGCCCAATCCTGGAGGGTCTTTTAGCTTTGGTAATGACTTTGGATGGGAAGGCAGTG
+GGGGAATCTGAGCAGATGAGGGTATGAACTGACTTAGGTTCCCTGAATGTGACCTTGGCTGCTGTATGAAGGACGGGCAG
+CCATTTCCTCATCTGATAGCAAACTGGTATGGACCAGAGAAACGAGAAGCTTCCCACTGCCACTCTAAGAGCTGAGATTC
+CGAGTCATATGTTTTACTGAACAAATTACTGCCCATCTTGGTTGAAGCCCATCGGGGCCTGAGCTAAGTCTTTGATCCCA
+GAGGCCAAGGCAGCACCATTCCTCGCCATTAGCCCATGGCTAGTGACCCGGGAGGTACAAAGTCAATCCTAGCTACAGCA
+GGAACTCCTGCAGAACCTGTGACCCTCATAGCCAGGGGGTCTCAAACCAGCGGGATACATCTCAGAGTGGCTTTGCTGCG
+ACGTACCTTTGGGTAGTTTGAGACTGAAATGACTCTAGTCACTACTTTTCATTTCCATCCTTTCTTCCCTAAAGGTCAAC
+TGGAAAGGCAGCCGGCTGAGAGAACTCGATCTCTGTCCAGCAGAGATTATTTTGTGGACGCTCAAGGTTCTGAGAGCTTA
+CTTCCTTTTTCTGGGTCACCTGCTTGGTGTGAATTCATCCAAAATCATCCGTTTATTTTTAGAGCTATAAATTATATCTG
+CATTTCTACTCTCCTGCAGAAATTCAAGCTCAGAGCTTGGGCAGGGGCCCTGTGTTTGAGGCACAGAGACTGGCAAAGCG
+TCTTGCCTCCCACCTCTTCTGAAGTGAGGCCTCTGCGCTACTGTGTCCCAGCCCTTCAAAAATCCTGAGTCATTTCACTG
+TTTCTGTTTCTCCCCTTTCCCCAGCAAGCTTCCTGATGCAAAGGAACACCGGAGCGGGGCGATTGCCTCCCTAGAGCTGG
+GCCGGCCACCCCCACCCCTCGGGCTGCTAAAGCAGCGGGCCGGGCGGAAGGTCGGGGAGCCAGAGCCCAGGGAAGAGCAG
+AAGGCGCGGGGGGGCGGGGGGGGCACCCAGGAGGGAGGACGGGGCAGCGGTGAGGGTGTAGACGGGAGGGAATCTGTCCG
+GTGCCGCAGTGAGTTGGGTGGCGGTGATTTGTTCGCGGGCGCAGGCTGGGAGGCGGGGTTCCTGACTCACTGCGGCACTG
+GCAGGGCTGGTGACAGCACCACCCCCGCGCCGCCTGCAATCTGCCACCGCTGCCACCGAGCCGAGGGGCGCCAGCAAGGC
+ATCCCCTGGGTCAAGGGGCCTCCCGTGGGCTTTCCAAACAGCCTGTGCTCATTTCCGCGGGAGCCCTCGGGGCGCTGTGC
+TGTGGCTGCATTGTCACTGTTTGTTTATTTCTACATCTGCTCTGTTAGAAGATAGGCTCGCTTAGGACAGAGCCTTCTAA
+TCCACCCCCTGATGAATTCGGGCCCTCGCCTTTGGCACAATGCCTGGCGTCATCACAACGCGGGTGATCAGTAAACGGCA
+GCGAAGGAAGCGGGGTCAGGACTGGAGAGGAGAGGGGGAGGAAGACAGACGACCGGAGAGGAACAAAAATGGAAGGTAAC
+TTGAGCTTTCATAACTGCAGAAGTGCAAAACAGCTCCCTGTGTGATATAGGAAGTAAAACCGTAGAGATGGGACTTGGCT
+GGCCTGCCCTTCACACACTTTAGACATTAACCCAAAGTTCATGCTCCTGCCATGACAGTGATGGTGGACAGGAGGGAAGA
+AAATGACAAGGAGGAAAGGAGGTGTCTTTTGCTCTTAAAAATGAGAAGGGTCATAAACAATCCATCGGCTGCAAACCACG
+AGTTGCCCCCAAAGCAAAGCAGGTTAGTGGCTGCTGCCGCCTCATCATAGCTTCAGAACCCACAGAGCCTGGAGGACTGT
+CAAACATTATCTAATATCCCATTCTACAGAAGAGAAAACTGAGGTTGAGGCAGGAACGCAGACAGGAATCAAGGTTTTGG
+GGTTGGGATTTGCAATCCATTACTTTGCCTCCTGCATCCACTGCTTTTCCCTGGGCATTCTCTTCTTCCGGTTTCTTGGA
+AGTTAAAATTGCGAGAGAAGCAACACAGAGTGGTTTTCCCAGGGCCTCGACCTCGGTAAGGGAAGACGTGGAATTCCGAG
+TGTGGAGACCCAGACAGCCTCCGAAACCCCCACGCAACCCTGGGCGTTCAGCAGCTGCCAATGCCCGCCTGACAAGTTTC
+TGGGGCATCTCTGCACACGGAGAGGTGTGTCTCCTGCAGCTCCAAAAACAGATTTTGCCATCTCATCCCCATATTTGCAG
+TCTCCAGTTGGCTTCTTGTGATTGGCCAGAACTTTTCCTCACTGGAGAGATGAGGAGAATCCAGTCCTCCAACCAGCTGA
+GAGAAAACACTGCAGTAGCAGAGATAGCAATGGCTGTTCCACCCCACACTGATACGATGTCAGGGAGCCAGGGGTGGGCT
+CAACAGGCTGAATGATGCCAGGGACTCCCAAGGGTCTATACATTTAACCCTGGAGGAAGTGGCCTCTGACCAGGTACCTA
+ACCTTCTGTACTTCTCGTTCTGTAATTGGGGGCATCAGAGTGTATTCTTTGGCTTTCTGGAAAAAAAGATTTCTCAAATT
+TTAATTTCGGAAGTTTCCATCAGGACTGGCAGCTGTGTACACAGAAGGAGGTTGCTAATGTATTTCAGATTGGGGGCAAT
+TACAGCTTCCCACCTACCCACCCCAATCTATTCCCAGATCACTGCACCACACTGACTCTTGGCCTCCTCTAGAGTTGGAA
+ACTCTACCTGAGAGATGCACTCCTTTAAACTAACATAAAACCTAAACTAACAAAACCATAGCACTTCAAGGTATCCTTAG
+GGTAAGGTTTAATGACCTATTAATATCTCTAACACAGCAATTCTCTTGCTTGGCTATTCACTAGAAGCACTTGGAGAATT
+TAAAATAACACTGATGCCCAGGCCATACCTCAGACCATACCTCTAGAGGTGTTTTCTAAAAGCTCCCCCAGGTGACTATA
+ACAACAGTAAGACTGAAAACCTCTGCTCTCGTGGGAGCTGCTAGTTTGCTGGTCAGCATCTGTTCTCCCTTCCACTAATA
+GCACCTGATGTTCCTTTGGAAACCCCACTCTTTCTTGGTCCACTTGATTGAGTTACACTAACCCCATCATCACCTCCCAT
+TTACTTTTTTCAGGGAATAGCAGGCTACCAGTTTGGCCCCTTTGCATTTTTCCATTCCCCTGAAAGAAGTAATTGGTTCA
+GGCAGAAGATGTGATACAAGGGGGTCCAATGAAAATCAGTCTCAAGACTGGCTGAAACTATTGGGAAGGGAAAGATCTGT
+TTCTGCTGCAGTTATTTAGGAGAAAACATGGGAGCCTTGAGCTTCCAGCAGGCATTTTGCCACCACATGGAGAAAATCTG
+CTTGTAAATTAAGGCAACATAGAGGAAAAGAGATTATTCATAGAGTGAAAAAGACTGAAATCTAATTAATACCACTTCTG
+AAGCCAGATACTTCCAGTAACAGATATTAAATCCCCCTTTTCACCCAAGAAAACTTTGAACTGAGTTTTCTATGATATGC
+CGCATCATAGCAGAGTTAATCTGTTGCTTTAAAATGCAGAGAATAGAGTTCTAATCTAATATGACAGCAAACAAGCAAGC
+TTCAACATTCCTGGTCAAAGATTCCAGATAGCTAAAGACAAGTGGGTGCTGTCCATCTTCAAGGATAATGGCTTTCAGTG
+GAGAAGGGTGAGGAAACTGAAGACAGCAATGTTCTACTACATGTTTTTTTCAGTTAGTATCAAGAGTTATTGGTAGCTTA
+GCCTTTGCTAAATATGAAAGAGCTTAAGAAGTAAAGAGGTTCCAACCATAGGAACCTCTTTTAACCGTAACTATAAGAGC
+TTTTATAATAGCTCTTATAGGTTTTTATAGTAGCTCTTATAGGTTTTAACCATAACTGTAAGAGCTTTTATAATAGCTAA
+TATAGCAGTGATGTATAGGAAGTACACAGAAGCACACAACAGACCATCAAGACAATGCATTGGATGTTTCAGATTCCCTT
+CTTGGTCACGGGAGATTCCCTGGAAAATGTGCCCTCTGTCTGAAAGTGCAAGTGAAGCCAAAGTAATTGATTGGCTGCAT
+TTTAGATTTGAAAGTTTTTCCATGAACAATTGCCATGTAACAAGCTACCCCTAAATTTGGTAACTTAAAAAATGATGTGT
+TTGACTTGTCTTTGTATAGACTTGGATAGCTTTTCTGGGCTTACCTGGGATCTGCAATTGGCTTCTGGTCAGCTAAGTGG
+CTCTGTGTCTGAACTTGGCTGGCTCTTGGCTGGGGCAATGGGAGTGATGAGCTAATGTATCTGTCAACCCTTCAGAGCCT
+AGCCTGGGCTTATTCACATGGGGGAGGCAGGGTTCCAAGAGTGAGAGCAAAATTGCACAAGGTCTCTTGAGATTCGGACT
+CAGAACTTGCTCACTGTCATCCTGCCACATTCTATTTGTCAAAGTGTGTCACAACCCCATTTTTTTTTTTTTTTTTTGAG
+ACAGAGTCTTGCTCTGTCACCCAGGCTGGAGTGCAGTGGCTCAATGTCAGCTCACTGCAACCTCCACCTCCAAGGTTCAA
+GCGATTCTGCTGCCTCAGCCTCCTCGGTAGCTGGAACCACAGGTGTGTGCCACCACACCCGGCTAATTTTTGTATTTTTA
+GTAGTGGTGGGGTTTCACCTTGTTGGCCAGGCTGGTCTCAAACTGCTGACCTCAGGTAATCCACCCGCCTCGGCCTCCCA
+AAGTGCTGGGATTAGAGGCATGAGCCACAACGCCCGGCCACAACCCCGCTTCTTGATGGAAGGAGCTGCAAAGTCACATT
+GCAGAGCACCAGTAAGAGCTCAGAGATCATTTAGCCCAATCCTCCCCATTTAACAGATGAGAAACTACAGACACCGAGCT
+TACCCAAGTCCCCATTTGATTAAATAAATTATATAGGATTATGCTGCCTTCCACACTGATACTCTGTCCACACTGATGTT
+TTGTCCACTATATCTCCTTTTTCCAAGTTATTTCCCATACGATGTAGCTCATGTCCATTATCATAAGATCTCTGCGAAGT
+GGTTAGGCAGAAATTGCCTAAATGTGCTACCTAACAATGGAAGAAACTGATACTCTTCTTCTCAAATTCGCCCATCAAAT
+ATATAGGAATTCTTTTCCAGGAATGGGCACTGGAGTCACAACAGAAAGCAGGGCAGACATAGATCTTATCTGTACAGAAC
+ATAGAGTTTACCATTTAAGTGGTGTCACTGGGACTTCAACCAAGACTGTATGATTCCTCCTCTGGATGTACTTTTCACCC
+ACAGTGGTTGTTATGGTTCCCTGAAGGCTAGTTCGTCTATCAAAGCCAGTTGTTTGTATTTGTGACGGGGCAATCCATAT
+TAATTTACAGGGTTCTCAGCCTCCCCTCATCCCCTTCTCTTCTCCTTGTGAATGAGAATGCTCTGTTTGATCACTCAACT
+TTGTGTATTCCCCACTGTCTTTACTGGGTAAGGTCCCAGGATTTGTGGTGGTTGACTCTGAATAATGGGAAGATAAAGTT
+TAGAAGAATCAGGAAGAAAAAAGTAAGAGAATGAGACAGGAGAACCACATAAGATTGGGCAGCAAAAAAAGCTGGGGTGC
+AGAATAGAAAGGAAGGGATTTGTAGAATGAATAAAACTACAGGCCACATGAAAACTAGAGAGAAGAAAGAATGTTTTAAA
+ATACTGTACTGAGTTATGAACTGAATCTCTTATTTTTCTTCCAGATACAGAAGTGAAGTTTGATCTATTTTTTATTTTTA
+TTACTATCGGCCACTAACTTAGTTGGTTTTTAATGTAACACTTTGGCAGGCCTAAACCACATGCAAACTGGGTAGTTATC
+TGGAGCAAATTTAATTAAGAATCATTATGTCCCCTGAAAGGACAACCAGAGTTTGTGCCTTCTCTAAATACTTGGTCCAA
+ATCAAAACATCAATTGTCCTTTTCTTAATGAAAAAAAAAAAACCTCAACTCCATACTTTATTCTGGTTTCATTGATTTTT
+CCCGAATAACATTTTTTCTGTTTCAACATCCTAATTGGGATACCACATTGTATTGGTTTGTCATGTCTTCCTAACTCCTC
+TGGACTGTGGCAGTTTTTCAAACTCTCCTTTTTTGGGGTAACCACAAAAGTTTTGAGGAGAGCTAGTCAGGTGAAATGCC
+TTTCAATTGGATTTTTCTAAATTTTTCTTTTCATAGTTACACTTGAGTTATTGTCTTCCCACAGAGAAAAGACCACAGAG
+GCAAAATGGCATTTTAAACACATGATATCAAGTGTGCATATCATCAACGTGACTTATGACCAATGACGTTAACCTCGACC
+ATCTGGCTAATGTGCTGTTTGCCAACTTCTCCACTGTAAAGTTGCTTCTTTCTCCCTTTCCATCCTGTATTCTTTGGAAT
+CAAGATGCTAATTGTAGCCCACACTTAAGGAGTGGGGAGTTCTGTCTTTAACCATAGTGAAGAATTTTATTTACTGGAAT
+TTCATTTAAAGTTGGAGTGGGAAGGAGGATGATTCTCAGAAGAACAATAGTGTAAAGAACAGAAGAACAATACTGTAAAG
+CATAAGGGGTGAGGGAGTTCCCATATTATAAGCATGATGGGCAGAAATATTGCAGAGTAGGGAATGAAGAAATGGGAAGC
+CACATGCTGGTGAAAGGAAGGTTTGGAAAAAGAGAGAAAATTTAGCAGTTGTTCTCACTGAAAAATTTTGTGAAGAGATA
+CCTAGAGAGAATGTTGTGAATGTTTTCTGAGCAACTCACTGTGAGAAGCCCCTATACTACATCTTTGAGAAAACTTACAG
+TAAAGATTTACATTCATCTTCTAGGTACCAGAAACTACACTGTTATCTTGTCCAGGCAAAAACTGGTCCACAAGAGAGTC
+CTATCATATTTGGGTTGTACCAGCAAACAATAAGTTGGGCTGGGTCTAGATCTGAAATGAAAGATCTGTAATCCAAAGAC
+CAGATCAAGCATATTGTTTAATTTCTTCAACCTCAAGGAACTTATAAAATTGCTTATAGTAGCAGATTTTTGCAATGGCA
+TCCATTAGTTCCACAGCAGTTTCCCATTATTTATCTAAACATGCAAGGGGTAGAGTGGCTAATAGTGGGATCTTTGGACT
+GTCTTGAAGATTGGACTCTTTTGTTAATTTTGTATTCCTCGATTATTGCTTATACATATATTGTACTCCATTGTGACTCA
+TATTTTTTGGTCATTACTTTTTACTTACATATAACCCATAACGTTTTTTATGTATGTTTCTACTGCAGAGAGAAAGATTA
+TTCCCCCACTCAATACCTATTAACTGATTGGCTGCTTTATGGTTGCTATTTCCATTCTGCACACTTGTGTTGAAAGATAA
+AGCTTCGATAACTTACAAGGAGCAGTTTAGCAAGTTGAAAGAAACTAGATGTAGGATCCATGTGTTTCCACAAAGGATAT
+GCAGGCTTCAACTAGAAAGATGATTTAAGTGTTTAGGGAAGAAATAAGCATGTTCTAACAAGAGTTTTGCAGTCAGCAAA
+ACAAGGTATGTGGTAAATAGGTCCTATCTTGGGAATTGCCAGAGCCCAGAGCTTGTGCTGTTGGCCCATCCAAGGAAACC
+AGTTAAATTAACTGTCTGGAAGGCATCCACAGAGATCCTTGGCTGGGAGGCAGAAGCAGGACATGGAACAGGTGCCCCTT
+TCCACTATTCCTGGGCACAAAGTCTCATTTCCAGCACACGCTGTGGCAGCAGGAGATGCAGTAAATCAACCAAGCTTCAT
+TTCATCTCAGAAGTCCTCTGTGTACTAATAATGCAGGGAAACTTAGCTCATGAAGGAAAGAAAAAATAAATAAATGCTGA
+CGGTTGTTAGAGCTAGTCCCAGTGATAAGCCTGGAACAGGCAAATATTCTTAGTGATTCAGACAATTACTATTAAACAGA
+GGCCAGTTTCCAAGTGACGTTGTGGTGCCAAACACAGCTTTTATCATTTTTGATGAGATTTTTATGATGCTTGTTATCTG
+CCAATATTATTAATGTTGTTAATGACATAAAAAAATTAGCATTTGCCACCTGTGTGCCAGAAGGCAGGCCTTCTTCCTTC
+TTTGGTCTTTGTTCTCACTAGTTTCATAGGCCATCTTTGCTTTGAGAAGGAGCAAAGTTGTCAGCCTAGACCTTGTTTGA
+GTTCTTGTCTGAAATATACTGGTCACCTAAGTGCATAATTCAAAGAAGCCTCCAGCTTGGTAGCTCCTGGGCCTCAGTCC
+TTCAATTTGCCTTTTAAGACTGCTGGGAAATAATAGCATTCAGTTAGCTGTTACTCTCACATTAGAAAAGTAAAATGAAG
+TATTCCACAGAAAAAAAATAAAATATTTTGTTTCAACATCAAAATCTTAAGTGGGATTGGATATGAACCCTAGAAGAAGA
+CCATCTCTGGGAAATCCCTATTGCAAGGACAGAAGCAATAAGATTCTTAGCCAGAAAGAAGCGATCCTTGGGCCAAATGC
+AGTGCTGCCCCCAGTGGTCTGGATAGCAATGCTCTGTGAGACCAGCAGCTTTCAGTCTTGGGAAGAACTCAAATCTCCAT
+CAGATAAAAATGTCTAAACTGGCCCATACCCGTATTATCATGGGGTGGACTTGGGTGCCGTGTGGACTGTGCAGGCCACA
+TTTGGGTTGGTTAAGTGTCTTAATCTGCCCTGTACCCATAGCTAATGTATCCTTAAGTGGGAAAGGAGAGACAGATGATA
+GATAACTAAATACAACTACAGTCTGCATCATTCACTAAAGCAAGTCAAGTGACCAACCCCAGATCCAAGGAGTGTGGAAA
+TAGACTCTGCTCTAGACGCTACCTCATAGAAAGAGGAATTATGAGATCACCTGAAAAAGGATACATGAATACAGGGTTGG
+GCAAAGAATTAGGACAAATGGTGCAATCCATCACTACCAGTTTAACATCATCTCTTTCAGATTAACTCAGTCTCATTTGG
+GCAAATAATTAATCACTTTTTATTCAAATGCTCTAGAAGGATTGTTATCTCTTCCTTCTCAGTGGATCTCACAACCTGAT
+CTTTCCTTCTAATACAGTGTTTGGGAACAGGTAGAAGTTTATGTTCTTACCTACATATTGTTCTCTGCTGGCTCCCAGTG
+ACCTCAAGTTCAGATATACCCTACCAGGGAAAACTCTAATTTTGCTGCATCTATTGCTGATGGGTCTTGTGTAATTTATA
+GTCTGTCCTTGCTGAATTGTCTTTCTTTGGGGGTCTTGGTGTTGAAATTCCTCCTCTGGGACATGGCAATGATTCCCATT
+CAACCTAGAGCCTGAGTGACCCACCCTACAGGCCTCTTCTACAAAATCACCACATTCTTTACTGAGAGTCCTTTGGTGGA
+GTCATAAGTCAGTTTCCTGGGGCATGCAGGAAAGTGAATTCTCTGTAAAGCATTCTCAGGCTAAGGCAGGCGAAATAAGC
+TTTCTTGGTATTTAACACTATCATGTGTTCATTTCTTTTCTTTTTTGACATCTCCCTGTTGGCATAGGACTCATCTTAAG
+GTCTTTAAATGTTCTAGGATCTGCTGGTAAAAGTGAGGAATAATTTTCCCTGTTCTGGCATTCCTCCAAAACTGTCTGGT
+TATTTCTCCCACTGTATCATCTTGGATGCTTTTGGCTCTAAATAACAGAAACCCTGACTCAAACTGGATTAAACAAGAAA
+GAACGTCATTATAGCACAAAATGAAAAGTCTAGCATTAGGTTTGGTTAATTCAGGGGCTCAGCAATGTCAGCTGGGACAT
+GGGTTATTTCCATCTCTTCACTATGAAATCCACACTGTTGACTTCATTCCAAAGCTGACTTTTCTGCATAGCCAGAAGTT
+GGCTGTAAATAGTACAGTAATACCCCCCTATCCAAAAGGGAATTGTTCCAAGACCTCTAGTGGATGCCTAAAACTGCTGA
+TAGTACCAAATGCTATATATACTATCTTTGTTCCTATCCATATATACTATGATAAAGTTTAATTTATAAATTAGGTACCA
+TAAAGATTAACAACCGTAAGTAAAATATGGGTTAGTTGAACACAAGCACTATAATACTACAACAGTCAATCTGGTAGCCG
+AGAGGACTAAATGACTAATGTGTGGTAGCATGTACAATATGGATATGGTGCACAATTTAAAACTTGTGAATTGTTTATTT
+CTCAAAATTTCCATGGAATAGTTTCAGACTGTGGTTGACCATAGGTAACCGAGATGCAGAAATCAAAATAAAATCATGGA
+GACGGGGGGAATGACTACTGTATGGAGAGCCTGCTTCCTCCATCACCAATGGGAAAGAAAAATAGAAAACTTCTCTGTAA
+CCTTGTATTTAAAAACTTTTCCTCCATCTTCATTGGGTCATCTCAGAACAAGTAAGAATGGACAGAGGAATACCATGTGC
+TGACTGACGAACGCCTGGGTTTCTGAACCAATTGCCATCAAGGGGATTGGGATTACCATAATTTTTGCTGAGACTAAGCC
+CCACCCCAGTAACCAAAGGTGAGTTCAGTTTCTCTGGTCACTATGGACTGTGTGAGGTAAGATGGGCAGCTATCAAAATA
+GTCAGTCTATCAATAAAGAGCAGGCAAATACGAAGCTAATTGCTCAAAAATAATCATCTTAAGTGATCATCTAAAACTAT
+CTTTCTAAAGGTTTTGTCTATCTGCTAGAAGGAACCAGTTCACTCATATCTGACCTCCTGCCCCTCAGTTTCTCTTTCTC
+CCTCTCTCTTTCAGTGGTGGGAAACTTTCTACTCCTTTCTGTCTCATTGAAACTTTCCTTTTAATGTAAGCTAAGTTTTT
+CTACTCTGCTTTACAGCGAAACTCTGTGTTCAGTCTCCACACATTGCTCTTAATATGTAAAGAAAGGCTTAACTATTTTT
+AATTTTTTCTTTGCTGACAAAAGTTAGCTTTCTAAACTTTTGTTTCATTCAACTTTTAAGTTGTGATTCAGTGACACCTC
+TTATTGTCTAGGGGCCCCTGCCATCCCATGAAAATACATATGCCAGTGACTTAATAAGTAGAGAATGGCAAGGCAGTAAG
+AAAAACAGGTCAACAGGAAATTCAGAATACATGAAAAAAAAATGGGCTGGGCTAGATGGCTCATGTCTGTAATCCCAGCA
+CTTTTGGAGTCTGAGGTGGGAGGATCACTTGAGTCCAGGAGTTCAAGACCACCCTGGGCAACATAATGAGACCCTGTCAC
+TACTGAGAAACAACAACAACAACAAAAATTAGCTGGGCATGGTGGCAAGACCCTATCTCTACTAAAAAAAAAAAAAAAAT
+AGGCACGATGGTGTGTGCCTATAGTCGTAGCTACTTGGGAGGCTGGGATGGGAGGATCACTTGAGCTTAGGACTTCAAGG
+AAGGTTGCAGTGAGCCATGATTCTGCCACTGCACTCCAGCCTAGCCAATAGAGCAAGACTCCGCTTCAAAAAAAAAAAAA
+ATCTATCCCTATCACACCCCTTCCTGGTTTTTCCTCCAGGTACAGAGAATTAGAGCCTTCAAACGCAGACATTATCAATT
+TGGTCCATGTTGCTTAAGATCCAATTGGCTTCCTTCCTAGCAATTATTATAATTTGTAATTATTTGCAATTATGTCATTT
+TTTGTCTGCTATTTTATTGCCTATTTCCTGCACTAGAATGTAAGCTCTATGAGACTTCAACTATATTGTTCATTCACATC
+CCTAACATCTAGTAGAGAACCTGGCATGGAACAGGCTTCCTATAGTTATTAGTTAGATGAATGAATGAGTAAATTTTCTC
+CTCCTAAATTGAATGGTCTTACACAGTTTAGCACTTTCAGGCCCATTATGGTAGCTGACTCTCAGAATGAGGTTACTCTG
+TCTACCAGATAATAATAATAATAGTAATTATTCAACATCAGAGATGTTAAGTGATTTGCTTAAAGACACCCAGTAAGTGT
+GAAATATGAATCTTAAACTCAGGTCTTTCTTTCAAGTCCTATATTCTCTCCATTTTTACTTGTCTTAAAGTTCTCCAAGT
+TTAAGAACCATTTTAGCATTGAGAGATGGTAGAAAGCCTTCTCAAGTCAAAGTGAATAGGGTGAGCCAAATTAGGGAAGG
+CAACAGGGAGCATTCTGGGAGAAATCCAGTGGTGCATTCTGGGAGCCCTTTCATGTACTTAGCCTCATCCAATGCCCAGG
+GAGATGTGAGACTTCCCGCAGCCCACAGGGCAAGCTTTTCCCTGCCTGTGTTTGCCTGCAAAAACAACTACTACTGCCTC
+TAGCAACGTCTGGAACCATGTTCCCTGCATAATAAGAAAGTATAAATGGAGGACACAGTATTGAAGTATTGAAATCACAA
+GATAAGGAAGATTAAATTAGGCTCTGCCTTCCTGAAATGCTCCCAGAGCTAGAGCTACATGGGAGCAAGGCCAGCTACCA
+GATCTTGGGGCTTCTCCTTTAGCAAAGGGGAAAATCCCCGGCCCTGCAGGCATTTCAAGCTCAGGGATTGGTGGAGGCAG
+ATCTGTCAGGAGCCCCGGTAACACTACAAACCAGGAGGGAAGCTGACAGGAGGACAAGCAATGCTGGCATTTTCCTCCCT
+TGGAAATCCCAGGGACTCTAACACCAGAGATCTGAGGCTGATCTGATGCTCCAGCAACTATATAACCTCCACCCTGCCTG
+AACAGCCTCCTACAAATAGGAAACTATAAGATTTGCATTATCTCAGAATAACCATTTGTAGGACTAGAGTCCGTCCATCA
+GCAGTAACTGTTCTTACTCAACCTTGATAAAAGCTTTGCCAGATGAGTTGGGCTCTGTTTAAGTTTACTAGGCCTGCCAT
+AACAAGGTAATACAAATTGGGTGGCTTAAATGTAGTATCACAGTTCTGGAGACTAGTAGTCTGAGATCAAGGTGTTCACA
+GGACCATGTTTTCTGAAACTCTGGGTAGAATCCATCCTTGCCTGTTCCTCGTTCCTAATGGTGGCCATCAATTTTTGGCA
+TTCACAAGCTTGTAGCTGTGCTATGCCAATCTCTGCATCAGTCATCGCATAGCATTCTCCCTGTGGGTCTTTATCTTCAT
+GTGATGTTTTCTTCTTCTTATAACAACACCAGTCACATTAGATGAGAGCACATACTAATGACCTCATCTTAACTTGATTG
+CACCTGCAAAGGTACTATTCCCAAATAAGGTCACATTCATAACTACCAGGGGTTAGAACATCTGCATATCTTTTGGGGAA
+CATAATAACCCACAACAGGCTAATATTACTATATCCACTTTTTGGAGGTAGAAACTGAGGCTCAGAGAAACCAAGAGATT
+TATGTAACATCACTGTGGTGAATCAATGCCATAATGGCCCCAATATTTTTCCTGCCTCTCTGAATCTATCTGTTGGTAGT
+ACATTTCCCTGCTGATGCTGGGCTTGGCCATGTGACTTGCTTCAGCCAATAGGACAGAGCAAGTGTGATGCCAGCAGAGG
+CCTGGAAAGTGCTTATGCACTAGGATTTCTTCTCTTGCATACTCTGGAACACCGTGACCATTGAGTGAATAAACCTAGGC
+TAGCGTGTTAAGTGATGCGTGACCATGTGGAGAGAGACTCCAATCATTCTCATGTCACTATATGAAATCAATCAGCACCA
+GCCAAACCTCCAGCTGACCACAGATGCATGACCAAACCCCACCAAGATCAGGCAAGCTCAACCTGAATCAGCAGAACCCT
+TAGTTGATCCATAGATTCATGAACAATTATGAGTGAATGGTTGTTTTGAGCCACAAAATTTGGGAGAGGTTTGTTACACA
+GAGAAAACTGATACAGTCACATAGGTGTTAAATGGCAGAGCTGGAACGTGTAATCACTATGTGACCTGTTCATACCACAC
+AGTTCTGAATCAATGGGATGGTGATTCATAATTTTCCTGTTGGCCACAACATGAGTTTCCCATGTGTCCACCAGCATCGA
+GCCAATATACAGAGCCTGTAACTATATTAACTTAAATATTTTGCTTTCCTTCTTTTTATTCCTCCTTCTCTAAATTATGA
+ATAGAAAGTTATGAGTTTGCCCTTTAATTATGTGCTTTCTGGCTATGAAGAGATTGACTAAAATCTATGGTGTTAGTGGT
+GAAGTAAGCAAGAAAAGGAAGAGAATGAAGGCTCTGTAGGTGAAAGAACAGTCATCTTAAAAACCAGTCAGTTGGCCCAG
+TGTAATGTAGCATTTGCCTGCCTCAGTTGCGTCCCAGGGCAGACGGAGAGTTTAATCCTTGCCCCTCTCATATGAAATAG
+GCCACCACTAGGATCCTCAATATATTTCTTTTTTTAATCTCTCTCTCTCTTCTGCAGGCTTATGGGTGGAATGCAAGCTA
+TTGGAAGTATTTGAAGATGTGACCAAGGAAGCTATGAGAATTGGAAATGAGGCAGTTGTTTAGAAACGGGCTGATGTGCT
+TGTGGTAAGGTGGAAGGAGGAAGGCAGATGAGGAAAGGAATGTGCAACTGAAGCAATTAGCTGTTACTATGTAACGAACC
+ATCCCAAAACGTAGTGACATAAAACAACAACCATTTGTTATTTCTCATGCGCTTACATTTCTACTAGGAAGTTCTGCAAA
+TCTGGATTGGTCTTGGCTGATCTTGGCAGGGCTTGTTCTGGCATCCACTCTCAGCTGCTGAATTAGCTCTCAGTTCCTGG
+TTCAGGGTGGCCTTGGCTTGGATGACTTGGCTCTGCTCCATGTATCTTTCACATCCTTCCAGCAGGCCAGCTCAAGCCTG
+TTCTCTAGATTGTGGCAAGGATCCAAGGGAAAGAGAGAAAGCCAAAATGTTCATGCCCTTTTTTGAGCCACTTTTTTCAA
+TGAGTTGGCTACTGTCTCCCTAGCTAAACACATCTCAGCCAAGCCCAGAACCAGTATGGAAGGTCACTAACCAAAGCTGT
+GGGTAAATGGAGGCATGGAAATCTGTGACCTTTAATGCAATCATCTACAATAACAAGCCAATGACCATGAAGCTGGACAG
+AGAAATAGATTCCTGGGAGGAAGAATAGCAAAATGTCTGGAGTAAGAGGAAGTAGGGTATTTCAGGGAACTCAGCAAACA
+GTGTGATGACAAAATGAAAGCACAGTCATGCAATCAGCACCTGGGTAAATATTTAGGTTGGAGTCTGTGTTGACCCAGTC
+TGCAGGTAACCCTCAGCCTGGCCCAGCATTTCACTGCCCTCCTACTACTTACCCAGAAACAACTCTCCTGTATTGATTAA
+GCAGGAAAGAGGAAACTCTCCTGGAAGACACAAGGGCAGATCCAAGCAGGAGCTAAGGTTTTCCAGAATGTGGTCTTAGG
+TCTGTGAATGGCTTTTGGACAGAGGCACAGCTGAACATTCCCATGGTTTCTGTGACTTTAGAATCACTACCACCTCTTGA
+CTATGCACCTCTTGACTAATTTCTATTTCTCAAATTATAGAATAAAAAATGCTGATTCTAAGGATAAAAACACAACAAAT
+AATCCAAAAGTATATAAGATGAAAAATAATAGTGTGAGTTCTATTCCATTCTCCAGAGAGTAACAACTGCTCTCAATTTG
+GGCACTTCCTTTCTGTCTTCTTCCTGCACATGTACTTTTTGCATATGTCCTTTGCACGTTCTTAACCATACTTGGGTAAT
+GATGCCAACATCACAAGCATAACTTTCTGACAGCCCAGAAAGCCAACCTTGTCTTCTTCCTTGTTCAACCCTTGTGCCAT
+TTTGCCCTAACCAGAAAAATACTCCACTTGGTAATGATTCTCTCTCTGTGTCTGTGAAAAATAACATTGGAAAGTCTAAT
+GTGGAAATGTTGAACCAGGCTGGGCATGATGGTTCATTCCTGTAATCCCAGGACTTTGAAAGACCAAGGTGGAAGGATGG
+CTTGAGCCCAGGAGTTCAATACCAGCCTGGGTAACATAGTAAGACTCCCATCTCTACAAAAAATTAAAATTAACCAGGCA
+TGGTGGCATATGCCTGTAGTCCTAGCTACTCAGGAGGCTGAGGTGGGAGGATGGCTTGAACCAGGAGGTCGAGGCAGCAA
+TGAGCCATGATTGTGCCACTGCATTCCAGCCTGGGTGACAGAGTGACAGCCTGTCTCAAAAAAGAAAAGAAAAGATACAA
+AAGAAATATTGAATCAAAAAATATAGGTAAAAGTGAAACAAAAGGAAATTTAAAAATAAATATGTATCAATAGATAAAAA
+ATGGTAAAATGAACAAGATGTGGAAAATGTTGTAGTCTCTACATCTGGCCTAATATTTAATCTAAGAATCTAATGCAAAT
+TTCTTACATCATGAATAAATATGGATGATGGGCCTACAGATCAGATACATTTTAATAATGAGCAGACTAAGTGAGGCCTC
+ATACCTCTCATCTCCATGTGTAACCTAATTAATCGGGCCATTTCAGTTGCAGAACACTCACTTCCATAACTGTCTTCTAC
+TTCAGAAGTGTGTGTTTCAGTTAGAATTTGATTTGGGTGCAGATGCCTATGATAGACAATGCAAACAGTGACTTAAAGAA
+ATAAAAGTTTATATTCCTCTCACATAAAAAAAATTTAAATTTAAATAGTGCAGGGCAGATTTAGCACTCCACGGTGCCAA
+GAATATGAGCTTCTTTTATTTTGTTGCTCTTGGCCTTCACTTTATGGCTCAAAATGGCTGCTGAAACTCCAGACATCAAA
+CCTGCATTCCAGCCTGCACAGTAGAGGAAGGAAGTAAAGAAGGGTAAAGGCTCATTTTCTAAGGAAATTGCTCATCAGCC
+TCACACAACACTTCCTCATACACTGCATTAGCCCAATTTAGTCATATGACCACACCTATCTGCAAGGAAAGCTGAGAAAT
+GTTGTTCTTGTTCTCAGGGGAGATGTATGTGCCCAGCTAAAAATAAGGTGTTCTGCCACCAAAGAAGAAGAGAAAGGGGA
+TATTGAAGTTCAAGTGGTAATCTTAGACATATAAAGCCATCAAAATTATTTCCCCCCCAAACCCTTTTGAGAAACTGAGT
+GCGTAAAGATAGATTATGACTGGGCACGTCAAACTTGTTTTAAATATACAATTGGCCAGGCACAGTGGCTCACACCTGTA
+ATCCCAGCACTTTGGGAGGCCAAGGCGGGTGGATCACCTGAGGTCAGGAGTTTGAGACCAGTCCGGCCAACATGGCGAAA
+CCCTCTCTCTACTAAAAATACAAAAATTAGCCGGGCATGGTGGCATGCCTGTAATTCCCAGCTACTCAGGTGGGAGGCTG
+AGGCAGGAGAATCTTTTGAAACTGGGAGGCAAAGGTTACAGTGAGTTGAGATCACACCACTGCACTCCAGCCTGGGTAAC
+AGGGTGAGACTCTGTCTCAAAAAAAATAAAATTAAAAAATGAAATAAAAATACAAATAAATATACTATTTTTTTCAAAGG
+ATAGAAGGAGCATTGTCTACGTTAAATAATGCAGAAGATTTTAACTGATTGCAGCCCAATTTGTTTCCATATCCCCAGAG
+GCCATCCCAACCCAGAGGCTTTCCCATAGAACCGTGGGGAAAGATCTCAATTGTCTACACTCTGGCTCTGAGATAAGGCA
+GCGCTCTCTCAGCCCTTTAAAGGGGGAGAAGGATGTTAGTTCTATTCCTTAGGCAAGAGGGCCTCACCTGTGGCCATTCA
+ACCCCAGGTTCTGCAAGAGTGAGAAGCAGAAAGCCTACTTTGCGTTGCCTCTACCTGAGGAGAAGAAGCCAGGTGTGCTC
+TCAGATGGTCGGGGTAGTGCTTGGGAAGATGTCGGGAGGACCGCGTAGAAAGCTGAGAGTGAGCTTCCCTTTGCTCACTG
+GCTTCTCAGCCAAAAACCGCTGGAGTGGAAGAGGGACATTATGGGCCTGCAGCAAACTCTCTCTCAACCTTCAGTTTCCC
+AATGTATAAAATGAGTATGATGAAACTTGCCTCATAGATTCATTACAAGGGTTGTTTAAAAAACATTTACCAAAGTGCCC
+GCTGTATACCTCCATTTAATGCATGCAAGTTCCTTTCTCTTCTTCCTTTTGGAATAACATAACCTGATTGTAAGAACATG
+TTTCCACAAGACAACATGATGAGAAATGTTTGGCAAGATCTAGAATTGCAGTATGGTAGCCACTGACCACATGTGGCCAT
+TAAGCACTCCAAATGTGTCTATTGTAACTAGGAACTGAACTTTTAATTTTATTTCATCTTAATTCATTTAAACTTAAATT
+TAAAACCTGCTGCTTGATTAGAGTATTGAAAAATATTTAAACGTATTTGGAACAATTTGGGTATGTGAATTTCCTCTTTC
+AACTATAAATCTTACAAAATCTAAACACAGATCGAGTATTCCGAATTAAAATTTAACATTCAAATTAAAATGTGCTACAT
+GTAACAAATACATACAAGATTTCAAAAAATATACTATAAAAAAGAGAATGTAAATAAAGTCATTGATAATTTTTATGTTA
+ATAACTTTCATGTTGAAATATGTTGATATATTGGGTAAAGTAGAATACAATATAAAACTAATTTGTCCTGTTTCTTTTGA
+CTTTTCTGATGTGATGATTAGAACATTTAAAGTTACATAAGTGGTTTGCATTATATTTCCAGTGGACAGTGCTAAGCAGA
+GGATAGCAAATAATCATTTGGATTGAGTCTTCTCACCAGACTGGCTAGTGATTTTGGCCTTGCACAGACATTTCATTAAT
+TAACCGAGCACCTATCCTGAGCTAGGGATAAAGTAGCAGTTGAATCAGAAATAAGCACATAAGCAAACATTGTAAATAAG
+ACAATTTCTGATAATTATCATAAGCAGAAGGAAGGAAATAAAGTAATAAAAGAGAGAATAATTACCCAGAAATGGAGCTG
+GTTTCTTTACATGGGAGGGCAGGCAGAGAAGGCCTCTCAGAGGAAGGACATTTGAGTTGAGATTTGAAGGGTAAAGATGA
+GCTAGAAGTGCAAAAGTTTAGATGAGGGGAACATTTTACAGAAATATTAAATGGTTGTGCTCAAGGTCAGATAGCAAGCT
+GCTAAGTCAGACTAGCATTATATACCCAGACTGATCTGTGCCTAAGTCTGTGATCATATCCATTTGTGAGAGATGCTAAT
+TTCCACTCTGACAAAATTGTTGACACTAATGAACTGCCCTCCCCCTTCCCCATTATACTTCAGCAGGGTATTGGTGGGGC
+ATTTCCCAGTTATATTCCTAAGAGCAGAAATCGAAAACAACAGTTCTATACAGGTAGGCATATGTCATATCTTGGAGAAA
+AGCATCCTTTCTACTCTTTTCAGCCTCTTGACTTCCTATGACAAGAACCAATTACAAATACTGCAGAGCAAGAATCAATG
+AAATAAAGAAATAATGGGAATGATTTCTGAGGACAGGAATTAAATAAGTGTGTCTTTCCAAAGCTATGACTTAATGGGGT
+CAGGATAACAAGCTCCAGGAATAGAAATGTCCTAATGGGAAAATAACGGCAATTCTGAAAGAGGAATAGTGGGGATTTTG
+GTGTAAGGGACTAACCATCATTACAGTCCCATGGGAAGTTATTAGATCTGGAAAGGTAGGCATCCCTTCATGCTTCTATA
+GACCGAAAAAAAAAATGGATGTAATCAGCCATGTCGAGTTACAGAATTTCATAGGCAAACTAATGATGGAGCACCAATTT
+TTTCTCAGACTTAGGACCTCCAATATTTTTCTTTAACTCTGGGGGCTGCTGTGAAGTTACAAATATATGGTATGGAGCCA
+TTTAAGCTCCAGATCTTGACCTTTTTTTTTTCTGATGCTGCTGCTAAGAGTCCAGAAATCTGGTCAGCCTTTGCTTTACT
+TCATAAGAAGAGAAGAGGGCTGAAAAAAATATTCATAGACACCAAAGTGAAGATGGAGAAAGATGACACTGGTGTTACAG
+GAATAAGCCTGGAGCTGAAGTGATTGGGAAGCCCAAAAGGACATCAAAAGTTACAGGATTTAAGACCACAGCCAACGTAG
+CTGTGTCAAAAGAATAATTCAAGTTTTAAAAACTGATTTTTGTTTAATAAGAAAAATTATATGTCTACCTCAGAACTCTT
+TCTTTGGCCCTAGGTTTGTAAATCCAATTTCCCACTTGACTTCCAACCCCAAATAGAAATTAGGATATTTTATTGTCACC
+CCAAATGGAACATGGCTAAAATAGAACTTTGGGGTTCTATGCCTAGCTTTTCCTATCTGGGGAAATTGACACTACTTAGT
+TGCTCAAGGCATACCATCCCACATCCATTTTATCACCAAGTCCCGTTCATTCTGCCTCCAAAATATATTTAGAAACTTTG
+TCTTCACTTAACCTCCAGAAAAGAGGTTAGGGTTAGGGTTTGGGTTAGGGTTAGGGTTCAAGTTGTCATCATCCTATGTG
+GCAGCAACAGTCTCTTAACTGGTGCCCTGGCTTCTCTCCTTGACCTCCAACCCCCAATCCATTATTCAGATAGCAGTCAC
+AAGCCTACCCAGATCCAGGGGAGGAGATATAGACCCCCAAGCTCTCAATGAAAGGAGTATCTGAGAATCTGTGGCCATTT
+TATTCTAAACTGCTGCATTCACTCTTACAGAAATCAGTCTCCATCCTACCTGTTGTAGAGCACTTCTGGCACTCTCACTG
+CTGCTGTCACTGGCCTTGACCAATATGGAAAAAAAAAATCCACACCAACTTCATCCTTTACAACGTCAATATTTTGCATG
+GTTTACCCACAGAAACAGGGCAGGCCTCTGTTGAAACCTTAAGGTTTTAATTCAGTCCAGTTCTGTATGTCTTTATTGTT
+AGCTCTAGTGTTTCAACCATTTATTAGTCAGATGGCCTTTGGCTAATTATTTAGTCTCCCTGGGCCTCAGTTTTTTCATC
+TGAATTAAATGTCCTTGTATCTCATCATTTCTAAGTGCTTTGCAAGTCAGCCAGCTCTATGAGAGTTCAGCCATTGGCTA
+TGCTCACAGGCCATTGACACACAGTTAATAATAATTTTTTAGTTGCATTAGTTTTAAGAGGGCTTGCTTCTATTTCAACT
+TTTCACAGAACATGATGCAAACCTCCTGAGGATCTGAGCTGTCTTGCCTCCCTCCTCCCCAGCATCTTAAATTATTCTTA
+CCAACTTTTGATGCTTTGATTGTTTCCATAGCCAATTTTCTAAAAGCTGCTTTCCTTTTTGACCCAGCCAATTTGTCCCT
+ATGCCAGTTTTCCAAGCAAACCCCTATCTGATTTGGCCTTCTAAAAAATTTCTGCAAGGTCTTCTTTTCTTTCTAACTGT
+TCCAATTTTGTTCTTGAGTCTTCTTATTCAAAAGAGGCTTCTATACCTTTTATTAGCGTACATTTCTAACAAAAAGCAGT
+TTCTCTTGTTTGAATTTTATTTTGCTCTGTGTTAAATGGACAATGCATGCTAAGGCAGAAAACTAAATTCCTTTTACATA
+TTGGAGATTCATTTAAGCTGGGATAGAAGACAAAGATAATAAATATTTTTCAAAACTATTAAAAATTTTAAAAATCAATC
+ATGATTAAATTGCTTACAAAGAGAAGGAATTTGTTTTGTTTATTTTGAAGCCTCTTTTTCTAAATTTTGGCTCTGAAATA
+TATATGACTATATCACACATATATGCAAATGAAAATAATAAAAGTAAGTTCTAATGAAAAGACATTTAGCACATTTAATC
+AAACTAAACCACTTTGAGGCATCAACAATCCAGAGGAGTAAGGCTATGTGTGTGTGTTTTAAAATTTTTATTGCAGTATA
+ACAATGACATAGAAGAACACACAAGTAATAACTATGTGATCTGATGAATTTTCACAAACTTAAAACACTCATGTAACCAG
+CAAAAGGGTTATCTTATTTAAAAAGTCTCACTGATGGGCTCTTGAGAAACACCTGTATAGTTGTCCTACCTTGTTATTAA
+CTTTATGTAAGCATGAGGCTGTCTCTCCAACTCAATGATATATTCTACATGGCCGGATAGTAGGACTGTAGTTTATACCA
+CACATTTTTTCTAACATAGTGGTAGCTAAGTGCCCTGGAATGAGGACACCCTGAGTAAATGAATATGAACTATATCAGAC
+TCAGTTTATTAAACATTAATACACACAAATCTCTGCTCCACATGCTTTCGATTCCTATTTTATCTTCTATATCACTGGCC
+TTTGCTTCTAAGTCTTTTTGCTGTCACCAACCTCACTGACAAATTATTTAATCAATTATTGACAAAATGCTTAAGTTTCC
+TAGAACTCAACTATTCTTCATCTAAACCTCTCTCTGGCCGAGTTCAGTTGGTCCCATGGCTTTAAATAACACCTATAAGC
+TAAAGACTCTTGAGTTATAACCCGTACTACAGCCAATTAATTGCATTTACGGCCCCAATTAATGGCCTTTCTATATCCCT
+CTCATATCTTGACGTGAGCTGTTGCAAAGTCACATCGCAACAACTCACATCAAGAGGTGGAATCTATTTCATTACCCAGT
+GAATCTGAACTGGCTCTATAACTTTCTTTGGCCAATAGAAGGTTGGGGAATACCAACTGCATTACCAACTTTAAGCCTGT
+GCTTACTCTTGGAGCCCTCCGTCTACCATATGATCAAACCCAAGGCAGCCCACTAAAGAATGAAAAACCACATGGAACAT
+TACACTCACTGACAATCAACCAACCCCTGGAAGCAAAGCCATCTAACCAACTTGCAGCTGACTACTGATGCATGAGGTAA
+CTCAAGCAAGACTAAATGAGTCCCCCCAGCTTAACCCAGCTAAATAAACAACCTGCAACCTCATAGGGTAACAAAATAGT
+TATTGTTTTAAACTGCTGACTTTTGGTGTGGTTTGTTACACAGCAATAGCTAACTGATATACCAGCTTAGACATATGTGT
+AGACACGTATTCAACACCAATCTACACTTCCATGTAGACATCTCATAGCCATTAAAAAAATCTATACCAAATTTGATGTT
+TACCCCATCTAAAAACCTCTTCAATGTTTTCCTTTTGAAAAGTGGCCCCCTTATCTGTCCAGTTCTTCAAGCCAAAACTC
+TGGACGATACCATTTAATTCTTCCTTTCCTTCACTTTCCACATTCAATCCATCAGAAAGTCCTGCTGATTCTAATTTCTG
+AATTTTTCTTAATCCATTTAGTTCTGTCCACTCCACTCACATCATTTCTGCCAGTCTAAGCTGCTAGTGTCTTATGCCAA
+CAGCACTACAGTAGCCTCCTGTCTGATCCCTTCTGAACCTCTCCAAACCTTTTTTACACAAAAGCTATTTTTGGTTTAAA
+AATCCTAGAAGATCATGTCGGTTCCCTGCTTAAACCATTCAGAAACTTCTCATCATGCTTAGAATCAAATCCAAGCTGAG
+TATCATGGCCCATGAGACCCTCCATGACCTATTTCCTTTCCTTCTCTCCAGTCTTTCTTCCCGCCACTCTCACTCTTTGA
+CTTACTACATTCCAGCCACATTTGCCCAATTTTTTTTTTTTTTTTTTCTGAGACAGTGTCTTACCTTGTCACCCAGGTTG
+GAGTGCAGTGGTGCAATCCCAGTTCACTGCAACCTCTGCCTCACAGATTCAAGCAATTCTCCTGCCTCAGCCTCCCTCCT
+GAGTAGCTGGGACACCAGGAGCATGCCTCCATGCACAGCTAATTTTTATAATTACATGTAGAGAGGGGGTTTCACCATTT
+TGACCAGGCTGGTCTCGAACTCCTGACCTCAGGTGATCCACCCACCTCAGCCTCCTAAAGTGCTAAGATTTCAGGTATGA
+GCCACCATGCCCAGCCCATGTTTGCCCTCTTGCTTCTCCTGAAATAGCCAAGCTCTTCCTCATCTGAAGGCTTCTGCTCT
+TGCTCTTTAATCTGCTACAAAAATCTCTTTCCTTGCTTCTTGCTCTTTGCCTGGTGTCTTCCTTTACCTCTTTCTTATCC
+CAGATTAAGTCTCGCCACCTTTTGAAGGCCTTTTCTGGCCACCCTGTCTAAAGTGGTCTTCCTGTCCCACCATTTTTCTC
+CATTGTCCCATCCTATTCAGTTCATTGAAAATATTACGATTGCTGATTCTTGGTGTATTTACTTAACAATATGTGTTTTC
+TTGCCTGTCTTTTCTACTAGATTTGAAATCCCATGGGGGCAGGATACCATGTCTACTTTGTTCACTTAGCATCATAAGTG
+CTTTCCACAATTTTTGGCCCATGGTAGGCACTCAGTAAACATTAGTTTAATAAACTAATCTAAGTCTTCCAATAACACTA
+TCAAGTAGATTTTTATTATCCTAGTTTTACAAATAAGAAATTTGAGGGCAGGCATTATTTGTATTACTCATACTCAGAGT
+AGAGAGACAGTACGGTAACCAGGCCAATATGTCAGAGAAGTGGAGTGGGGACATGAACCCTATAGTTCCTATACAAAGCC
+CAATCTTTAATTGCTCAATTCCCATGACTTGAAAAACAAGCAATTGTGCTCTTGAGGGATGGAGTGTTGTAAAGGGATGT
+GAAGAACAGAATGCTCCAACCCAGACAGGTGTTTCCCCCTCCTTGAACTGATGCTGCCACTCCAAGAGTCATATCCTCAC
+CCAATTAAATCCAGATGCAGGAAGGTAGGGATAGACAATAATGGCTCTTCTTCATACTTTCCACTCAGAAAAGAGGAAAA
+TAGTTTCCTGAAGCCTTCAGTGGATATCTAGTTACATTTCATTAGCCAGAATTTGGTTCCACTCCCACTACTATCATTTG
+CAAGGGAGACCAAGATTGCCACTCCTGATTTACGTTCATCACCATTCATCATCACCTGGGCTAGGCTGGTTCCACCCAAA
+TGTATCAAGATTCTGATATCTATGATGGAAGGGAAGAATGGCTATTGATCATACCAGCAACAGCATTTGCCATAGAAATT
+GAATTACTCTAGGTTCTTTTGTCCCACTTTATTCTAGAAAATATGGATGTTATGGATTAAGTCCATGGACTAAGAAACTT
+AGAGCTCAATCTGATGATACTGAGTCTTGAAAAAGTTAGTCTACTCTTCTCAGTAATTGATGAACCCATATGTTGTATCC
+AAATAGTTCCAACCTCCCAGCAAAGCATAAATTTATATACCTGGCATAGAATAACAAAACCACACAACATAACAAGGCAT
+TGTCCTAATGATAAACCTAAAATTGTCCTGATAGATAAATCCAGCCATGTTTTAAATTCAGCACTTTATAAGCAACCTAA
+ATATGCTAATATCAAAATAAATCCCTTTTGGTAAACCTCAGTGTTACCTTGTTCTCTTCCCCATAAAGACAACTTTTATT
+TTCCATAAAATTCAATCTACATATTTCTGTTTTACCAGCAGTCATACAAGAGTTGTTGAAAATTATTTATAGATGAATAG
+AGGCAAGTTTGGCGTTGCAGAGCTGAGCTTTGGCTGCAAATTGGGAGACTGGGTTCTGGTCCTAGAACTTCCAAAAACTA
+GCTGTATGAATTTAGGCAAGTTACTTCATTTCTTCTTTGGTAAAATATGGAGATTAAAAGGATTATATCTGATAACCCAA
+TTGAAACCACTTGAACAATATATTAGTAACGGTAATTGATTCTAGCTGTTACAACAAATGGCCCCCAGATTATAGTAGTT
+TAACATAATTAAAGTTTATTTCTCACCTAACAGTCCAGTTGGATTCATGAGGCATTTTTGCTGTCTTTCCTCCAGGCAGT
+GTTTCAAGAACCCAGCTCCATTCATCCTATGATGCCACCATCCTCAGCATTTGACTTCCATGGCTCCTGTAAGAGAAGAG
+AACATGAAGAACAAAACAGGAGGTTTTTATGGGAGGCATGTCAATTTGACCAGATACTTTGCCCAGAACCTTGTCACATG
+GCCTCTGCCTAAATGACAATGAGGCTAGTGGCTGGGTGTGGTGGCTCATGCTTGTAATCTCAGCACTTTGGGAGGCTGAG
+CCAGGAGGGTCTCTTGAGCCTAGGAATTCAAGATCAGCCTGGGCAACATAGGGAGACTTCGTCGCTACAAAAAAAAATAA
+ATTAAAAATTAGCTGGGCATAGTGGCACATGCCTGTAGTCTCAGCTAGTTGGGAGGCTGAAGTGGGAAGATCACTTGAGC
+TGGGAGGTTGAGGCTGCAGTGAGCCGAGATCACGCCACTGCACTCTAGCCTGAATGACAAAGTGAGACCCTATCTCAAAG
+AAAAAACAAAAAATGAGTCTGGGTAACGTAGCCTGTGAGACCAGAAAGAAGCATCAGGATTGGCGAGTATTAATACCATC
+TCTACCATAAACATTTTTGGAAATATTTCTTTTTTTTTTTTAGAGAAAGGGTCTTGCTGTGTTGCCCAGGCTGGTCTCAA
+ACTCCTAGCCTCAAGCAATCTTCCCAACATAGCCTCCAAAAGTGCTGGGATTACAGGCATGAGCCAACAAGCCTGGCCCT
+AGAAATATTTCTTTCTCTAAATTCAAAGTAACTAGGGGGAATATTAATCTCATTCTAAATATTTTATTTTGTCACTCCCT
+ATCCTTTGTTTTCAAACACCTCACACTACTATTATGTGTTATCACGAGTCAACTGTTTACATGTCTATCTCCAGCTGTTG
+GTTGGGAGCTCACTGAAAAAGGAAACCTTTGTAATTCATTTTTGTATCCTGAGGGCTTAGCACAGAGGCTGGCACCTGTT
+AGACCTAAGAAGGATTGGCAACTCTGGACAAATCAGAGAAGGAGCGTTGTCAACCCTTTTCTCCTGGTGATGATGATGGT
+GATGGTCAATGTCCACTCCCAAATATCTATATTGGGAAGAGGTGGTTTGTAGGGTGAGCATTTGTCTTTTGAGTGTATAC
+TTGAGGGAGGGATGAAATTATGTTTGTTTTATACTCTAGAAAAATTGGCTAAAACAGCAAATGTAATTTTACAGAATGAT
+GGAGAGGGCATACTGAGGTGGGAGGCAAATAATAGAAGGAGATGTAAAACTGTCAACCTTGATCCCCACAGTGATGGGGG
+ATGATGACAGCAGGAGTAAGTTATGGGATGATTGTATGTTTGTGAACCTCAGTTCTTCTGCAGCATGAGCTGGCAGGATC
+CTCTCCTTGACCACCTCAGGTGACCTGCTGGAGCCCACTGGTCTCACCAGGAGAGGTCATCGGGCCACCAGACCAACTGA
+AGTAATAGTTCTTCTTGAGGACAGTCTCTAGCTCCCCCTCTATCCCATCCCACAGTCCAAAAATGCTACCCAGCATGGCC
+TAACCCAGAGTCTACCCCCAAATAAGTGATTCTGCAAGTTGCTAGAATTCAAAGATGCTCACACCCCAGAATTAGAAAAC
+ATCACTGTGGCAGTTTCAGGTTCTCAGCAAGCAGACCAAGAATGCACATGTGAGCTTCCTTTATTTCAAGTTAAGGATGC
+ATTTTCGGTTTTTAAAGTTCAACAATAATAAAATCAGTTGAAAGCCCAACTTCTAGAGCAGTGCTATTCAATATACTAGC
+ACTGGCCTGTGTGGCTATACAAATTTAAACTAATCAAAACTCAACAAAATTTAAAATGCAGTTCCTCAGTGGCAGTAGCC
+ACATGTTAAGAACTAAATGGCCACATGTGGCTAGTAGTCACCTTATTGGACAAGGCAGCTGTAGTACATTTCTGTCATCA
+CAGAAAGTTATGTTGGATAGCATTCCTCTAGAAGCTGCTTCCGTGAAGCTGTGTGCCAGCTTTCTAAGTCAAGTTCTGTC
+GATCCTATTTGGTAGAAAATATGATAAATGTTTTAGAAAATCACAAGAGGACTTTTATTTGGCTGAAATTTAAACATATG
+CATGCAATTTCTTAAAATATATTTTACATATTTGAAGGTTCAGTTCACCTTGGAAAGCCAAATTTCACATCTACAATATG
+AACAAAATTTCAACTTCCTGGAGCTCAGTGATAATTATGGATTATAGGTTAAAAATAAAGTTAATAACTCCTAACAAGAG
+CATGAGCAATGTAAGACTTTTTCCAGGTAACATTAGAAGCAAGAAATAACTAAAAAAGGACATTGCTTATTTTTAAGGGT
+ATTATCTCCAAGTACATATCATTTTAAAGAATAAAATTTTGTGTTATCTTTTTCAAAGCACAGATATTTCTTGTACATGT
+TAACCTTCTAAGCTGCACAATAAGATATGACATCGTTTCTTTCCAAGAAAAGAAGTTAGTTCACCTAAGAGAAAAATGAA
+TCAGCCTGGATCCTATTTTATTATTGGTCCAGGAGAAGCAAGGACCCTACCTGACTTCAGGGAGACCATCTGGGCTAGTT
+ATTAGAGCTGTTTATAAAACCCTGGCTCTCCTTCTCTAGGGGTGTGGGAGGGCTCACTTCCCCACTCCCTAAAGTAAAAA
+AAGGCCACATGACTTGGCTTAAACATGAGTAAGAGTAATGAATGTCTCTTCCAGAAGAAGCATTTAAGTAACTGTGCTTG
+ATTCATTCCCCCTTTCTCTCTACATTGACAGTCATGGAATCCCAAGTTGGAATCTCCATCAGCAGATAACCACTAGTGAG
+GACGATTAAGAAAGGCTCTTTTAATCTACATTGGACACATAGCAAGAGGAAAAATAAACTTTTCTTATATTAAGCCATTA
+TGAGCATGAGGATGTTTGTTACTGCAGTATAACATGGCCTTTGCTGACCAATACCTCATCCAAATCTAGGTCACATGGCA
+CCAAATGGCAACACGGTTCTGAGTCTTTTGTTTCTATGAATACAAAGTGAAAAAGCTATATGTGTACAAATCAAGAATCA
+GATAAAGGTGAAGAATCAAACTTGACGAAAGCAATCCTAGGGGCTGGGCATGCTGTCTCACGCCTGTAATCCCAGCACTT
+TGGGAGGCCAAGGTGGGTGGATCACATGAGGTCAGGAGTTCAACACCAGCCTGGCCAACATGGTGAAACCCAATCTCTAC
+TAAAAATACAAAAATTAGCCAGCCATGGTGGTGGGTGCCTGTAATCCCAGCTACTTGGGAGGCTGAGGCAGAAGAATCGC
+TTGAACCTGGGAAGCGGAGGTTGCAGTGAGCCAAGATCGCACCACTGCACTCCAACCTGGGTGATAGAGCAAGACTCCCT
+CTCAAAGAAAAAAAAAAAAAAAAGCAATCCTAGGTCTGATTTCCTACAGACACTAAACATAGAAAGAAGAAATGGCTCAT
+TCATGTGAGCACAAACAGTAAAGAGGTTTATTTAAACTCTGCTGTGAGCTTGGGCCTCCTTTCCTGCCTGGTTGAATTTA
+AAAATACCATATAGACTGTGTGTACCTAAGATGCTTTTCCTACAAGAGCCTTAAAATTTTCATTTCTCTCTTTCTAGATA
+TGCTGCCAAGAGAATTTTTTTCTTTAAGAACAAATAAACTAAAGAAATTAACATCCAGCTTGCTGAGTCACTTGCTTTCC
+ACTTGAATTTTAACTACAAGGATCTGAAGACAGCCTGTGTTAGGCAGAGAAAGAAAGAGACAGAGAAAGAGTCAGGGAGA
+AGAGAGGAAGAGAGATGTAGCTGGAGAGTCAGGGAGAAATTCAAAGAGATGGAGAGAGAGACTGGTTCTGGAAAGAAAAG
+ATAAGTTAAATAGAAAAAGGAAAGTAATCTTTGAAAGGTTTGTTTAAGACATGTATTTTCTTACTCTGTAGTGGGAGCGC
+TGAATAAAAATATTTAAATAAAAGCTGTAATGTGATTAAGGTTTGAGTTGGGGTTTAATCGTGGAAAGAGCAAGGTATTT
+AGAGTGAAAGATCTAACTTTTGTATTTATTTTTCATTTTTTAGTGTTTTCGCTTTTTTAATCCCAGGTCCTCTACCATTG
+ACTATCTGATGTTAACAAAATCAGTTCGCTTTTGTGTCTTGGTTTCCTCATCTATGAAAAGGGAATAGAAAAACTTACCA
+CATAAGTTGGTAGGGAAGACACAATTCTATAAAATATATTGGATGATCTGTGAAACCACCTGACACCAAAAGGGAATAAT
+AAATTCCCTTCTCTCTTCTTTTTCTACACCATCTCTTTTTAATTCTACCAGAAAGTTCCTTTCTCTATGAGGCTTCTTGA
+AGACTGGCTGTATACAGGAAAACTGTATTTCTAATTCTGCTATTTATTGGAATACTAGGTCAAGTTTTTCCTCCTTCTTA
+TTCTATTTAATCTTCACAATAACCCTACATGAGTTTATCTTCATTTTACAGAGAAAACAGAGGCTTACAGAGGTTAAAAA
+AAAATTCCCAAATCCACACAGATAGTCTTTTAGTAATCTATTGCCATAATAATACAGTATTGAAATTTCCCCCAAATTCG
+GCAGCTCACAACAACCATTTTGTGTGACTCATCAACCTGTGGGTCAACTTGGCCGTTCTGCTGGTTGAAGCTGGCCTCAG
+CAGGACACACTCCTGCAACTATAGTCATCTGGGCTTTGGCAAATCCCTTTAGTCTATAGCCAAAGAGCAATAAGTGAGCA
+GCAAGAAACTGAATTATCTCTGACCCTTTTATTAAATAAAATGCTGATCTTGGGCCAGCTAACTAACCTTCCAGAACCCC
+ATGCCAAGGCTTTCCTTTGGTCCAGTGCCTTAAACCCATGCAGTCCTGGGGTAGAGAAATGAGGAAGCCCATAATTCTGG
+CAGAAAGGCTGCCTCTCATTCCCAAATACCCCTGAGAGAGATGAATGATAGCAGGATGGAGCCTGCCAAGGGCACACAGA
+CAGGGCATACAGAATCGTCCAACCTCTCACCAATTCAAGGGCAGGGCAATTAAGAGGCCAGAACTCACCACAGCTAGAAA
+TTATTTACGTTAATGCTTTAGCACAATGTGGTCCACTAACCAGCAACATCAGCATCACATGGGAGTTTGTTAGCAATGTT
+GACTTTTGAACTCCATTCCAGACCCACTAAATCAGAATCTGAGAAGCACAGGTTCAGCAAGAATCGCACCAGGAGTATAC
+CACTGGAAAAGCATGTGTAGCCAGATTTACTGTATTAGATGAATTAACGCTTCAAGGCCCAGCACCTTACTGAGTTTCTG
+ACCTCCCACCCAAGCACTCAATACTTTCCCCTGTCAATTGGCCCTGCTCTCTGGGATGAGTGCCTGGTATTTTTGCTTGC
+TCCGTATGCATTTCCTTCTTGTTTGCCTCATTCTGATTTTTCCTTGAAAGGAGTTTCCTTCCCCACTCCTAGTCGATGTG
+TGTCAGGTTGACTTATCATGGCTTCTGGGAATGAATATATGACCCAGGTCTGACCAATTAAAAAAAGTAGGGATTTTGCT
+GGAACGATTGGAAAAGTGGCATGTTCTTTTCTGCTGTTGATGATAATTGGCTGAGTCTATCTGAGAATGAAATCAACACA
+GAGATGATGAGAGATAAGTTCCTGATGACATTGTTTGAGCACCTAAATCCAGCCTTGCTTGAAACTATAGATCTGATTCC
+AGATTTTTTAACTGCATACGCCAATACATTTCCTTTTATGCCTAAGTTATTGTGGTCTGGTTTCTATAATTTGCAAGCAA
+AATGGTCCTGGTCTGCAAAGCCACCCTATGTTGTAAGCTTGATGCCCCCCTCACAAGTTTTTGCCTCAACTTCCTGTCTC
+TCTTCTGTTTCCAATCACCTGTGACAGTCATTTTCCAATTTCCAAATTACGAAATTTTTACATATTATATTTGCTGTCAT
+AACACAAGTGGGAATTGACCCCAGAGACTTGGGATAGACACAATTCTTCAATAACATAGAATAGGCCTGAAGTGCTCAGT
+TCTGTCTAGAAGCTCACTACTCAAATATATACTGTGGGGGAAAGTAACCGCCTGATGGTTAAAAGGCTGGCCCCGTATTT
+CCTCCTCTTCCTCAGTTTCCTCATTCATGAAATCAGCAGACATTTTGGAGATTTACTTCAGCCCAGACTTTCTAAGACAC
+TCATAGTTAGGGGTTCCTGGTTTAAATAGATTGCGACAACTTAACATGCAGCTTAACATGAAACACAGATGGCAGAAAAA
+TAAAAAGGTGCCTCTATCAGTTACAAAGAAATTGGTTGGGCGCAGTGGCCCACGCCTGTAATCCCAGCACTTTGGGAGGC
+CGAGGCGGGTGGATCACCTGAGGTCAGGAGTTGGAGACGAGCCTGGCCAACATGGTGAAACCCCATCTCTACTAAAAGTA
+CAAAAATTTGCCCGGTGTGGTGGTGGGCGCCTGTAATTCCAGCTACTCAGGAGGCTGAGGCAGGAGAATCACTTGAATCC
+AGGAAGTGGAGGTTGCAGTGAGCCAAGATCATAGCACTGGACTCCAGCATGGGCGACAAGTGGGAGACTCCATCTCAAAA
+AAAAAAAAAAAGAAAGAAAGAAAAAGATTGGCTAGACCCTGTGTGTGGCATGACATCCTATTCCAACTAAACACAATGTT
+CTATTTTACAAATAAGAAAACAAGCTAAGAGAAGTTAATTGGTTTTCAAAAAGTCACACAGTACAGACATAGCAGAGGAA
+GAATGAGAACCTACAGCAGAGTTGAGAAGGTTCAACAGAATAACGGGTCATGTTTGGGCACTTGTATTCTGGAGCTGGCA
+CCTGGGTCCTGGCACTTATTAGCTGAATGTCCTTGGGTAAGATATTTAACTGCCCTGTGTCTCAGTTTACTCATTTGTTC
+AATGGGCATTATAATAGTATAAACATCAGAAAGTTGTAGGGGGTTAAATCTCTAAAGTGTTTAGAATGCTTGCAACTTAT
+AACATTTTATATGTGTGTTTATTAAATAAATAAAAATAAATCTGACTTCATGTCCAAAGTATATTTTTCATACCCTGCAG
+CTGTCCTGTACCAGAAATATCTCTTCCCACAAGATCTCTTTAACGACCTGGCAATGTGTGTAAAACCTCACCTTACTCAA
+TTCTTAAAATTACTCTGATTTTCTAGCTTGGTGAGAAGTCTCTAAATGCTTACTATGAATTAGTTACCTGATAATCTATC
+CTTAGTAGAAGCTATCTGCCGCAGCAGATTAGGTCACTAGGGCTTCTTGAAGGGAGAAAGGAATTAGAAAAAAAGCATCG
+AGTGGATGGAAAGAACCAGAAAGAAACAAACAAGGCAACACCTATTAGTTTATTCATGGTTGACCCAAGGAATAGATTGA
+AATACCAGATGCACCTGAAAAATAATTTTTTCTCACTCTGGCACCCAAGCTGGAGTGCAGTGGGCTGGGTGGAGGGGGGA
+GGTCATGGTTCACTCCAGCGTCAACCTCCCCAGCTCAAGCGGTCCTCCCACTCAGCTTCCCAGGTAGCAGGGACCACAGG
+CATGTGCCACCACACTTGGCTACTTTTTTAAAAAAATTTTTGGTAGAGACAGGATCTGACTGTGTTCCCTAGGCTAGTTT
+CAAACTCCTAGACTCAAGCAATCCTCCCGTTCGGCCTTCCAAATTGTTGGAATTACAGGTGTTAATCACTGCACCTGGCC
+TGATTTTTTAATTATCAAATCATGTTCAGAACTCACTTCAATCCAATTAACTGAGGATTTCTGGAAAGGTTACCCAGGCA
+TCAGTATTTTTTCACTGTGATAAAATACATATAAAATGTACTATAATAATTTACTTTAATAATTGTCTTGACAATTTTTA
+AGTGTAGAGTTCTATGGAATTGAGTACATTCACATGGCTGTGCAACCATCACCACCATCCATCTCTAGAACTCTTTTCAT
+CTTGCAAAACTTAAATGCTGTGCCCGTTGAACAATAACTTTCTCTTTTCTCCATCTTCACCCTGACAACCACTATTTTAC
+TTTCTGTCCCTATGAATTTCACTACTCTAGGTACTTCACGTAAGTGAAATCAAACAATATTTGTACTTATGTGACTTGCT
+TATTTCACTTAGCATAGTATCCTCAAGGTTCATCCACGTTGTAGTATGTGTCCAAGTTTTCTTCCTTTTTAAGGTGGAAA
+ATATTGCATTTTATGTATATGAATTTTACATATCCATGCCTCCATCGATGGACACTTGGGTTGTTTCCCTCTTTGGCTAT
+TGTGAATAATACTGCTATGAACACGGTGGGCAACTCCCTCTTCTAAACCTTGCTTTCAATTCTTTTGGATATATACCCAG
+AAGTGGAACTGCTGGATTATATGGCAAATTATTTTTAATTTTTCAAGGATACATCTTACTGTTTTCTATAGTAACTACCA
+TTTTACACTGCCACCAGCAATGCACAGGAGTTCCAATATCACTACATTTCTTTCAATACTTGTTATTTTCAGGTTTTTTT
+CTTAGTATCTATTCTAAGGGGTGTGAGGTGGCTTCTCATTGTGGTTTTGATTTGCATTTCTCCAATGATTAGTGATATTG
+GACATCTTTTCATGTGCATATTGGCCATTTGTATATCTTTTAGGAAAAATATCAGTTCCAATACTTTGCCCATTTTTGAG
+ATGGGTTGTGTGGTTGTTGTTGAATTATAGCTTTTTATGAATTCTGTATATTAATTCTTTATCAGATATGCTATTTGCAA
+ATATTTTCTTCCATTTTGTGGGTTGTCTTTTCACTCTATTAATTATTTCCTTTGATGCCTAGAGATTTTTATTTTTGATG
+TAGTCCAACTTACTTTTTTTTCTTTTGTTGCCTGTGCCTTTGGTATTTCCACTTTTTTTGGCTATTGGACATAATGCTGC
+TATGAACATTTATGTACAGGATTTTTTTGATGGATATGTGTTTTCAGTTCTCTTGGGTTTATGCCTAGAAGTAGAACTGC
+TGCATCATATGGTAGCTCTGTGTTTAAGTTTCTGAGGAACTGCAAAAATGTTTCCAAAGTGGCTACACCATCTTATACCT
+TACCAACAATAGATAAGTGTTATATATTCTCCACATCTTTACCAACACTTGCTATTTTTCATCTTTTAGATTAGAGATAT
+CTTGGGGATGTGAAGCAGTATCTCCTTGTGGTTTTGATTTGCATTTCCCTAATGACTGATCATGTCAAGCATTTTTCATG
+TGTTTATTAGCCATTTGTAATCTGCTTTAGAAAAATAATTCTTAAATTTTTTTCTCATTTTTAAGTTGTGTTGTCTTTTT
+ATTGAGTTGTAAAGGTCCTTTAAATACTCTGCATAGAAGTCCTTTATCAGATATATGATCTGCAAATATTATTTTCCTTT
+TTGCTTTTTGCCTTTTCATTTGTAAAATTGTATCTTTAGAAACACATTTAACTTTTTTTTATTTTGAGGAAGTCAAATTT
+ATAATTTGCTGTCATTTGTATTTTGGTGTTCAATGCCTGATCCAGGGTCATAAAAATGTGCACATATGTTTTATGCTAAG
+ATTTTTATAGTTTTAGCTCTTAAGGTTTTGATCAATTTTGAGTTAATTTTTTTCTATGATATGAGGAACTGGGCCAACCT
+CATTCTTTTGCAAATGAACCTCATTCTTTTGCATATTTTGCAGTTTTCTGAGCATCATTTGTTGAAAATACTGCCCTTTC
+CCCATTGGTCTTGGAGCCTTTGTCAAAAATGATTTGACCATACTATATGCAAGCATTTATTTCTGGACCCTCTATTCTAT
+TCCATTGGTCTGACTTTATGCCAATACTACACTGCCTTGATTATGAAGCTTCATTGTAAGTTTTGAAATCAGGAAATCTG
+AAACTGCAACTTTATTCTTCTTTTTCAATATCATTTTGGTTATTTAGAATCCCTTGAGATTCTATATGAATTTTAGAGGG
+ATTTTTCTATTTCTGAAAAAAAATGTCAATGAGATTTTGATAGGAATTGCATTGAACCTATAGATCACTTCGGGTAAGTT
+AGTTTTTCTATATGGTGTGAGAAACAGATACGCCATTTTTTTGCATGTGGATATCCAATTCTTGCAGCACCATTTGTTAA
+AAATACTATTTTTTTCCACATTGAATTCTGTTGTCACCCTTGTCAAAAAATAAATTAATCATAAATGAAAGGGTTACTTC
+TGGACATTTAATCTGATCCCAGTTATTTGTTTTTATGTTGGTGTCGTAAAGTCTTTATTACTATACCTTTGTATTAAGTA
+TGGAAATGCGGAAGTGTGACTCTTCCAACTTTGTTCTTCTTTTTCAAGATTTTTTGGCTATTCTGGGTTACTTGCATTGC
+TATATAAATTTTAGAACCATACTGTTTATTTTCCCAAAAACATTTCTGGGTTTTTCAGTAAAGTTTTTGTTTTGTTTTGT
+TTTGTTTGTTTGTTTGGAGATGATGTCTCGCTATGTTGCCCAGGCTGGGCTTGAACTCCGGGACTCAAGGGATCCTCCCA
+CCTCAGCCTCTTGAGTAGCTGAGATTACAAAGGTGAGCCACTGTGCATAGCTGGGAAGTACAATTAATGTGATAAGTTTT
+TAAAATCTAGTTCAAATTAATACAAACTTGATTTTGATAGTACACCAAAACTTTGCTCTGACATAGCTCTATTCCCTCCT
+CCCTCCTTGGTGCTTTTATTTTCATACAAATTACATCTGTATACATTATAAACTCATCAACATAGTTTTACAATTATTGC
+TTTATGTAATTTCGATCAGATAGGAGAAGAAAAAAATACAAACCAAAATAACACAATAAAGCTGGCTTTTATATTTCTCT
+TTGTACCTACCTTTACCAGTACTCTTTATTGTTTTATGTGGATTTGAGTCACTGTCCAGTCTCTCCATTTCAGCTGAGGC
+ACTTCCTTTAGTACTTACAGGGAAGGTCCGTTAGCAACAAATTATTTTAGTTATTGTTGCTGTTTTCCTTCCTTTTGAAG
+GATAATTTTGTTGAATATAGACTTCTTGGTTGAATGTCTTTTTCTTTCAGTACTTTAAATATGAGCATGTCATCCCACTG
+TCTCTGTGGTTTCTAATGAGAATCTTAATCTTATTAAGGATCCCTTGGACATAATGAGCCATTTTTCTCTTTCTGCCTTC
+AGTATTTTCCCTCCATGACTTTTAGTTTTGATTGTAATGCACACAGATGTTGATCTCCTTAAGTTATCCTACTGGAGTTT
+GTTGAGCTTCTTGGGTGTTTAGATTAATGTTATTCCTCAAAATTTTGTTAGTTTTGGCCATTACATCTTCACATGTTCTA
+TTTACCCATTTCTCTCATCTCCTTTTGGTTTTCCTTTATGCTGACTTGGTATGCTTGATGATGTCTCACTGATCTCTGAG
+GCTTCATTAGCTTTTGTTCATTCTTTTTCCATTTTGCTCAGACTGAATCATCTCAATTGATTTATCTTTGAAATTGCTTG
+TTCTTTATTCTGCCACCTCCAATATGTTACGGTGCTTCTCTAATGAATTTTTTTATTTCAGTTATGATATTTTTCAACTT
+CGAAATTTTTATTTGATTATTTACAATACCTATTTATTTATTGATATGCTCTATTTTGTGAGACATCTTTCCCATATTTC
+CCTTTAGTTCATTGGACATGGGCTAGTTCATTAGTTCTTTAACACATTTATAAGAGCTGATTTTAAGTCCTTGTCCAGAA
+ATTCTAATGCCTAGGTATCCTCAGGGACAGGTTCTCTTGACTGTTTTTCTCCTGCATATGGGCCACATTTGTTTGTATGT
+CTTAATATTTAGTTAAGAACTACACATTTTAAATAATATAATGCGGCAATTCTGGAAATCAGATTTCTACCTTCATCCCA
+CAGCATTTGTTACTGTTGCTATTTGTTACTACTGTTGTTACTGTCCCTAGTAACAATTTTTGTCTTAAATTCTATTTTGC
+CTGATATCCACTTTATCCAATTCTCTTTGATTATTTGCATTGTATATATATTTCATCTTTTTTTTTTTCAATCTATTTGG
+GGGAGTTTTGTTTGATTGTTGAGTGATTTTGCTGAACTACTTTGGTAAGTCTTCTTTATTGTGTATGGCCCTGAAGTCTC
+TACTCAGCTAATTTAATGTTCAGCTCATGTTTGGACAGAGATTTCCCTAAATACCTTGCACCAATAAGGCTCACAGTCTT
+TGCTGAGAAGTTCTGTGTATGTATTTTTTGGAGTACTCTTTCAATGCTCCACCAGGCAGGTTACAAAGCTGCCTTAGTCT
+TTGCTTCCTGATGATGCAGTCTCAGGTTATGGAGAGGTGAGAGAAGAGGGCCTTCTCAGATTTGGGGCATGCACCCAGCT
+TTCTGCATGTGCATGGCTTTCTAGATTCCCAGAAATACGTGAGAACTTTTGAAAACACCCTATGAACATCTCAGTTTCCA
+GCATTTCCTGTTATGATTTTTGGTCAACTTCTTATTTGTCCCAGCTGTTACCACTGCCTCAGGCAGCTGCTATGTGAAAC
+ATTTGATGCTGATTGTTTTTGACTAATGCTTTGGGGAGATAATGTTTATACTATGTGAGTTCTAAGTTACCTCAAATAAA
+GATAAGCTTCAAGAGTGGTAGTTTTTAGGGAACTTTTTATTTTTTATTTTTACTTTTTTTCTTTCCAACTTCTATTTAAG
+ATTCAGGGGGTACCTGTGTAGGTTTGTTGCATGAGTAAATTGCATGTCATGGGGGTTTGAGGTATGGATTATTTTGTCAC
+CAAGGTAATGAGCATTGTACCCAATAGGTAGTTTTTTGATCCTCACCTTCAGGAAATTTTCAGACAAGTTAAATAGTGAC
+AATTCTCTCTGGATGGGATTCTGGGGAGCTCCAAATCAATTCTGTTACCCTCCAAATGGCATGTAGGTCCCTGGTTTTCA
+AGGTTGCTATAAAGCTGCAGAAAGGGGAATGGCAATAGGGCAAGTTAAATGCCACAAAGTTTCTTGTTCTTACCCAAAAT
+GTAGACATTTTTCTTGAATAAATGCTCCTCAGATTGTTGCAAGCCTTTGGTTAATTTCCAGATTTCTAAACAAAAAAATT
+ATTTTAACAATTTTTGCTAGTGTTCTTGTTGCTTTTATGAAGGAATAGATTTTCAGAGGTCCTTATTCAACCATCCCCAA
+AGTGGCTCTTTCATAATCATAATTTTATTATTAAAATGTTGGCTCCAACAGACCTCCTGACCAGATGTCAGCTCTCATGT
+CTCATATTTCCATTTTAATCTCAGTATCTTTCCTTAACATTGCCTAGAAAAACACAGGGGTTAAAAATACATTTCAAGGA
+ATCTTTCTTTTCTCCACTTTAAAAATCAATACCGTTTTGTTATACTGAATAAGAAAGGTTATGGAGTTTCCAAAGCATTA
+TAGGAGAGGACATTGAAGACAGAATTTTGCAGCCCACCCAAACCAGGAGAGCTTGCATAAAAGCTTTTTGAAATAATTTT
+GTTACTGCCATTCTTTTAAAGAAGAGTAACACATTCACAGGAAAACAATAATGAAGATTGTTTAAATAAAATAAAATCTT
+TTTGCATCTTACTCATTATCCTTCCCAAAAGTAACCATCTTTAAATACTTCCTCTTCAAGTTATCTTTGGTGTTTCCAAC
+CTTAATTTTAAATAATATCTTTTTATTTCTTGATTTACCAATTTTAAATAATATATAAACATCCTTTATGAAAAAACTGA
+GAACGTTATCTTGTTGGCTCTGTCTCCCTTATTACAACCACTGAACTTTGGTACTTCTATTACTTTTTGGGTTTCTGTTG
+GCTAACTTTAAAATAATACATATAACTTTATATATGATTATACCATCAGTAAACTTTATTTTTCCTTTTATGAACTTTAT
+ACTGATTATTTTAATCCTTTACCATATAAAATGAGTAAATTGCGGTGTCTGCATTATAAAGTCTCTTTACATTGCCCCTC
+CTAAGCCCTCTCCTTTACTTCCCACATCTGACAGCTCTCTCACTACTTCTATATGTTAGGTCCCATAATATTCAGATTCT
+GTTTGGTAAATATTAAGTCATTTATTCTTGGTCTACAGTTTAATTTAAACTTCAAAGATCAAAAAAGAGCATTTACAGCT
+TATGATTATATAGATAGTATTTATTGCAGAACCAGGTAGGGTGGTAAAACTTTAAAGAAAAAATGTCTCTGTGGCACTAA
+ATTTTCACACCAAAAAAGAAAAAAAAATTGAAGATAAAGGACAAATAAAAAATATTTCCTTAAACGCCATCAAAAAATGC
+TCAAAATTGTGTCGTATAATTGACTTCAGATTTGAACCAAAAATATATGATATAGCTTTTGTTTTTCCTGTCATTATAGG
+GGCATTTTCTTGTTATTACTGATAGGAGATATTTATGCCATTGTCTGGTTAATACTATAATTAGCTTTTAAACTATACTA
+TTTGTCAAACTAATTTTACTCTGATTTATTACCGTATCTGTTTCCTAGCTTTGGTCCTAGGAAAAAAATGCTACCTTAGT
+TCTAATATTTCCTAAGTTCCTGGTCTTCCTCCTTTGCATAGATTCCTTTGTGAGTTGTATTGTTAAGGTAAAATCCTTAG
+AAAATATTTTTATTTGCAAAAAATGTTCATGGGAGGTAATTGTCCTGAGTCTGTCCACATCACATCTAGAAATGTTTTCA
+TTTTGCCCTTCGCTCTTTATTAATATTCTGGCTTAATATAGACTTATAGATGGAGATTTATTTTTCTTGCTGTATTTGAA
+GACCTGTTCCATTGACTCCTACCACCTGAATTTGTTTTGAGAAGTCCAATTTTGCTGTAATTCTTCCTCCTTTGTAGGCG
+ACCACCTCTTTCTTCTCTATGAAGCTTGTAAGAGCTTCTTTAGGGAAAGTGGGAAAATATGCAAAAAAAAAGAAGCATAC
+TCATTATGTGGTACTACTTACAGTGAACAATATTTACCTAGATATATTCATGGCAGAGTGACTATTTAACACACAGTTGT
+GTTATTGACAGGATGGGGAGAAGAAACAAGGGTGATGTAAGAGAGCCTCATTATTAGGAAGTCAACTGATGTCTATTTTA
+TAAATCAAGAATTAGCAATGTCTGCATATTATTTGGAAACAAAAAAGAAATTAACCAAAGCAAATGCAAATGCCATTGAA
+AACCATTGCCTCTGGGAAATATAATATAGATGTAGCTATAAACACATATATAATGTCAACAGGAGAAAAGTGCTATGAAG
+AAAAATAGTGCAGGGTAAAATAATTTTAAAAAATAGAGGTTAGCAACTTTAGCGAGTTTAGATTGTCAGAGAAGGCTCCT
+TTGTAAATGTGGCATTTGAGGAGATGAGACATGAGACGATCTGGGGGAGGCTATTTCAGGTAGAAACACTAACTAGTGCA
+AAGGTCTTGAGATAGGAGTGTGCTTGGTTGTTCAACGGACAGCAAGAAGGCCAACGTGATTGGAATGGAGAGGTGCTGGG
+AAATGAGTTCGGAAAGGTGGCCAGGAGCATGATCTTGTAGAGTAAAGGAAGAACTTCTAAGTGTGATGGGTGACATTGAA
+AGTTTTAGAGAAAGGAAATATGATCAGATTTGTCTTTTAAAAGTATCACTGCAGCAGCTATGTAGAGGAGATACAGGTGC
+TAGAAGACAAATTAAGAGGCTACTGTAATATCCCAAGAGAGAAATGATGGCGACTTAAACTAAGGTGGTATCCGCGGAGG
+TGCTGAGGAGTAGTTGGATTTGAAAAGTATTTTGACAATAGAGCAACAAGTGTTATTTATTATGTGTGAATATAAAAAAT
+ATCCTTGAAATAATATAAGATTTTCATCCTCAGGGACTAGGAATGTCAATGCCGTTCGCTAATATGGAGAACTAAGATGA
+AGGGAGCCTCTTGAAGTTAAAAAATCTGAAATCTGCTTTGGATATGCTAAGTTAGATGTCCATATTTGCCTTATGCTTAA
+GTACAGATATTGAGTAGAAACTTGTGCATACATGATGCAGTCCAGGAGCAAAATTAGGGCTGAATATGTAACATAAAAGC
+ATCAACACATAGACACTACCCAAGGCCTTGGAAATGGATACACTTACCAGGGAATGATTATGTAGAGAGGAGAGAAGTTC
+CAAGTCTTGAACGCTCGGAGTTGAGAAAATGAAAGCAAAGCCAGCAAAGGAGAGTGAAAATAAGATGCTTGTGAGTATAC
+TAGTTTGGAAGATGGGACATCATGAGAAAATGTTAATTAAGAATGTAAGAGGCGAAGTAGATTCTTGCAGAGTATGGAAC
+CAACTTATGCATATATATGTTCAGTTTGAGGTGCCTTTGATTCAGCCAGATAAGAGTGTTCATTAAGAAGCTAGGAATAT
+GACTTTGAAGCTCACAAGAGTGAGTGAGACTGGCAATAGGAAATTAAAAGTCAAATATATAAAAAGATGGTTAAACTCAA
+GGGTATGAAATCAGATTTTTTGAAGACAACATATGAAGGAAGATATAGAACACTGAGTCTCTTGAGGTACCATGGTCAGT
+AGAGAAGGCTGAAAGACCAATCAGAATTCTGCATTTTCTGATGTAGAATTTAGAGTAGGAAGTTTTCAGTACCTATTCTA
+ACTCCATTACCAACTGGCTGCATGGTATAAGATAATCATGTATTAATTGGGAATTAAAATAATAATTACTATTATACATA
+CCTCAGAGAGTTGTTGCAAGAATAAAATGAGATCATGTGTGTGACAGTTCTTTGCAACTTTCGTATTTCTTTAGCACAGA
+GCTATATAAAAGAGAGCTATTTAATTTTTTTCTCTTTGGCTTAGAGCACAGAAGCTTTCAGGGTGGAGAGTCAGGACAGG
+TTCTATGGACATATGGAGCTCCCAGGGGGGCTTGTGCAGGACTCAAGGAAATGTCCAGCTTAACCTGCTGGGCACAACAA
+TTTTGCTGCTTCAACCCATCTGCTATGGAAAATGAGGAGCCAACAAAAAAGGGAGAAACAGAGGTGAAGGCACCTGATAA
+TGAAAGCTTTAAGAAAGGGAAGGGGTAAGTACATGCCGTCAGGTGAACCCAGAAGTCAATAAAGATGAAAAGTCCTTTGG
+ACCATGACCAAGAGATTATTGATGCAATCTGCTGATGTCTTAGAATGTGCCTTGCCTAGGCTGGTGCCTGTGAGCTGGCA
+GAAGTGTGATGCCCTTCTCCGCTAGTATCAATAAAATCGCACGAGATATTCTTGCCCCATTCCCAGGTATATCCTGAGTC
+ATTCATTTCAACCCAGCTTAACCCCATCCTCACCCTCAGAGTTCTTCACCCTTGGAATCAATTCGTGTTGAAGACCTCAT
+ATCATCTGCCTCAAAACCACTGATCCACATTGCCATTCTGTTCTGACCTCTGGCTGACTTGCTTAGGTATATTTTGAGAG
+CATTTTCTGCCCTGATTCTTATTTAAGCTGTTTATTGGATATTGATCTCTGAATTCCCTCCTTGATTGTAATTTGAACTC
+TCTTCCTTTGAATATGTTGTTTCTGTTCACCTCAGGTTAACAGCCTAGACTTCTTTCTTGAAATTCGCCACAGCGAAACC
+TTGTGATTCAGCCAACCAAACCAGTAATTCATACTCATCCCTCTAGCAAACGAAACCCTGAAGTTGCTAATGAGAAAAGG
+CATATGCAAGCCACAGTGTGTTGAGGCAGAGAAGGTAGCTGCAGAGGGCAGAAAGGAGTCTGATGGAGCTGGAGGAGGCT
+GTACTAAAACATAGTGATTATGAGCTTGGGCCCTGGGGTTCAAACCCCAGCTATCCCATTTCTTAGCTGTGTCCTTGGCC
+TGGTTATTTAACCGCTCTAACCCTCAACTCTCTACTCAATTTTTCCTAGACCCCAGTGCTAAGCATATGGCAGGTGTTCG
+ATATTGAGTAAATGAATAAGTTCATATAGATGAAAAAAACAAGGCCCAGTATGATTGATACATATCTAATTAATAGCTAA
+TGAAAAAAATGGCAATCCTCCATATTAAAGCCAAGCTACATGTCATTTCTTAGAGAAATAAAGTTAAAGTCTCATCAAAA
+TGATAGATTTTATTTAGCCAATACTTCATAGTGCTAGACATGACACTCAGTGCTTCATAATTTCCTTGTGTATTTGCTAT
+AATTATTGTCTCCATTTTACCTATGAGATAATTGAGATACAAAGAGGTTACAGAACTAATATGATTCCTAAAAGATTTAC
+TCAAGGAATACTGTGATCCCTAACAAATAGCCTTTTTGAAGTGTACTAATTTGGACTACCATTTTCTCTGCCTCTCTGTT
+CCTAATTGTCACAGGTTCTTCCTCCCTACTGGACCACAGGCGTGTCTCAGCATTGGGAAGGGTCCTAGGATGCATTGTAA
+AAGAAAGTCTGTAATTTTGGTGCATCAAGGAACAAAAGTGTGGAAACATCCAGCCTGTGGCTGCTGGACACTCCCCTTCT
+TCCTTCTCACCATACTGTCTGTAATAAAGAATAAAGTGTTATTTAATATTCTGAGTGTACAGTCCTATTTTTTCTTAACC
+TGGCAAACCAGAAACAGCAACTAATATTCCTGGTTCTCTTTTTACCTTTCACAAGATCTATATCTACCTTAGAGGCTTCA
+GGGAACAAGAACAGCCTGGATAGTTTAAACCTAGAGTTAAACTACTCGAGGCATTTTAAGGTACCGCTACTTCACTATAT
+ATAAATGGGGTGGGGGGCTGGATTAAGGGCTAGAAGAATATATGAAGAGAAAGTGGCAAGCAAACCACTTGGAAAGCAAA
+CCAAGGAATCAAAACAGTTGCTCTATCAAACTGAATATTCTTGCGCAGAACTGAATATTCTTTGCCCTCACAGTGTTTTG
+AAACATTGTCATAGGGTGCCAGGCATGTCCCAAAGGAGGACCCTGGAGAGAGGGCTTCTGAGCTGGGCTATCTCTAAACT
+GCTTCCAAGTTGACTTTGGAAAGATCACATGGGATTTTAGAAAAGTTATTATTATTATGACTTATAAAATTGTAGTTATT
+GAGTCAACTATGAATCTTGCACCTTACCTTGGAGATACAAGCTAAATACCTTGGACCTAGAGCTACTGCAAACCAGAATA
+CTTTGGGGCATAATGTACCAAAAAAGCTTTCAGAGGCTTGAATTATCACAGTGTAAATATCCATCTAGCAGAGCAGGTAA
+TTTTCTCGTTACTCATCCTAAGCATTAGATACTCTGTGTGTTAGTTCTTTGCAGAAATAACCCATAGCAGCTGAACCATT
+TTGGAACAAATTTTCCATCACCTTCCTTCCCTGTGTGGTTCAGGCCTTTGCAAACCCTGAGTATCATGAATCTGCTGGTT
+CTCTACTGGCAAAGATGATGACGTTTTCTTGTTTGTTTTTGTTTTGCTTTAATCATGCCGTGAGAAAATCCTAAGGGAAA
+AAAATCTCATTATCCTATTCAAAATACATTTAGGGTTATCTGGTCATCCCTAGCAGATTTTTATGTACCCAATGTTTATG
+TATCTCAAATTACACTCTGTTACCTTTCCCAGAGCAAGATGAAGAACCTCTAAGAACCTCTCAAACAGCATCATTTTCAC
+AACAAGCACTAGCAATGGAAAACATTGTCACCACCACCAGCTAATCTTGTAGGGTCCATGGTATTTTCTGTAATGCCTTG
+AAACAGTCTGCCTCTCTCTTCTTGTCTGTCATTTTTATAGATGTCAGTGTGGGGCATCAGCAGTTGAAAACTCAGAATAC
+CAAGCCAGTCCTTCCCTGTGACTGCTACATAGCTTTTGCCTGCAGGATAAGCCTCCATCACTGACGTAGTTGCTTTTTAA
+CTTCTTTTTTTTTTTTAACTTGGAAAATCCCCCACCTCCTCCATTCCTTCACAAAACTTTCTGCCTGAATGATTTCTAGT
+CTGAGGGCCATACACACATTTTTTTTTCTCCCTCTGCAGGTTTTTTTTTCTCTTCAAACTTCCGCATGCTGCCTTTTTTT
+AAAGTGTCACACTTTGGAAAAAATTCTGACTACAAAGGATTTTGCAGTACATTGCATTCAAACCCCTTTCCCCCTATTCC
+AACTTGCTTTATAATGAAAGTAAGAGCCATACTCAAGAGACTTTTATTGGCAATTAAGTATCTAAGGACTTTGCAGATTT
+TCTTCTTTATTGTATGGCAATCTTAAATGTTTAATGAATATTTAAGTAAACAAATTTCAGCAACATGCCTTTTTTAGGAA
+TTTGGCTTTAATTAACATCCAAACACTGTTAATTATTATGGCAAAAACATCAATACGTCAAAAAAAAGCCTTCAGGCCAG
+GGAATCTTTAAGAAACAATTATGTCTACTAATTTCGATTAAAGGTATCCTTTATTAAATTCTTAACAACCCGTCTCTTTC
+TAAACGCAGCGCCTTCACAACTCTTCTTCCCTTCCCCCTACCCCGCTTCCCTCCCTCCGCAATGGAGCGCAATCAAAGGC
+CGTATTATTGCCTAAGCTTGGCTCTCAGGCCAGCTAGGGATGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTG
+GTGGGGCGGGGGTGGGGGGCGAGGGTGCAAGGGGAGAGGTTGTACGCCGCTTTTCCGAACCCAGCTCTAAATGGTTTCTC
+CTAAGCGAGACAAGGATTTTCCATAGCCTAAAAGAGGCCATCAAGTTTTAACATTGCGAGGCACGACTTCTAATCGCATC
+CTTCCCGGAAAAGTACAAACAGTTCCTCAGACGAGGTCCCCCACCTCCCACGCGCTCCCCAGCCCTCCCTCCCTGCGGAG
+AGCCCCGCGACAGCCTCCCCAACACCTGTGAATCATCCGGGAGGCTGCCACCGCCGAGCGATCCGCGCACCACCCCCTTC
+CCGGGCCCGGGCACGGCCAGGGAGGACAGTTAGGGTTGTTGCTTTATAATTATCACTTTTAATCTCTAATACGACCAGCA
+CAAGTAGCCTTTGTCTCCCCGCCCTGATTTGAGCATCCGAGGGCCCCCCGAGGCAGCCTGCACGGGTACCCCGGGGTTTG
+CGCACTGAGTGAACCCCCAGATTCGGCCGTGTCCCAGCCGTCCTTGCTCTGAATCCCCCACCTCCTCCCGGCCGTTTGGA
+CGGTTCCCTCGGGGCGTCTGTGCCTCCGTGGGGTACCCCTAAGACACCCAGCATGAAACCCCAGCACCGACTGCAAATTC
+CACCGAAAGCAGAGTCATCCGTTTTTGCCCTCGGCAAACAACTCAACTTCAAGCCTCGAGTCCTTGCAAACCCACGCTGG
+CTGGGCTAACTCCGCTTCCCTTCTCCTTCCCCCGACTCCTGCAACAGCTCAGAGAGCCTGGCTCTGACGTAGTTCAAAAT
+AACGTGCGCCAGCCTCGCTGCCCGGCTGACCACAGCAACGAGAGCCACAAGCCCTCGACGCTGCTTCGATAACATGAAGC
+AATCACTCATAAAAATAGCAACCCACGTAGCCTGGCCATATATGGAGCTGGCGAGGGTGGACGGGGATGCCCCTACGGAC
+CCTCTTCGGAGAAAACAAATCGCCGCTGAAATTTCCCCCACCCCTCCCATCACTACCTTTTACCCCTCCTCTGCCTTCCC
+CTCTCCTTAGATGACTCAACCACAGATTCCACTGAAAGGGGGTGGGGGTGGGGGTGGGGGGGAATATATACACATACACA
+CAAATTATACATATATTATACATGTATTATAAAATATACACACACATACATATATACATACACACACATATACACATATG
+TACACACACACGCACACACACCAAAGAACATCAGGAATGCAGTTGCGGGATGAACGCGTTTGCCCAAGACCGAGTGTTAA
+AATGGAGTCATAAGAGTCGCCCCGTGCGGGTGTTTATAGAAGGTGAAAACACGCAGGTTTGCAGGCCAGCGCACTGGGAA
+CCGAGTCAAGCGCGCCGCAGCCCGGGCCCCGCCCCCGCCCCCGCCGTGCCCCGCCCCCACTTCCTCCCGCCCCTCGCTGG
+GCGGCTATTAAAAGCTGCTGACGTCAAAACGGACGGCCATCTTTGATGAGGGCAGAGCTCACGTTGCATTGAAGACGAAA
+CCTCGGGGAGGTCAGGCGCTGTCTTTCCTTCCCTCCCTGCTCGGCGGCTCCACCACAGTTGCAACCTGCAGAGGCCCGGA
+GAACACAACCCTCCCGAGAAGCCCAGGTAAGAACCCCCCTCCCCAGGTCGTGGCTCTGGCTCGGGATGGCTGCATACCCA
+GAAGGGACAGCAGCTGGGCAGTCCGAGTGGGCTTGGGTAAATACAAGTATTGGTGCACTTAGGGTGCGGTATTCCGTGTG
+TGCACGTCTGGTGGGGGCTTTATAAAGTGTTTTCGAGGTGAGCTGCTGGGGAAAAGGGAAACGGGAGCCCTGCAAATGGA
+GCGACGGGGAGGAAGAGCGAGGGTACGTACCGAATAATAACTTACTCTTTTGAAACAGTAGTGGGACCCCCTCTTTCTTC
+CATTTTGCGATGCAGTTTTCAGAGATATTTAGGCTGTTGAGTTTTCCTCCCCTGTCGTTCTAACCCTATTATTATGACTG
+>21
+GGAATGTAGGAGGGTAGGGAGTGGATATTTTCTAACCTGGAAAAAACTCATTTTACCCTATATAATTTTTTTTAGCAAAT
+TCCTTCTTTGCACTTACTCCACAATCTTTCCAAATTCTCCCAAATGCTCAAGCTTTTAAAAAACAAAAGACAGAAAGATA
+GCAGGTTATTAGGTTTTCCACCAAACCTTTTCTTTCTATTCCTTTACATCAGTGAGCTTAGAATAACTCTGCTCCTGGAA
+CTGGGAAAGGGACTTGGGAAAAGAAAGAAAAAAAAAGCTCCCAAAGTTTAGCATCACAAAACATTATAGTCACTGCTATT
+TTATTATTTATTTATTTATTTATTTACTTTTGAGATGGAGTCTTGTTCTGCCACCAGGCTGGAGTGCAGTGGTGCGATCT
+CTGCTCACTGCAACCTCCGCGTCCTGGGTTCAAGTGATTCTCCTGCCTCAGCCTACTGAGTAGCTGGGACTACAGGCGTG
+TGCCACCACACCCAGCTAATTTTTGTATTTTTAGTAGAGATGGGGTTTCACCGTGTTAGCCAGGATGATCTCGATCTCCT
+GAATCTGTGATCCGCCCGCCTTGGCCTCCCAAAGTGCTGGGATTACAGGCATGAGCCACCGTGAGTGACCCTGCTTTGTA
+AGTTTTACATCATATATCCCCTGTGTTAGACCAAGAGCTTGTAAAAGCCAGAAGACATACACCATTTATCGTTCAATTAG
+AGATGTTCACTGATAAAACTGATTCTTCCATCTGAGGGTGGTAATTGTAGTTACAGTAATGTAGATGACAATCTAAGTTA
+TGTTCTATAAACTGTGTCACTGACATCTCAATCTACAGCTAACTTTGATTTTTTAAAAGCAGAGGAGTGGGTTGTATAGG
+TTTAGAAATACATCCATCAAGCTAGTTAAGTGAGATGGATTCAAGCCTTCAACTCACTGGAATATTTACCAAATTGACTA
+TTCATTAGCTAAAGGAGCCATATAAACAAGGTACTTTTTAAAATTCAAAATTATGTAAGGTTTTATTTCCTTTTTATGTT
+GTTATGAATTGGATTGTAGAGGTTATAAGGTAAAATAAGATTACTTTTACAGTAAAACATACAACACATTGTCGCAAGAG
+GGCAGCCTTTGAACACGAATTGCTTCTCAGGCATTCCTTGAAATTTTGAGACAGTTACTTTAATTAACACAACAAAATAA
+TAAAACACTACAGAGGATCTAAGAAGATACTTTGACTTATGCATATTGTTACTTTTTTATTACTGCTAGTGGAAGATGTA
+ATCGACAAGGAAGGTAAGGGCTGAATTTGTTTTATGTAAAAAGACAAATTTCTTTGGGTTTTAGCTTTCAAACATCAAGA
+TAGTAGGTCAATGTCTAAATGAGTGTATCAAAGTTCTCACAATGGCACATGTAAATTCACCTCTTGTCTACCCAAAACTC
+TAACCAAGCAAAGGCAAGTTGGAAGAATCAGACAAATGTAAGTCCCTAGGATACATAAATGCCACGGGATTTTATGTTTT
+TAACTTTCAACCTCCTAAGTTTCACTAAGAGACATGTTATTTGGAGAGGTTGCTAAAGTGTATTTGTATGATGTCTCTGT
+TTACGGTTGGTCTGGACCTCTTTCTGTCCAGTGGTTGCCCTCAACTGATGAGAGTCACCTCTCTAAAGATCATGCTTACG
+TCCCTGGAGCAGTCCGCAGCACTATCAATGTCTGACGTCCACCTTTTTCCAATTAATATTTCTGTACTTACTGAATTGGA
+AAACATACAGGTGTTTTTTGCCTGGTCACTTGTGGTTTTACAACACTCCCCCCGCTTCCCCACCACTGCCCCAGGTGATT
+TTGATGCCAGCCAAAGTGTGAGAACTACTGGCAGCTGCTGGCCGACAGCAGCAAAGGCCCAAATCCTCTGTGTGACTTCA
+AGACCCGTATCTGGTTTTCAGCCATATTTCCTTCTCCTTCTCACAGCTGAGTCCACTCCTCCTCCCTCACCACCTGTAGG
+CTATGCTTTCCAGCCTGTGTCCAGGATTAGTCCTGATGTAAGGCACGTGGGGTTTAAGGAGCTCTCTCTTCAGTCTTATT
+TCCACACCATCTCTGACAGATATATTCCAGTTGTAATCATGGGGCTACCTCCTCTTGTTTCCAGTCCCAATCTCGTTTTC
+TCTATCACATTCTACCTTGTAATAACCTAAATCTTCTGGTGGGCAAGCTTCTTCCTCAATCAAATTTGTGTTTATGATTG
+TGTTGGGGATTGGCATAGTAATCCAGGTACTGCTACTGCCTTGAAAGCGGAGACTGCATCCTTCTGTCTGGATATCTTGA
+GTTTCTGGATTGTAAACCTTATTAGGGCCCATTTTTCCTAGAATTGGAGCCTCTACTTTGTTCTTGCTACTTTAGTTCAC
+CCAGAAAGCACGATCTCTGCCTGAATTTCAAAGTGTTGGCTAGTTTAAATATCCTTTTGTGAGACTTTTGTCTTCCTGCG
+GGGTGATTTTTTCATCTTACATATCATTGCTAATCTTTTTACCTTGAATATAATTGAAATAGTTTTAATCTGTGTTACCA
+GCAACAAGAACCCCTTTTATGAGATGAAGAGGCCAGGTCAGTCTCATAACTTGTGTACCCCTCCGTGTGTTGAATTTATG
+TCAGAGCCAGGTTCTTTCCTATTTGATGTCTCTCACCAAATTCCAGTGTTTATGTCCTAGGAGGTGCCCAATAAATTCTA
+GAGTTGATACAAGCTGTAATTTTAGAGAAATTAAATTAGCAAGTAGAATTAGATTATATACAACCTAATCAACTTTTTTT
+TTTTTCCCTGAGGATGACCAAGATTTGGGCTTCAGTGACAATAGTTATCCCCTAAATATTTATACTCAATCTAGGATCAC
+CAGTAAAATGTTGAATAGAGGTGATGAACACACACATCCTTTTCTTGTCCTAATCTATGTGGTTTAAACATTTACGCTTT
+CACAATTAAATATAATATTGTTTATATGTTTTGTTGTGGATGCTTTCCACCACTTCTAAAAAATTTCTTTTTCTAGCTTG
+CTGAGAGTTTTTTTTTTTAATCAGCAATAGATATTTCTCCAAAGAAAATATCCAAATTACCAACAGGTACATGAAAAAAA
+TGCTCAGCATCACTAATCATCAGGGAAATGCAAATCAAAACCACAAAGAGATATTGCCTCAAGCCTGTTAGAATGGTCTG
+ATTAAAAGAAAAAGATAACGAGTGTTAACAATGGGGAGAAATTGGAACACTTGTACAGTGTTTCTGGGAAAGTAAAACTG
+TGCAGCCTCTATGGAAAACAGTACAGTTGTTCCTCAAAAAAATTAAGAAAGTAGAACTACCATATGATCTAGCAATCTCA
+CTTCAGGTTATTTATCTGAAAGAATTGAGATTGGGATGATTCTCAGGCTCATTGCAGCATTATTTACACTAATCAAAATG
+TGGAAATAAGGTTAATGTTTATTGGCAGATGAATAGATAAAGAAAATGTGGTATATTCATACAACGGAATATTATTCAGC
+TTTAAAACAAGAAAATCCGGCAATATGCAACATGGATGAACCTAGAGGACATTAATGCTACGTAAAGTAAATCAGTCACA
+GAAGGACAAACTGTATGATTCCACTTGTATAAAGTATCTAAGATAGCCAAATTAATTAGAATCACAGATTAGAATGGTGA
+TAGCCCTTTAAGTTGAAATCAATTTTACATTCCTGGGTTTTGTAGTTTTGCTATCAGGATTCTGACTCATAAAACAAGTT
+TGGAATTCCCCCCTTCTTCTGAAATAATTTAAAAATTTTATTTTTTTAAAACGTTTGAAAGAATTCACCAGCAAAACTCT
+TTGGATGTGGAGTTTGCTTATTTTTTTATTTCTTTAGTTTTATGTTTTTATGTGCAGGTATTTGACAATTTAATTACTTT
+AATAAATGTCAATTACTTCATATATTCCATTTAATCTTGTGAAATTTTTTATAAGTTTCTTTTCAAGGAATTCACCCTTT
+CATCCAAGTAATGAAAATTAGTATAAAATGTTTCACAATATTCTATTATCTTGTTAATGTCTCTTGTGTCTACAACTGTA
+CAATCTCATTAATCCCTGATATTTATAATTTGCGTTTTCTTTCTTTCTCACAGAACTAACTTTTGGCCATGTTAATTTTC
+TCCTGTGTCTGTTTTCTGTCTCATTGATTTCTAATTTCTGTTTTATTTTCTTTCTTCTACTTGTTTTGGACTTAATTTGT
+TCTCTTGTTTTAGATTTTCTTTTTAAGGTAGAAAATTAGTGCACTTGTTTTGAACTTCTCTTTTTTAGAATAAGCATTTA
+TACAAAAAATTTACTCTGGGTCTGCTTAACTTATCCCACATATTTTGATATAGTATATTTTTATTTGTATTCAGTTCAAT
+GTATTTTAAGGTGTTTCTTATGATTTCTTCCTTGAGCTAAAGGGCATTTTAAGGTGTATTTAAATCTTCCAATAGTGTAG
+ACTTTCCTGGGTAGCTTGTTTTCACCCATTTCAACTCATTTTCATTATGGTCAGAAAGTATACTTTCTATGATTACAGTG
+TTTTGAAACTTCTTGACAGATATTTTGTGGTCTATAGCCTATTGATAAATGTTTCATGTGTAGCATAATAGAACATATAT
+TCTATTTTCTTGGTCAATATCGATTAGGTCAGGGTGTTGTCAAAATTATTTTACCTTAACTTTCTTTAGTTATTCTATCA
+GCAATACAGCATTATGTCTTCATAATTACTTGACATTTTATCATTATGAAATAGCTTTCTGTTGTAATATTGCTTGTTTG
+GAGGTCTACTTTTTCTTATATTAATATAGCCACATAAGATTTCTTGTGCTTGTTGTTTCCTTGGTATACTTGGAAATTAT
+TTAACTCTCAACTTATCTATACCATAATATTTGGCATGCATCTCTTATAGGTACTTTTATTATATAGTTGGACAATCTGT
+GTTTTATTTGGAGTATTTAAATTACATTTGAAATAATTATTGATAAGGTTGGAGTTAAGCCTACTATTTTTGCTTTTTTT
+TTTTTTTTTTGAGACGGAGTCTGTCCCTGTCGCCCAGGCTGGAGTGCAGTGGCGGGATCTCGCCTCACTGCAAGCTCTGC
+CTCCCGGATTCATGCCATTTTCCTGCCTCAGCATCCGGAGTAGCTGGGACTACAAGTGCCCGCCACCATGCCCGGCTAAT
+TTTTGCATTTTTAGTAGAGATGGGGTTTCACCGTGTTAGCCAGGATGGTCTCGATCTCCTGACCTCGTGATCCACTCGCC
+TCGGCCTCCCAAAGTGCTGGGATTACAGGCTGAGCCACCGCGCCCAGCCTATTTTGCTCTTATCTTCTATTTGTTCCATC
+ATTTTTTGCTCCTCTGTTCTTTCTTTCATGCCTTTTTATATTAACTGAACAATATTCAGCGTTCCATTTTAATTCCTTTA
+TTGGCATTTTAGAAGAATATCTTCATATAATGTGGTGGTTCTCTAGGGATTGTAATAAACATCCTGGGCTTATCGCAGTC
+CACTTACTGTTAATAGTCAACGTTTTCATGGAACATAAAGAAAAGTTGTGGCAAAATTGTTGCGTTTAGTCTCCTGTCAG
+AGCTATTAATTTAGAATATTTTACTCTACATACGTTATCAATAGTACTCTTAATTTTTTATTTAAACCATCAGTTGTTTG
+TTATAAAACTAAAAGAAAAAAGCCTTAGTTTTTCCTGTTTATGCACATGCCATTCCTAGAGCTTCTCCTTACTTCCTGTA
+GGTCAGAATTTCCATCTGTTGTTATCTTTCTTTAGTCTAAAAAATTTCCGTTTGCATTTCTTGTATCTAGGTCTGCTAAT
+GACAATTGTTCGAAGCTTTCTTTTATCTGAATAAATCTTTATTTTCTCAAGAATTACTTTTTCTAGTTATAGAATTTGGA
+GCTGACAGTTTTTTTTTTATTTTGAAGATGTAGTTTCATTGCTTTTTAAAGTGTCATTTCTGATGACAGGTAGATGACCT
+ATTTGTTTCCACGTATGTAAATTATTTTCTCCCTCCCTTTAGCTACTTTCAAGATTTACTCTTTTTTTGCCCAGTGTTTT
+GACTATGGTATGTCTACATTTGGTTCTGTATATTTTTATTCTGTTCTTTGAGTTTCTTAAACCTGTAAGTTGATATATGC
+TGACAATTAGGAAAACTTTTGGTCTTTACTTTTTCAAATAGCTTTTTCTGTCTCATTTTCTTCTCTACTCTTTCTAGGAG
+TCCAATTATATTAGTGCTAGACTGGTCATATTTTTATTATTTTTCTCCTTTGTATTAGCTGTATTGGGAAATTGTTCTTG
+ATCTGTTTTTAAGTGCACTGATAGTTTTTTCTGCCATCTTTAATTTACTGGTATGTGCATAAAATGACACTTTTATATAA
+GATATTCTGTCTCTCAGTTCTAAAATTTGTCCTATTATCCCTTTATCATTTTTACTTCTCTGTTGAGATTCTCCATATAT
+TCTCTCTTATGACCATCTATTCCTTAAATCCTTGAATATGCTTATAATAGCTTATTTTAAATTTCTCATCTTCTAATTCC
+AGCATGTGGGCCATTTCAATGTCTTAATCCATTGTTTACTTTTTTTGTTATGTGTCATATTTTCCTGGTTCTATGTCTAG
+GCAGGTTAAATTATATGTTAGATTATGTGTATGATATTTTGTAGAGATAGGCTCTTTTATTTTCCTTTGAAGAGTGACTC
+TTTTCTAACATTAGTCTTCTTTCTGTAGTCAAACACCAAACTTTCACTCCTGAGCTATATGCAATGGTTGAAATCTCTGC
+TCTGTACTAGCAATTTAGCTGTTGTTTTCTGCTGGATTCTATGGAGTCTCTTTTTATGAATGTGAAATGTAGCAGCCATC
+ATATATCTGAATGAAGTTTAAGTGCTGATTTTAGATTTTTACTCTGTGACATTCTCCTCTGTGACTTTCAGTTGTGTGAG
+ATTTCCTCCCATGTCATTCAAATTTCCCAATTCTTCTTTTCTGTCAGCCTGGAACTCTGTACTCTTATTCCTCAAGCTAG
+TAAAACTCACCGCTTTACTCTTAGACATCTAATTTTGCACAGACTTGGAAGTGTCATCAGGTGTGAAGTTACATTGATGC
+AAATTTAAATCATTGCAATTTTCTTTATTCAATGGTCAAATACTTCATCAGTGGTTGAATATTCTACTATTTCTGCCTGT
+TTTCTGTTTCTCATTACCATCAAATGTGTATTTAATTTTTTTTAGAGTTAATAGTTTTTTTCCCTGCATTTGAGTTAGAC
+CTGAGCTACTACCACATTTTGTAATTCAAACTTTTTGTCTAACAATGTTTTAAAAAACTCTTTATTTGGAGGGAATTCTC
+AAATTTCTAAATACTCTACAAACATTAGGATCAGGGCTCACATTCCCAGTTTCCCTTGTAGCCAGATGTGCATATCGAAC
+ATGGCCTCAGCTAATGAAGCATACCCACATATTTGAAAGTAAGAAAAGTGTTAAGAGGAACTGTAGATATCATTTTAGTT
+GATGTAAGGATGGAAAATAAATAACCAACTTTCACTGGTGAACTTTGTCTTCAATCATTGTTTTCCCGTGGGTAAGCAGC
+TAATTTTCTGGCCTAGAGGTGAAATTGGTGTGTTGGTAAGAATTGATTTAATGATAACCTGCTTTTGTCTTTTTAAAAAT
+TGTGAATCCACCATTGTTTCTGGTCCTGTTTTATACTTATGCAGTGTATAATCTCTCATGCAAAATTTAGTTCTTTGTTT
+CTCAGCTTAAACTGGATTTTGATGAGCAGTTTTCACCTTCAAATGAAAGAAATGGCTTTGGTTGATGTTAGACAAAAATA
+ACAAGAAGGAATTAAAGAAATTTGTTTGTCATCTTGTAACATTAAATAAATGAAACTGAAAATTCAACAATTCTGTTTCA
+TAAAAAAGTAGAAAGTTTCTACTGTTTTTCATTATTTTCAAAGCTATATCATTAAATGAAATTTCTGCCTCATGGAAGGT
+TTTTAGAACTATGTAATAATACCATACAGACCATTAGGAGAGAAATGAATAAAGACTGAAAGAAGTAAGCACTGTGTGGT
+AACATAGATGGTGCACAGTTGTGTGTGTGGAGCCCTAATACATTTGACCATGGAATTTATACAATAAATGTCAGTTTCCT
+GTGAACCTGCTAATTAATATGACTTTCGCAGACCTTTCCTTAACCTAAACTGTTTAGGTTTCCTGTCGACAGACTGCATG
+TATTCTCACCTTTAACCTTTCCATAATCTTCCCTAATTACAGATGTCATTGGTTCTTAATAGCAAAAGAACAGCAGTAGC
+TTATAAAGTTATATTATTTGTTTTTTACTAATATGTGAAGTGATTTTGTCTTTCATTTCACCTCTAGAGCTAATTATGTC
+TTCTAGGTTAACAAAAATTCCAGATCATACCATCATTAGTTTAAACTTTACAATGAAATCATTATTTCCATTTATTACTA
+GCCTGCATTATAGCAATGTTTAGCTAATGAATTCTCAAAATATTTCTTGAAAGGTGAAAGAGTACCTCAAGTAAATGATG
+TAATTAATTTATTTATCAGAATTTTAAATTAAGTGTTCTGTTTAATCTCAAAGGGGTACCAATGAAGTCAGGGCATAAGG
+ACTCTGGTTAATAAAACGGTAAGAATTAAAAAAGATTATTACATTTTTCCTCATACTATGATGGCTATAATTTTTAAAAC
+AGTAGAAATGTTTAATTTTTAAAGGCTACTAGTGTCTATTTCATTCAAAGATGTGAGCTCTTCAGTTCAACTTGTTGTTT
+GAAATTTGAACAGCTTTTTCTTGCTTTTGTTCAATACAGTTATTTAACAGTCCAGGCAGGCCTCCGGAGTTACATATATG
+TATGTGGCTATGTGTGTATATATACATAGAAGTACAAATAGCATATAAACAATTTACATAAGACTTTTGCTTTTGATGAA
+AATTCTTCTAAAAGTGAATATAAATTAATAGCATATGGAGCCTACAAAAATTGTCCAATAGGGAACTAGGAAAAGATAAT
+TTGTGAAGAAAAAGTTTGCTAAAACAGACTTTTTATTTTTACATGTAATAACTTTATATTGAAGGGTTTCCAGAAATATA
+AAATGTTATAGGGCACTTATCCTGAGACACATGAAGGAGACAAGGGATAGAGCTAGGATGGAAGTGAAAATAAAGGACAA
+ATAGAGATAGATACTTAAGTGGAAATGAAACACTGCATGTTCTCACTCATAGGTGGGAATTGAACAATGAGAACACATAC
+ACACAGGAAGGGGAACATCACACACCAGGGCCTGTTGTGGGGTGGGGAGAGGGGGTAGGGATAGTATTAGGAGATATACC
+TAATGCTAAATGATAAGTTAATGGGTGCAGCACACCAACATGGCACATGTATACATATGTAATTAAATTGCACGTTGTGC
+ACATGTGCCCTAAAACTTAAAGTATAAAAAAAAAATCATCAAACAGGAGGTAATTTCAAAACATGGCTTTTCCAGGCAGC
+AAATTTTCTGAATGTATCGCAAAAGAAGATAAATTGCATGTCTATGTAGATCATGTCAATGAAATAAACAATAAAGTTAC
+TTATTGGAATATTGAAAGTGGTTGATAATAAATAAATTATTTATATTTATGGTAAAAGCAATAAACTTGATTTTTAGGGG
+GTTCAATGACTTCCTGAAAATATCCTCTCACAGATTTTCTCAGAGTGTCTAGTTGGCAAAAGCACAGCTCCAAATAGCTA
+TTTCTGTCAACGTCTACAGAACATTTACAGTTTGATTTCTAAAAGTTTCCATTTCTACTGACCTTACAAATTTGTAAAGT
+GAATGCCCACCAGGACAAAATATTAAGTGATAAAATTAATTGGATCCTACAAGTGCAAATCTAGGCAAGGAAAATCTTCT
+TCCTTCTTAATGAATGATTTTTATGTCTCTTCATTCTTGTCCTTCTAGGTAGAGCAACACTCTGGAGACCTTCTTTTGAG
+GCATTTCACCACCAGAGTAAGTGTGATGTGTGAGTAATTAGTCCACCCACAGGGAGCTGGTGAAATTAAATTGGAAGTTA
+TTTCCTAACTGGCATATCTTGGAGTAAAATTAATCTAATAACAGCTTTCATAAGAGAGCATAAGCTTAAAAGACAACATT
+TTTCATTCAACTTTACCAAACAAAGTGCAAATTTAGGTGTCCAGTTGTGAAGTTTTGTTGTGTGTGGATTGTACAGTGAA
+GGTACCAGTGGACAACAGTTGCTCATCAAGCAAGTTTATACAAAAGCTTTCGGACAAGTCCAACTCTAGATAAAATTCTA
+AAGTATTTCATGCTTGTGTTCAGAGTTTCTTTTTCTCTCTCCAAGGTAACCCACATATCAAAAGACATATGGGTTGTGGG
+TAGAACTTCCTAAAATTGCTGGTGAGAAGTGTGCCATGCATAAGCATACTTCACTAGCTTGAATTTTCTGTTAGTCTCAC
+AGGCAACAATTACAATCCTGTATGATTTTCTATCTCCACACACTCCTGAATATAGAAAGACCAAGTAACATCCCTGGTTA
+AGATGTGTACAGGTTCCAAGACATGTCTAAATATATTCACCAAGAGGTTTATTATTTTCACAGTGGCATTCACTAAATCA
+GTTGTCAGTGTAGCATTACTCAAGGAATAAGCAGGGTCTTTAATTTATCAAAGTTTGGAGTGCACCCCAAGTTGGATCAC
+TGAAGCACATAACTATAGATAAGATCACTCAAAAGCACAAATCCAGGTAATAAATATTCACTAGTAGTTTATATGCATTT
+AGCAATTTGAATGCTGGGAAGTGTAGCCCAGAAAATCAATCGACATGGAGCTATTAAAGAGGCATTCATGGCACCTGCAC
+TTTGAATCTCTTCAGACTCAGGTTAAACAGGAGACATGTTAGCTCATGTATAGTGCAAAAACCCGTCCTCTTTCCCTTTT
+TATCTATGAACCTGCCCTTTTCAATGTTATCTAGATGCCTGAAGGTATGAATACCCTTGATCTTGGTAAAAAATGGTACC
+ACCCGTCAGCAAAATCTCACTGATCAGTGTCTATGTTACCTCACTGAGTTAGCCTTTTGTGTTGTTGTGGCCCAGAATGA
+CAATGTTGACACAATCCAAACCAGTAGTTGGGAAGTTAATGTTGGATGCGTCTTTGACAATTGATGGAATGACCTGAAAT
+CAAATGTGAGGCAGTGGAGACACAAGAATGCTATTCAGGAAGTTAGTGATCTGTGGAGATACTAAATGAAATATCTGGAA
+GGAAATGTAATCTTGCAACTATGCTTTTATGTGTTTTTTGACATAAACAGTTTCTATTTATGGTGGAGCTAGAGTGTCCA
+TTTCCCATGAAGTTCCCATAGTGTTAACTAATACAGTCATGTGTCACTTAATGACAGGGGTGTGTCTGAGAAATGCATTG
+CTGGACGATATTATTATTGTGTGAACATCACAGAGTTTACTTACACAAACCTAGATGCTATAGCTGATTACATATCTAGG
+TTAGGTGGTATAGCCTATTCCTCCTAGGCTACAAACCTGTGCAGCATGTTATTGTACTGAATACTGTAGGCAATTGTAAC
+ACAACAGTTAAGTATATCTATACTTACTTATATCTATACAGTAAGTGTATCTATACATGTCTAAACATACAAGAGGTACA
+GTAAAAGTACAGCGTTATGATTTTATTGCACCACTGTCATACATGTGCACTATCCTTGACCAAAATGTTGTTATGTGATG
+CATGATTGTAACAAAAGAATTAATTAAATATAATATTGAAGTGCCTATTGAGATTTCAATGAAGAGAGGAATATTTGTAA
+ATTCTGATTACCTTAAGTGGGAATTGACTTTCTTCCTGTTTCCATGGCTGTTCTTGTGAAAGAGCATAGCTTTCCAAAGA
+CCTGAAATCTCTGACAAATCTTGCAATTCTCTATTGCCTGCATTATGAAAGTCACCTGGTATCAAATGAGGCAAAATTGT
+AGATTATAAAGACCTGTACTTGTCAGTTCCTAAACATAGCAAGCCTTGGTCTATCTTGAACATTTCTGCAGTTATAAATG
+AGCCTTGGGTCATGATTTTCTGCCTTTTTATCATAGACAAGATTTAATTTAGGAGATGTCCTTTTAATGTGTAATGTGAA
+TAGTAAGTGACACTTATGAAGCCTATTTTCTTCCAGCCATTTTAATTGTCAAATCTGTCCAGTCAAGATGCATTGTTAGA
+GGCTTCACTGACAACACCGTCTGTGTGTGTGGCTGTGTGTGTGTGTATACATACACATGGTTGAGCAGGATTTATTGTAA
+GTCAAAACAGTCTCAAATTTCTTTGGACAATACAGCTTCCTTGGCCATTCCCTTGGAGCTCTGAGTTGAGGAGCTGGTGA
+TGTGTATTTCTCTTGCATGTTCTCCACATTATCCCTATGCAGGTCTTCCATGGGCTTGGTTTTGGAACCAGTGATGAGGA
+AGATCACTAGAATTGTTGTTGTCAAAAAGATGCCTCCATTCTTCTGTATAAGCAATGGCAATTTTCCCTGACCAATATTT
+CTTCTTTTGAAATGGTAAATTTTGAATATTAACTGATTAATATAATCCCATAAAATAACAAAGTTTGAGCCCTATGATCT
+GTATTTTAGGCTAAAGTTGTGTTTTATGCTAAATCATCATTTACTCCATGTATGATCTTTTTTTTTTTTTTTTTTTGAGA
+CAGAGTCTCACACTGTCACTCAGGCTGGAGTGCAGTGGCACAATTTTGGCTTACTGCAAGCTCCGCCTCCCAGGTGCACA
+CCATCCTCCTGCTTCAGCCTCCTGAGTAGCTGGGACTACAGGTGCCCACCACCACACCCAGCTAATTGTTTTTTAAGATT
+AAGGGCGAAATAAAAGCTGCTCTCCTATATTACATTCATAAATCAGAATTATTAAAGCAAGCTATTTATTTATTACAAAC
+ACATACTCCAAATTTAGCATCTAAGACATGACAGTCTTTCAAGAGACATCAATTTACTTTTTAGGGTCTATAGCTATATT
+TGTATTCATGTTATTGGTGTCCATAATATTTCATTAGATGTTCTGGGTAAATTGATAAATGAAAATAACGTATGTGATAA
+GGAGAACAACCTAGGTCTCCTGGAGGGCCCCTTACAACTCAGCTGGTGTTAGCAGAAATGCCTGGGTGTAACAGTGAGAA
+ACAACCTTTCAAAAGTATTGAGCAATTTTTCAGGCAGAGGAATATGTATGCCAATCCAGGCAGCCAGATAAGTGGAGGTC
+AGTTAAAAGATCAGTGTGTGATTACATTTTTTTTTTAAGGTTTCTACTGAGAGGAAATATAACTCTCAGTTATTATTTTC
+TCTGCATATCTCTGGTTATTTTGTCACAACTTTTTATGTAAGCTACTACCAAACACTGGGTTATCCAAGATCTATGGGTC
+GTATTCAATATTTGCTTTTCCCTCAACTACTAACACGTTTGATTCCCTTTGTAACTTTCTCCCCTATTTCTCCTTTGAAC
+CTGTTTTCCCCAACCACACTGAGTTAATAAGCCTCCTATATACTCTCATACCCTGTTTTATCTAAAGAGATTTCCTTTTT
+CTACTATTATACCCATCTCATTCCTTAATTTTTTCTTCTTATTTCTCTCTCTATTTTTTACTCTTTTAGGCACAGCTCAT
+GTCTCATGACTTTCAAAAAGTTTTTCTTCTGATATTCCCAGCTCGAATTAGGTATATCCCAATCAGTTTCCTTAGAATTA
+CAAGTCTGCATATTATTGGCTTGCAATCTGTCTTAGGCTGGGAATTGGCTCCTTGTAGGCAGGGACTAGAATTTATTTCC
+TCTTATGTCCATAATGCCTGACACAGGAACTATTCTCACATACAAATTGTAGGGCCATAATTCCTGAGAACATATGTATA
+TTTTAGTGACTCATAAAGGCTTGAAATACATTTTTAAAATGATTAATTGGGAAGTCCATGAACTTGACATCTTGATGTAG
+AAGAAAAGCAAGGGCAGATGAAGAAGTTATACACGATACTTGAAATACCAGAAGAATATAATCAGAGAGCAATAAATTAA
+TCCTAGATTTTTAGATGTGAAGAAGTTCTGAGAAATATTTGAAGACTTGGCTCTAGGGATGAGTACATAACAGAGACCAG
+TAGCCAGACTCAGAGCTTTAAAGAGTGAAGAAATGGCATTGCAGGGGGAAGGACTGTTGTATACTAAATAGAAATTGTCA
+TTAAATGGAAGACTAAGCAAGAGGCTGATGATCTGTGGATGAGAGACCAAAATGGTGTGGAGATAGAGGTAAATGCCATA
+ATCTACAAAGAAGATATTTTTGTGCCAGAACGAAATGATAGGTCTAGAATGTAATAAAAGATAAATAGTTAACTACATTG
+TAATGTGGTCTGAGAACTCACTGAAGGCATAGCCATTGATATTAACTTCCACTTTGCTTCCAGACAAAAGATTTTAAATA
+TCTTAAAAATTACATTTGATATTGTATGATTCATCAAGACAAAGTAAAGCTGTAGTTGCCCTGGGGGGGTGGGGAATGGG
+GAGTCACTTTTAATGGGTACAGGATTTCAATTTTAAAAGACAAAAAGAATTATGGAGATGGGTGGTGGTGATGCTTGCAC
+AACATTGTGAATACATTTAATGACACCGAATTGTATAATTAACAGTGGTTAAGATGCAAATTGTTGAGTGTGTTTTGCCA
+CAATTAAAAAACAGAAAACAATAGTATTTGAAGATATAGCCCATTTCATCTGGTAGCTAGCTGTATGTGTCTGCTCAGCT
+GTGGATAGATATTTATGCTGTTTCTGAACTGGAGGTTACTGGTATTCTCTAAGGTATAGGAAAATTGATCTTATGTGGCT
+AACTGAGGTGGATATAGTTGTATCAGTCAGACAGTAATGATATAATCATGAGAGTTTACATTTAACACAACTTACATGTG
+TTATACTTTAGAAAGCACTTCATAGATATTATTACTTCATACCATTTTCCCCTTACCATATTTGTCTCTCCTTTCTCTGC
+TGACTTCACAAAATGTCTGTTCCACACTACTCAGTTCATCTCCTTATTTTGTCAAAATTTTAAGTATCAGAGTCTCCAAA
+AAAAAAATTCCAGACAAGTCTCATCAGCTTTCGGTCATTGATTAATCGATGCTCTTAGGGGGTTTCCCTTCCCCTTCTCA
+ATTTAACATAGCTTCCCAATTCACTTGCCAGTAAAACAACGTTGTTTATATGGTTTTGTATTTCTTCCTGCTGAATTATG
+AACATTTTTTCATATTGACATTGATTTCCATCTGGCCATGAAATACTCAAGGTGAAGAAAATGAAACAAAATTGTTAATA
+TTTATCAAGAGCCAGGTACTGTTTTCAGTGTCCTAGAAGCATAAACTAGTGTAATGCTCACCAGCTCATATGAGAGGCAA
+AAATGGATGAAACTATAAAACCAAGACACAGAGAGTTAAGGGAAATTTCCCAAGATCACACAGCCACTGAGGCTATGAAC
+TAGATTTTGACTTCAGGCTGTTTACCCCAGAACCTGTGCTTTCAACCGTCATTTGTACTGCTTCCCTAAATTACACTTGA
+GTACACTGTGACATCAAATTTCACCTATGACACTGATTATCTGGGGAACTTTGCAACTTATTCTGAATATGACTCAGTTT
+TATTTTCTGTACAGAGGTAAAGTCAGTATATACTCCATAGATTTGTTATAAGGATTATATGAAATGACACAAGAGAAGGC
+TTTAAAAGGACTTGGAAAGCAATAAGCTTTTTTGTTTTCAATCATTTTTCAATAGGTTATCAGTGTTGTGAATGGTTCAT
+GAAAAATGACTTTGGGCTCAGTGAGTAACTGAAATGTTGTTGTTTAATAGTGTGTTTAATATTAATATAATAGTAACAAA
+TGTTGTTATTGTTGAGTAATAGTTATTTGGCTTTTCGCTGCTTTTCTTTTCTAGTGTTTTAGTCAGAGACTTCTTAAGTC
+TCCTCAAGTGTGTACACATTTCTTAAATTTGTTGGAAAGTACCAAATTTAAGATATCTTGCATTTTTTTTCTTCCTTAGT
+AAACACTAAATTCCATCACACAGCAGAAAAAGGGCTCTGTTATTGTCGGTACAATGGTTTAATATTCACTTTTATTCCCT
+ATATCATAGTTCTGATTTAATTACTATTTAGAATAAAACTTGTTCCTTATTATATGCATTCATAATTATAAAATTTGTGT
+TTTAATGCCTTCTACAAATATACTGTGCTTTTCTTTATTTCCTTATATCTTTTACTGTTGCATGATCCTGGTATTTGAAA
+TCTCATTAAGTGTTGTTTGGGGATTTGCTGGGAATGTCATCTACATAGTAATGTCTCAAGTTTGTAGCATCTTTAAAACA
+TAATAGGAGACGGAATTCACAGCAGAGTAATAAGCCTCCCACTTTAGGCAATATATTTGTGATTGACATGACTTTGTTCC
+CAGAGGGCAGTTAGCAACACAATATTATGGCTATGTGTACAAGGAAAACAAATTTTAAAAATCTCCTTTAAAAAAAACAG
+TTCCCAATCCTAACCAGTATCATGCACAGAATCCGTTTCCACCTCTTCATATGGAAGCAGCTCTGGGTGGACATCTAGAA
+AGTTGGATAACTATTTATCACATATGATCTGCAAAGGGAGAGATTTACAAGAAAGTATTAAATTTGATCCATAGGAAAGC
+AAAATCAAAAATCAATGAATTTTCCATTTGTAGTTTTCCCTAGGGGCTCTACTCTCAAGATGGCTTAAATGCATTAGAAT
+TCAATGACTAGGCACAATTGTTTGTTAGTGACATAAAAGCAAGGTAAGCATTTCTGCTTTATGAGTCCAAGCACAGGGAC
+ACTTGGTTTTATTTGGAAGGATATCATAAAGGCAATGTTTATGAGTAATCCCTCATTGTCTTTTCTTCTGTAACATTCAA
+TGCTATGTACTAAAATTTAGAAATGAAAACTACTTAAAGACCACCTGCTAAGTTTCATGTATTTCAAATTCTCAGATCCT
+GAGTGTTTAAATATGTCTGTATTTCTTCATGAAAAAAACATAATCTTTGCCAAGTTAGAAAAAGAAAAACTCATATTCCC
+ATTTTTAACTAAAGGTAATCAGAAATTTGGACAGTTTTTAGCCTTCAATACATTATGTAATTTGGGTAACAGGCTGAATA
+AAAGTAAGGAACTTCAGTTAAATTCAGAGAGAAGCTAGTAATTATTTCTAAGACTCAAATGTATAGAACAGATCTCTGAT
+ATTAAGGATAAGACCCTCAAAGGTGACACAAAATGGGTAATTGTGGGATTATTTTAGGAGGAATCATCATATATTCTTAC
+TGCTATAAATACAGTCAAATGTCAGATAAATATACTCCACTGAAATGTTTTCTAGGAATGATGTTGGAACCAATGTTGAT
+AATTTTGAAAGTCATGGAGAGGATTTCAGATGATAGAAGTCGGGCAAATATCCCAGGTTTAATAAATAAGCAAATGGTGA
+GTTCCAAAGCTGCTGTATCAGTGAGCCTCATATTGATCCTGAGCACAATTTCAGAAAGGTCCATTAAATGGGTCTTTGGA
+GAGCACTTAGGATAAGAGGCTGGCAGCAAAGATTCACTAAGAACAATGGTGCCAAGCTTGCCTCTTTGACATTGTTATCT
+GACTGATAAAGGTAGGCTATAGCGGTACACAATACAGATATTTCCAGGGTAAAGCTGTTGAGCTGCTGTAATTTTCCTTG
+CCTCTTCACTATCTTCTCCTATAGTTGACTCCCAAGGAAAAGATAGTTGACTTTTAATCTAGAAATGTATGTTAATTGGC
+AAAAACAAAAAAATGAAACCATAAAATATCCTTTAGGCTTCTCTCTGCTCAGTTATTAGATCATATGAATGTGCCCTTAG
+GAAACACAGAAGGCCCACACTATTATAGAGACAAATTTCTTACATTTTCTGCTGTTTAATGCTCTTCAAAAGACTTACCA
+TACTTCACTAATTTGATTACCAACTACTTGAACACAGGGACCAATGTGTTTATTAAAAATTTTGCATATTATGAGGGCAT
+TAACTTTTAATTATTCAATTGTTAAGACAATGCATATTATTATAAACCATAGAAGGCACACTTATTACCTCCTTAAGAAC
+CAGTCTCTATTTCTCTGGTTCATAGATCTCCAAGTTTATAGAGGGATTGGACATTTGTGTCTTCAATTGGCCCTCTTCAG
+CCCTAGGATAAATAGTGATTAATCTAAGCCAATAGTTTTCGGCTCTGTCTGACCCAATGCCTCTACTTCATAAGAAATTC
+TTCAAAGACCCCTTCGCTATTCTAAATTATTAGTGAGAACACAATGTATGCATTCAAGTTGTTGAAAATATTAATGTTAT
+GCTGAAGCTAACAAAGGAGAAATAATTTACTGTCAATATATAATCCAAAATGTATATAATTGTGCCAGAACATTTACATA
+TCAAATACAATAGAACATAATGAAATTTTCATATCTTCGCCCATACCTTGAAACATTGTAAATAAAAAGTTACAAATATA
+GTTTCATTGAATAACCAAATGCCAAAACTGCCATTTCTATTGTTGACATGATTGAACTGCATTGGTAAATGTAATAGATA
+CTTTCTGTTTGGCCCTTGTCCCCTGTTATACTCCCCTGTTTACTGGCTTTTGTCCAAGTGGAATGGTCCATATTGAATAT
+CTCCATCGGTTCCCTTGTTCATTGCCTTCTATGCATGTTCATGAGATTAAAGGGAAGGAGGAAATGGGATTGGGGGTTAT
+TGCCTAAACTCCCTCTCTGCTAGGTCTCCTGGACCAATCTTTTTCATAAGAAAATTAGTGGGCCGGGCGCGGTGGCTCAC
+GCCTGTAATCCCAGCACTTTGGGAGGCCGAGGCGGGTGGATCATGAGGTCAGGAGATCGAGACCATCCTGGCTAACAAGG
+TGAAACCCCGTCTCTACTAAAAATACAAAAAAAATTAGCCGGGCGCGGTGGCGGGCGCCTGTAGTCCCAGCTACTCAGGA
+GGCTGAGGCAGGAGAATGGCGTGAACCCGGGAAGCGGAGCTTGCAGTGAGCCGAGATTGCGCCACTGCAGTCCGCAGTCC
+CGCCTGGGCGACAGAACGAGACTCCGTCTCAAAAAAAAAAAAAAAAAAAAAAAAGAAAATTAGTGTTTCCTTCTGTGATC
+AGGGCAGTTTACTCCCCAGAACTCTGCCTGCCACTGCTAAACCAGAGTTATTGTGGGGTGTGATTTCTGTGATTCCTTCA
+CCCTTCTCCCCTGACATTTTCGTAAGTAGTCTCTTTGGAAATAAACCCAAGTTAAATTATCTTAGTTGGTGTGTTTTCTA
+TTTTTCCTATTGAGACTCTGAAATGAGTATCTAAATGAAAAAATCTACCATCTTTTCTCATATACATAGTAGTTTCATTC
+TTGCAAAATTCATGGCATATTAAAACCATGCAAAAGTACTTGGCACTTAGGCAGTTACATTTGGGATTGTATAATTAAAC
+AAAGCAGGTTATTCACCTTCATTACATTAGAAAGTCATTCAGGACATACAGCAATCCTTAATTAGGAGCGGCTATCCCAT
+GTTACTGGACATATGGTATCCCTGTTTTCCAATCCTTAAAGTTTTTCCTTAAAGTTCCAAAATGCCGTCACTTATTATTT
+CCTGCCATTGAATTTTGTCACATGAAGTTGTAAGGCTGGGAATGCCATAGCCATCTTGAAAACATAGGTAGAAAGATAAG
+ATAATCACAAAGAAGTGGATCATGTTGAATTAACTAAGCATTGATACCATCTATTTGTTGTGCGATTTGGTCAAAGTTGC
+TCCTTGTTTTTTTCTTCAATTAGTTGACTATTCTGTTACTTGAAGCTAAAAACATTTTTTTCTTTTCTTTCTTTCTTTTT
+TATTTTTGACAGAACCTTACTCTGTTGCCCAGGCTGGAGTGCAATGGCATGATCTTGGCTCACTGCAACCTCTGCTTGCC
+TAGTTCAAGCAATTCTCACACCTCAGCCTCCCTAGTAGCTGGGATTACAGGCACATGCCACCACACTCAGATAATTTTTG
+TATTTTTAGTAGAGATGGGGTTTCACCCTGTTTGCCAGGCTGGTCTCAAACTCCTGACCTCAAGTGATCTGCCCACCTCA
+GCCTACCAAAGTGCTGGGATTACAGGTGTAAGCCACTGACCCTGCCCAAAAACATTTTAACCAATAGCAAAGCACATTAT
+TTCATTATATTTTTTCAAAAGTTGCCAGTAATTTCAATTAAAAATATTATATAACATGTTTAATGCAATACAAATGCTTT
+TCAATGTACAATGGGGTTAAGTCTTGATAAGCTGATTATAAATTGAAAATACAGTAAGTTAAAAATGTATTTAATATCTA
+ACTTACTGAATATCACAGCTTAGCCTGGCATACCTTTAATGTGCTCAGAACACTTACATCAGCTGACAATTGGGCAAAAT
+CGTTGAATACAAAACCTGCTTTATAATAAAGTATTGGATATTTTATGTAATTTGTTGAATATTGTACTGAAAGTGAAAAG
+CAGAATGGTTATATTCTATTCAAAGCTATCACTTTTGCTTCATCATAAAGTAAAAAAAATCATAAATTGAACCATTATCA
+GTTGTTGACCATCTGTGTATATGTATGTGTGTGTGTACCTATATAAACATGTATGTTTATATATATATTTGTGTGTGTAT
+ATAACCTCGGTGTGTGTGTATGTGTGTTTGAGAGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTATGAAATTGTAGGGA
+AGTAAAATTTCTTATCTTATAGATATAGCATGTTAACATGTTGGTACCTATCCTCCTGAGATGTAAAATTTTACATGGAT
+AGATATGTACATTTACAGTTGTACACAAAGAGAAACATATTCTGTAGCCTCTTTGTTTTTCTGCAGTTTCATTCAACACT
+ATCTGGTTTATATGGGTGATTACATATATAATGCTAGGATTAAGGTTCTTCTCTATAACTTTGAAACTTCCTTGATTATC
+AACCCTTGGTATATTCTTAGAATAGGATTTTATTTTAGAATTAAGATCCTTGTTTCAGGACTCTAGACTGTTAACACACA
+GAGCCAATAATTATTCAGAATGGTTGGGTTAGTTCACATTCCCACAGAGGAGACATCAAATGTCCAGGCTGCACAGCCTC
+ATTAACACTAGATTTTCTGGACATTTTATATAGTATTTCATCTGATATAAAAAGTAGAATTCTAATTTGATTATGGTTGT
+TAATTTATTTTTACATTTTTAGTCATTATGAAAATGTTTTACTATATGAAAAGCCAGGAAAGGGTTTTTTTTTTAAGGTA
+GTACTTCAGAAATATTTATGATACTTAATTTTTAAAAATTAACTCTAAAAGAATCTTGAAATGTCTGTTTAATAAAATAG
+GACATAAATACTGGTCACATTAGTGAAAAATATTTGTTTAAAATTAAAATATCTGCAGCAATCCTGTAGATATTAATAAG
+TTTTTAAAGAATGACATCTTCAAAATATAACTGTTCTAAACTAATATGTCTCCATTAATACATATAGATACATATATCTA
+AATAAATTTGAAGACTTCAAATTCCAGGTACAATATAATTTTTTTCTATCCTTTGATTTGTCATTCAGGATTTTGTAATT
+TGAACAGAAATATTTTGCCCTAACCACATGTCTGTCTTTTCTCTTACTAAAAATATTATTTTATGTAAAATTGTTTTAAA
+GGTCAGTTAGCACGTTTGAAAAGCATCTTGTACAATGTTACATAATATATTGCATATTTGGTTACATTGTTTTAACTGTG
+TAAATATGGTTTAAATCTGTGATGCTCTTTAGTGCTGATCATGATGCAAATCAAATTTATCTGTATAAAAGGCTCCTGTT
+AATTTTTTTTATTATACTTTAAGTTTTAGGGTACATGTGCACAACGTGCAGGTTTGCTACATATGTACACATGTGCCATG
+TTGGTGTGCTGCACCCATTAACTCCTCATTTAACATTAGGTATATCTCCTAATGCTATCCCTACCCCCTCCACCCACCCC
+ACAACAGTCCCCGGTGTGTGATGTTCCCCTTCCTGTATCCATGTTTCCTCATTGTTCAATTCCCACCTATGAGTGAGAAC
+ATGTGGTGTTTGATTTTTTGTCCTTGTGATAGTTTGCTGAGAATGATGGTTTCCAGCTTCATCCATGTCCCTACAAAGGA
+CATGAACTCATCATTTTTTATGGCTGCATGGTATTCCATGGTGTATATGTGTCACATTTTCTTAATCCAGTCTATCATTG
+TTGGACATTTGGGTTGGTTCCAAGTCTTTGCTATTGTGAATACTGCCACAATAAACATACGTGTGCATGTGTCTTTAGAG
+CAGCATGATTTATAATCCTTTGGGTATATACCCAGTAATGGGATGGCTGGGTCAAATGGTATTTCTAGTTCTAGATCCCT
+GAGGAATCACCACACTGACTTCCACAATGGTTGAACTAGTTTACAGTCCCATCAACAGTATAAAATGTTCCTATTTCTCC
+ACATCCTCTCCAGCACCTGTTGTTTCCTGACTTTTTTATGATCGCCATTCTAACTGGTGTGAGATGGTATCTCATTGTGG
+TTTTGATTTGCATTTCTCTGATGGCCAGTGATGATGAGCATTTTTTCATGTGTCTTTTGGCTGCATAAATGTCTTCTTTT
+GAGAAGTGTCTGTTCATTTCCTTTGCCCACATTTTGATGGGGTTGTTTGTTTTCTTCTTGTGAATTTGTTTGAGTTCATT
+GTAGATTCTGGATATTAGCCATTTGTCAGATGAGTAGGTTGCAAAACTTTTCTCCCATACTGTAGGATGTCTATTCACTC
+TGATGGTAGTTTCTTTTGCTGTGCAGAAGCTCCTTAGTTTAATTAGATCCCATTTGTCAATTTTGTCTTTTGTTGCCATT
+GCTTTTGGTGTTTTAGACATGGAGTCCTTGCCCATGCCTATGTCCTGAATGGTATTGCCTAGGTTTTCTTCTAGGGTTTT
+TATGGTTTTAGGTCTAACATGTAAGTCTTTAATCCATCTTGAATTAATTTTTGTATAAGGTGTAAGGAAGGGATCCAGTT
+TCAACTTTCTACATATGGCTAGCCAGTTTTCCCAGTACCATTTATTAAATAGGGAATCCTTTCCCTATTTCTTGTTTTTG
+TCAGGTTTGTCAAAGATCAGATAGTTGTAGATATGCGGCATTATTTCTGAGGGCTCTGTCCTGTTCCATTGGTCTATATC
+TCTGTTTTGGTACTAATACCATGCTGTTTTGGTTACTGTAGCCTTGTAGTATAGTTTGAAGTCAGGTAGCATGATGCCTC
+CAGCTTTGTTCTTTTGCCTTAGGATTGACTTGGCAATGCAGGCTCTTTTTTGGTTCCATATGAACTTTAAAGTAATTTTT
+TTCCAATTCTGTTAAGAAAGTCTTTGGTAGTTTGATGGAGATGGCATTGAATCTTTAAATTACCTTGGGCAGTATGGCCA
+TTTTCACGATATTGATTCTTCCTACCCATGAGCATGGAATGTTCTTCCATTTGTTTGTATCCTTTTTTATTTCATTGAGC
+AGTGATTTGCAGTTCTCCTTGAAGAGGTCCTTCACATCCCTTGTAAGTGGTATTCCTAGGTATTTTATTCTCTTTGAAGC
+AATTGTTAATGGGAGTTCACTCATGATTTGGCTCTCTGTTTGTCTGTTATTGGGGTATAAGAATGCTTGTGATTTTTGCA
+CATTGATTTTGTATCCTGAGACTTTGCTGAAGTTGCTTATCAGCTTAACGAGATTTTGGGCTGAGACGATGGGGTTTTCC
+ACATTCACAATCATGTCATCTGAAAACAGGGACAATTTGACTTCCTCTTTTCCTAATTGAATGCCCTTTATTTCCTTCTC
+CTTCCTGATTGCCCTGGCCAGAACTTCCAACACTATGTTGAATAGGAGTGGTGAGAGAGGGCATCCCTGTTTTGTGCCAG
+TTTTCAAAGGGAATGCTTCCAGTTTTTGCCATTCAGTATGATATTGGCTGTGGGTTTGTCATAGATAGCTCTTATTATTT
+TGAGATACGTCCTATCAATCCCTAATTTATTGACAGTTTTTAGCATGAAGATTTGCTGAATTTTGTCAAAGGCCTTTTGT
+GCATCTATTGAGATAATCATGTGGTTTTTGTCTTTGGTCCTGTTTATATGCTGGATTACGTTTACTGATTTTCATATGTT
+GAACCAGCCTTTCATCCCAGGGATGAAACCCGCTTGATCATGGTGGATAAGCTTTTTGATGTGTTGTAGGATTCTGTTTG
+CCAGTATTTTATTGAGGATTTTTGCATCACTGTTCATCAAGGATATTGGTCTAAAATTCTCTTTTTTTTTTTGTCTCTGC
+CAGGCTTTGGTATCAGGATGATACTGGCCTCATAAAATGAGTTAGGGAGGATTCCCTCTTTTTCTATTGATTGGAAAAGT
+TTCAGAAGGAATGGTACCAGCTCCTCCTTGTGCCACTGGTAGAATTTGGCTGTGAATCCATCTGGTCCTGGACTTTTTTT
+GGTTGGTAAGCTATTAATTATTGCCTCAATTTCAGAGCCTGTTATTGGTCTATTCAGAGATTCTACTTCCTGGTTTAGTC
+TTGGGAGGATGTATGTGTCGAGGAATTTATCCATTTCTTCTAGATTTTCTAGTTTATTTGTGTAGAGGTGTTTATAGTAT
+TCTCTGATGGTAGTGTGTATTTCTGTGGGATTGGTGGTGATATCCCCTTTGTCAATTTTTATTGCATCTATTTGATTCTT
+CTCTCTTTTCTTCTTTATTAGTCTTGCTAGTGGTCTATCAATTTTGTTGATCTTTTCAAAAAACCAGCTCCTGGATTCAT
+TGATTTTTTGAAGGGTTTTTTGTGTCTCTATTTCCTTCAGTTCTGCTCTGATCTAAGTTACTTCTTGCCTTCTGTTATCT
+TTTGAATGTGTTTGTTCTTGCTTCTCTAGTTCTTTTAATTGTGATGTTAGGATGTCAATTTTAGTTCTTTCATGCTTTCT
+CTTGTGGGCATTTAGTGCTATAAATTTCCCTCCACACACTGCTTTGAATGTGTCCCAGAGATACTGGTATGTTGTGTCTT
+TGTTCTTGTTGGTTTCAAATAACATCTTTATTTCTGCCTTCATTTCCTTATGTACCCAGTAGTCATTCAGGAGCAGGTTG
+TTCAGTTTCCATGTAGTTGAGTGGTCTTGAGTGAGTTTTTAAATCCTGAGTTGTAGTTTGATTGCACTGTGTTCCGAGAG
+ACAGTTTGTTATACTTTCTGTTCTTTTACATTTGCTAAGGAGTGCTTTACTTCCAACTATGTGCTCAATTTTGGAATAGG
+TGTGGTGTGGTGCTGAAAAGAATACATATTCTGTTGATTTGGGGTACAGAGTTCTGTAGATGTCTATTAGGTCTGCTTAG
+TGCAGAGCTGAGTTCAATTCCTGTATATCCTTGTTAACTTTCTGTCTCGTTGATCTGTCTAATGTTGACAGTGGGGTGTT
+AAAGTCTCCCATTATTATTGTGTGGGAGTCTAAATCACTTTGTAGGTCACTAAGGACTTGCTTTATGAATCTGGGTGCTT
+CTGTATTGTGTGCATATATATTTAGGATAGTTAGTTCTTGTTGAATTGATCCCTTTACCATTATGTAATGGCCTTCTTTG
+TCTCTTTTGATCTTTGTTGGTTTAAAGTCTGTTTTATCCGAGACTAGGATTGCAACCCCTGCCTTTTTTTGTTTTCCATT
+TGCTTGGTAGATCTTCCTCCATCCCTTTATTTTGAGCCTATGTGTGTCTCTGCACGTGAGATGGGTTTCCTGGATACAGC
+ACACTGATGGGTCTTGACTCTTTATCCAATTTGCCAGTCTGCACCTTTTAATTGGAGCATTTAACCCATTTACATTTAAG
+GTTAGTATTGTTATGTGTGAACTTGATCCTGTCATTATGACGTTAGCTGGTTATTTTCCTCGTTAGTTTATGCAGTTTCT
+TCCTAGCCTTGATGGTCTTTACAATTTGGCATGGTTTTACAGTGGCTGGTACCAGTTGTTCCTTTCCATGTTTAGTGCTT
+CCTTCAGGAGATCTTTTAGGGCAGGCCTGGTGGTGACAAAATCTCTCAGCATTTGCTTGTCTGTGGAGCATATTATTTCT
+CCCTCACTTATGAAGGTTAGTTTGGCTGGATATGAAATTCTGGGTTGAAAATTCTTTCCTTTAAGAATGTTGAATATTGG
+CCCCTACTCTCTTCTGGCTTGTAGAGTTTCTGCAGAGAGATCAGCAGTTAGTCTGATGGGCTTCCTTTTGTGGGTAACCC
+GACCTTTCTCTCTGGCTGCCTTTAAAATCTTTTCTTTCATTTCAACTTTGGTGAATCTGACAATTATGTGTCTTGGAATT
+GCTCTTCTCGAGGAGTATCTTTGTGGCATTCTCTGTATTTCCTGAATGTGAATGTTGGCCTGCCTTGCTAGATTGGGGAA
+GTTCTCCTGGATAATATCCTGCAGAGTGTTTTCCAACTTGGTTCCATTCTCCCCGTCACATTCAGGTAAACCAAATAGAC
+GTAGATTTGTTCTATTGACATAGTCCCATATTTCTTGGAGGCTTTGTTCATTTCTTTTTATTCTTTTTTCTCTAAACTTC
+TCTTCATGCTTCATTTCATTCATTTCATCTTCCATCGCTGATACCCTTTCTTCCCGTTGATCACATCGGTTACTGAGGCT
+TATGCATTTGTCGTGTAGTTCTCGTGCCATGGTTTTCAGCTCCATCAGGTCCTTTAAGGACTTCTCCACATTGGTTATTC
+TAGTTATCCATTCGTCTAATTTTTTTTTCAAAGTTTTTAACTTCTTTGCCATTGGTTCGAACTTCCTCCTTTAGCTCGGA
+GTACTTTGATCTTCTGAAGTCTTCCTCTCTCAACTCGTCAAAGTCATTCTCCGTCCAGCTTTGTTCCATTGCTGATGAGG
+AGTTGCATTCCTTTGGAGGAGGATAGGCACTCTGATTTTTAGAGTTTCCAGTTTTTCTGCTCTTTTTTTCCCCATCTTTG
+TGGTTTTATCTACCTTTGGTCCTTGATGATGGTGACATACAGATGGGGTTTTGGTGTGGATGTCCTTTCTGTTTGTTAGT
+TTTCCTTCTAACAGTCAGGATCCTCAGCTGCAGGTCTGTTGGAGTTTACTGGAGGTCTCCTCCAGACACTATTTGCCTGG
+GGGGTATCAGCAGTGGTGGCTGCAGAACAGTGGATATTGGTGAACTGCAAATGCTGCTGCCTGATCGTTCCTCTGGAGGT
+TTTGTCTCAGAGGGGTACCCGGCCATGTGAGGTGTCAGTCTGTGCCTACTGGGGGGTGCCTCCCTGTTAGGCTACTCGGG
+GTTCAGGGACCCACTAGGTGGGGGTTGTCTGCCCGTTCTCAGATCTCAAGCTGCGTGCTGGGAGAACCACTATTCTCTTC
+AAAGCTGTCAGACAGGGACATTTAAGTCTGCAGAGGTTATTGCTGTCTTTTGTTGGTCTGTGCCCTGCCCCCCGAGGTGG
+AGCCTACAGAGGCAGGCTGTCCTCCTTGAGCTGTGGTGGGCTCCACCCAGTTCGAACTTCCTGGCTGCTTTGTTTACTTA
+CTCAAGCCCGAGCAATGGCGGGCGCCCCTCCCCCAGCCCAACTGTTACCTTGCAGTTTGATCTCAGACTGCTGTGCTAGC
+AAACAGCAAGGCTCCGGGGGTGTAGGACCCTCCAAGCCAGGTGCTGGATATAATCTCCTGGTGTGCCATTTGATAAGCCT
+GTTGGAAAAGCGCAATATTAGGGTGGGAGTGACCTGATTTTCCAGGTGCCATCTGTCACCCCTTTCTTTGACTAGGAAAG
+GGAATTCCCTGACCCATTGTGCTTCCCAGGTGAGGTGATGCCTTGCCCTGCTATGGCTCATGCATGGTGCGCTGCACCCA
+CTGTCCAGCACTCCCCAGTGAGATGAACCTGGTACCTCAGTTGGAAATGCAGAAATCACCCGTCTTCTGCATCACTCACG
+CTGGGAGCTGTAGACTGGGGCTGTTCCTATTCGGCCATCTTGGCTCCACCCTCTAATTTTTAACTTTCAAAAGATTAGAT
+ACATTTACCCTCTTCTGAATTTTAATTCCTGATGGAAAGATTGTTAAATCTCAAGAGGATAATGAATCAGCTTCTGATTA
+TATTCTGGAATTGAGATTTGATCACACATCCAAGAATGCGTATTCACTTTCCTTTACATTACACACATGCATAATACCAT
+GCTGAGTGATGCTGGAAAAAGTGAGTCTTCAGTAATGCCCTCATGAAATTACAGTCTCACTACACTGAGGTATTTTCTTT
+TTAACCCTTTCTTTCTTTTCTGTTGTGAATATGTGCCTTCTAAGAGGTGAACAACGGGAAAAACTGAAAAAGAACCTTCA
+TCTAATTCAGTAACTAGCTTTTGTATCTTCTAAGAATTTTTATCTCCAGAAAGCTCTAATTTTCATTCCTTTACAAGCTT
+TATTTCTCTCTCCTTTCCAACACCAGACTTTGCTCTGTTTGTTTTCTATATCTTTTTTAGAGATGCCTACAATAGAAAAA
+AACTATGACATATTCAATACATCTGAAAAGTTTGCATTACCATAGATAAGAATGAATTATCTGTTTATAAAGAAAAAATA
+ATAAAAATATGAACCAAGAAAGCACAGAAGGTAGTAACCACATTTAGATGTTTATCATGGAATCATCTTTATAATCCATC
+TGTTACAATTCCACACAGCAAATTACAAGTAATTGTGAAGTTTCCCCTCATTTTATAGGTTTGGTCTTATAAGGCATACA
+ACACAAATTTTAATTACTTTTCATTTTGGTATTTGGATCTTAATTTGGTCAAGTGGCCACTGGGTAGATGATGGAGACTC
+TTGAGACATGCAGTCTTCTTCTAGCTTTGTTGATGTGAATATTACTGTGTATTTTTCATTCACTTATTATTTTACAAGGC
+ATGCTTCATTAAAAATATATCAGCATCTCATTGACAAATCACAAAATTTAAAAGGCACCAGAAAAAAACAATTTATTTAA
+AACATTCCTGTGTAAAAGTAGTCACCACTCTTACTATTCTTCAAATATTTTTGTTATATTTAAACGCAAGCCTAATTTTA
+TCTCGAGTGTCTGACCTTGATGATATTGTACACTTTAAACTTAGAAAAATTGAACTCTATTGGAAATTTCCTACAGATCA
+GCTTTTCTAGATGCCAAGCGCCTTGTTTCAGCCATGGTGATGACAGCAAATTGGGTTCTCAGGGATTCTGGCCTCTGGCA
+TCATCTCAGTTGTTTATAATTGAAGTTGGGTCTGATGAGAATGCAGCTTAGATGCATGGTGGGACTGCTGGGCTTAAGGC
+TGGCCTGCCAGGAGGTTGCATTGAGGTGTAACTAGGCAAAGAAAGAAAGAGTTTATTGAGGCACTACTGACAATAGCAAA
+GACTTGGAACCAACACAAATATCCAACAATGATAGACTGGATTAAGAAAATGTGGCACATAAACACCATGGAATACTATG
+CAGCCATAAAAATGATGAGTTCATGTCCTTTGTAGGGACATGGATGAAGCTGGAAACCATCATTCTCAGCAAACTATCAC
+AAGGACAAAAAAACACCGCATGTTCTCACTCATAGGTGGGAATTAAACAATGAGAACACATGGACACAGGAAGAGGAACA
+GCACACACTGGGGCCTGTTGTGGGGTGAGGGGCTAGGGGAGGGAAAGCATTAGGATATATACCTAATGTAAATGACGAGT
+TAATGGGTGCAGCACACCAACATGTCACATGTATACATCTGAAACAAACCTGCACGTTGTGCACATGTACCCTAGAACTT
+AAAGTATAACAAAAAAAAGAAGGGGTTGGGCAAAATTACCAACAATCTTCAACTATGAATTTGTATGAGTAGGGGCTTCC
+CACACCATCAGGCACTCCAAGTTGGAGAAACAAGGGATGTTGAATTGAAAGTTTAATTTTTTAGGCTTGATAGTCAGATT
+AGGGAAGTTTTGGGTACACCTTTCCCACTCTAGAGCCCTCTCCCTAACTCAGGGCCAATTTGGGGCATGGGCTCAGTGGA
+AAAGAGTGCCAAGATCAATTAGAAATATCCATCATGAACATAGGAGGCAGGCATTCTGGCCAAGGGGGTGCGTCTTCCCA
+CTTTGTAATCCAGAGTGCGATTTCTTCTTGCTATCTGTCCCTCCATAAATAAGCGTTGGGTGAAGGACAAGCTAGTTACC
+ATTGAGGATGGAATGAAATTCTACCAGAATTGCAAAATGAACTGAGAGCCACGAAAGTGTTCCTCTGTGAACCAGCAGAG
+AGATCTGTGGAAGGCAAAGGAAGAGGAGCCAAGAGGCCAGATAACATTCCTGCTTCCCCTACATTGTGAGAATGAAATTG
+TCAGCAACCTATATCAGTAATTAACCAGAGGCTTTTTAGAGAGTAAGACTTCTGATAGATACCTCAATAGCTGAAGTTGC
+TCTTCAGTACTGCATATAACCTGTGGACAGATTTTCATCATTATTAATTCATTTATTCAACACATAGATATTAAATTCAA
+GGCATAGTTCTAAACTATGCAGTCAATTCCAAAAAAATAATAATGAAGGACATTAGCTCTGCCTCATAGGACGTTAAAAC
+TCTAGCTGCTAAATGAGACAAATAGGGTTCCCACATCAAGTGCCTGGGCTCCTGTTATAAACTGGACTCTGCTTCCATTG
+GCTTCTCTTGTTTCTGTTTTGATAAACCCATCATTTGGCTTAGTCCCTACCCTTGCCATGCTCACCCACCAGTAGGTGGA
+CCTCCTTGACCATAACTACAGACATAGATCTTTGATTGTTTCTTTTCAAATGTTTGCTTACTTTAGAGGTGCAGTCAAAT
+GTTGTTTGGACCATAAGACTCTGAGGCTGTGTGTGGTTGAAACTGTCTTGAGACTCAAAGAGCTATTGAGGAAGGGATTT
+GATAGCTGAACTCTTTTGAAGATTTTCTCTGTCTCTCACACATCTGGGGCTGAATAGCTTAACGTTTAATTTCTACATCG
+ACGATAGTTTAATAGCCATTCATGCAAATCAGTTCTGCTGTGAGTTATTATCCTACTCCAATACCAACTAACGCATTCCT
+TAAATTTAGTGTACGTATAAATTCTACATGGTAAGTGAAGCTGTCTTTATGAATGGCTGCTATAGAATATACTAGATTGT
+GTAGGTTTTAGTACAATCCAGGTAATTATCAAGCCTTTGTAATATTTTCAGGAAAGTGATCCTTTCACTTCAGTTTATGG
+AACTTGAAAATTTTTTAGTTGCCACCGACTAGTTAGATCATAAACTTAATTCTATGTATCTGAATTTATTTTTCTATTTA
+TAAATTTATCTGGGTGCTTTCTCAAAGGTTTCCCAGATTAAAAACCTATGGCTTTGTAAAATTAAAACAGAAGTAATAGG
+AATTGAGTTAAAAAACAGAATCAGGCCAAGCCTGGTGGCTTATGCCTCTAATGCCAACACTTAGGGATGCCAAGGGGGGA
+GGATCATTTGAGGTCAGGAGTTCAAGACCAGCCTGGACAACATGGCAAAACCCCACATGTATTAAAAAAATACACAAATT
+AGCTGGGCGTGGTGGTGAATGCCTTTAGTCCTAGCTACTCAGGAGGCTAAGGCAGGAGAATAGCTTGAACTTGGGAGGCT
+GAGGTTGCAGTGAGCCAAGATAGAGCCACTGAATTCCAGCCTGGGTGACAGAGTGAGATCCTGTCTTAAAAAAAAGGAAA
+AAAGAAAAAGAAAAAACAGAATCATACAATGAGAGAGATTGTATAGCCCTTTTAATTGTCCATAATGGATACATTTCTTT
+TTCTGTGTAATAGTTTATCAGCATGAAAAAAAAGAGTCACTGAACACTGATAATTTTCTTTCTCTCTCTCTCTCTCTTTT
+TTTCCTTTTTTTGGAGATGAAGTCTCGCTCTGTTGCCCAGGCTGGAGTGCAATGGCAACATCTCTGTTCACTGCAACCTT
+CACATCCTAGGTTCAGGAGATTTTCCTGCCTCAGCCTCCCAAGTAGCTGGGATTCCAGGTGCACCCCGTCATGCCTGGAT
+AAATTTTTGTATTTTCAGTAGAGACGGGGTTTCATCATGTTGGCCAGGCTGGTCTCAAACTCCTGACCTCAAATGATCTG
+CCCGCCTTAGCCTCCCAAATTACTGGTGATGATTTTCATTTAATGGTGTTCTACAAAGTGAGTTTAGGCAGTGAAAGGGC
+ACAAGTCACCTATAGTTTACCAGAAAGTAGTACCAGTCACCAGCGTTGAAGCATACTCCTAACCCTGGAAAATTTCTTCA
+GCAGCCAGAATTAATCTCAGATTTTGAGACTGGAGCAAAGACTAATTTGGTATGTGTTCATTTAGATTAGTAAATCTCTA
+TGGTGTTCGAACTGTATAATATCAAATCTAAAATAAGTCAAGAGAGTTAGACAATATCTCATCTCTGAGAAAATAAATGC
+ATTATTTTCAAGAGATTTAAGACTGAATTTAAAATCCAACTGCCTGTTCCAGTGTATGTTTCTCTAGAATATGGCCAAGC
+CCTACAAATTCTGGATAGTGTCCTCCAAGTTGTGTTTAAGTGATAATTGATAAATCTATGGTTTAGATACATATGGAATT
+CCACATAACTCTAGCTCCAGCCTTTGCGAGTAAAAATTGGCAGTGATCATGGCCTTAATGGAGGCATGTGGTTAATTGGA
+ATTTTCAGTACCAAAAGTCTTCTTCTTACTAGTTTATCAGGGTACCCTATAATAGTATCCATAACTCATTGCCCACTTGT
+CTGTTTTGATTTCTTTTTTAAAAATACAATTTTAAACTTAGAGGTTTTGTACCTCAAGTCACATATGTGGCACATTTATA
+AAATAATCATGCCTTTGCATATTTTTCCTTTGATATCTTAGCTGTATATACAAGCCACTTATGTTTTTTAATTGCCACAA
+ATATTGCACTGAAATAGAGCACTAGTTGCAAAAATGCCTGGTATAACATTTCTTCTTGTTCTTGAAAGTAATTTTAATAC
+CCCATGTTTAAGATATTCCTAGACAACTATAAGTAACTATAATTATTTCTGCTAGGTAGAAGCATCTTTGTGGTCAACAT
+TTGTGAAGTGCCTTGTAAATAAAAATATGCTTGAGTAGAAATAGTATTTTGCAAATATAGCCCTAAATAGAGCTACTGAT
+TTTACCAAGAATTAAAGACTCCAAACTCTGTCTTTGTCTTTCAAATTTGGTTTATAGGAACATCTCATTACAATGAGCAA
+AAAGCCTATCTGATCATTAAAGAGATAATTTTGACACTTTCAAATGACACAGCTAATTTGTAGTAGGTTTTTGAAGTGCA
+AATTACATGTTTCTGTTTAAATAATGACAGTAAAATATGGACAGTATCTATTCACACTCTATATACACAACTACAACTTG
+TCCCTAGAAGAAAAGTTTAATTTTTCAGGTCTCATCCACAAAACGAAGTGTTAATAAGCTGTATCTTTATTCTCACTTTT
+AAGTAATTTCTTCTTAACCATTGTATAACACTACCACAGTTTGTATGGAGATAAAATAACTCATGGTGAGAAATCCAGTT
+CTTTAAAAAATCATTTTACCTTCAATCACTTTTAGGATGTCTATGGTTTTAGAGTAAGGACGAAAATTTATAAACATTTG
+TTAAATTATGCAACTGTGTTTGAAGATGTTTCAAAATTACATTTTCCTTATTTTACAACTAGTTTTCATGTATTCACAAA
+CGAGACACATGAGAATTATTAATGCAAAAGCACAGCATATCAACAAAGTTTATAATGTAAATAGTTTAGTTAGGAAAGAA
+TGTAATGAGTGAATTATCAAAGCTATTTTATTGGTTCCTAGATTTACCATAGAAGAAAATAAAGCAGCCTTTTAAAAATT
+TCTCAGCACTGAGTCCATTTAAGGTCTGTATTGTATGCCGTAAGTTCTAAAAAATGTTATTGCCGGTAAAAATAGGATGC
+CTATCTAAAATTGTCTTTTTAAAGGTTCTCTCTTATTCTTCTTCCCTTCCTATGTGTGCACAGAATACTGTCATCGTCTT
+GATTGGAACACTATCTCTCTTTCTCAGAGGGTGTACATTTGTAGAAGGGAGAAAATATTTGTTAATTTTGCTTCCTGTAA
+GATAAGGCCTCAATGGTTAAAGGTTAGTTAACAAACTTTTTTTTTGTCTTCATACTATTATCTTGGAATTACCAACTGGC
+TTCTACTTGGATTTTGAATAATGAAAGAAAAGAACTAGCATTTGTTAGAACTTACTGTGTTCCAGAAACTTTATTGATGC
+CATTTCTTCTCATTATCACATTCATTCTTTCAATTAGTATAATTTTTTTCCATCTTAATGAGGTGGGACCAGAGGTTTTG
+TAATATCCACTAACCCACTGTGGATTTCATGGATTGCAAGAGGAAGAAAGCATGGTGGAACTCAGGTTTCTCTGATTCCA
+AATTCACATCCTTTCATGTCTCCATGAAAAATAGAAAAGATAAATTGGGCTCTATTCTAACAAGAGAAACACAATTTTCC
+GTCAGTTAAATGAATAATTCTTTTTTATTTTATTTTATTTTATTTTTATTATTATACTTTAAGTTTTAGGGTACATGTGC
+ACATTGTGCAGATTAGTTACATATGTACACATGTGCCATGCTGGTGCGCTGCACCCAGATACCTAAATGAATAATTCTTA
+AACCTCAAAAATAAAAAGTAGCAAAACCTCAATTCAAGAGTAGACATAAAGGTTGTGACATGGGTGGCAGGGACATATGT
+CTAGTGACAGATTTCCTATGCATATCTGACCAAAAATCCCCCTCATGATTTTCAAATCAGATGCTTAACTACATTTAATG
+AGTTTCCCGAATCATTAATATCAGGCTCTGAAAACAGGATCNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNGATCCTGTCGAAAGAAACGAATGGAATGGAATGGAGTGG
+AGTGTAGAGGTGTGGAGTGATGTGGAGTGGAATGGAGTGGAATGGAATGGGATGGATTTGAATGGAAGGAGTGTAGTGGA
+GTGGATTGGAGTGGAGTGGAGTGGAATGGAATGGAATGGAGTGGAATGGAATGGAATTGAATGGAATGGAATGGAATGGA
+ATGTTGTGGAGTGTATTGGAATGGAGTGAAATAGAGTGTAATGTAATGTAGTGGAATGGAGTGGAGTGGAGTGCAGTGGA
+GAAGGGTGTAGGGGAATGGAATGGAATAGTGAAATGAAATGTGAGCTGAGATACTCCCACTGCACTCCAGACTGTGTGAC
+AGAATGAGATCCTGTCGAATGGAATTGAATAGAGTGGAATGAAATGGAGTGCAGTGGAGTGGAGCAGAGTGGAGTGGAGT
+GGAGTGGAATGGAATGGAATGGGATGGAATCAAATGGTATGAAGTGGAGGGGAATGGAGTTGAGTGGAGGGACGAGGAGT
+GGAATGGAATGGAATTGGATGGAATGGGATGGAGTGGAGTGGAGTGGGGTGGAGAGTAGTGGAGTAGAGTGGAATGGAAT
+GGAGTGGAGTGGAGTAGAGTGGATTGGAGTGTAATGGAATAGAATGGAATGGATAGGAATGGAATGGTATAGAATGGAAT
+GATGAAATGAAATATGAGCTGAGATTGTGCCACTGCACTCCAGCCTGGGTGACAGAGTGAGATCCTGTCAAAGGAAAGGA
+ATGGAATGGAAAGGAGTGGAATGGAATGGAATGGAATGGAATGGATTGATTGGAGTGCAGTGGAGTAGAGGTGAGTGGAG
+TTCACAGTAGTGCAATGGAATGGAATGGAGTGGAGTCGAGTGCAGTGGAGTGGGTTGGAATGGAATGGAGTGGAATGGAA
+TGGGATGGAATGGAATGGAGTGGAGTGGAGTGGAGTTCACAGGAGTGCAATGAAATGGGATGGAATGGAATTGAATGGAA
+TGGAATGGAATACAATGGAGTGGAATGCAATGGAATGCAATCGAATGGAATTGTGAAATTAAATGTGAGCTGAGATAGTG
+CCACTGCACTCCAGCCTGTGTGACAGAGTGAGATCCTGTTGAAAGAAATTAGTGGAATGTAAGAGAATAAATTATACTGG
+AATGGAATGGATTGGAGTGGAGTGGAGAGGAATGGAGCTTAATGGAGTGGAAGGGAATGGGAAGGAGTGGAATGGAATGG
+AGTGGAGTGGAGTGGAGTGGAGAGAAATGAGGTAAAATGCAATGGGATGGACTGGAATGGAGTGGAGTGGAGTGGAGTGA
+ATTGGAATGGAGTGGATTTGACTGGATTGCAGTGGAATGAAGTGGAGTGGAATGGAATGGAATGGAATGGAATGGAATGG
+AGGGGAGTGGAGTGGAGTTGAGGGGAATGGAGTGGAGTGGAATGGAGTGGAATGGAATGGAATGGAATGGTGAAATAAAA
+TGTGAGCTGAGATTGTGCCACTGTGTTCCAGCCTTTTTTTGACACAGTGAGAGCCCGTCAAAGGAAAGGAATGGAATGGA
+ATATGGTGGAATGGAATGGAGTGGAGTGGATTGGAGTGGAGTGGAGTGGAGTCAAGTGGAACAGAGTGGAATGGAATAGG
+ATGGAATCGAATGGCATGTAGTGGAGTGGGATGGAGTGAATTTCAGTGGATTAGGGTGGAATGGAGTGGAATGGAATGGG
+ATGGAAGGGAATGGCATGGAGTGTAGTGGAGTGGACTGGAGTGGGTTAGAATGGAATTGAATGGAATGGTATGGAATGGA
+ATGGAATGGAATGGAATGGAATGGAATGGTGAAATTGAATGTGAGCTGAGATAGTGCCACTCCACTCCAGCCTGGGTGAC
+AGATTGAGATCCTGTCAAAAGAAAGGAATGGAATTGAATGGAGGGGAATGGAATGGGATGGAGTGGGGTGGAGTGGAGTG
+GAGTGGAGGGGACTGGATTTGAGTGGAGTCTAGTGGAACAGAGTGGAATGGAATGGGGTGGAGTGGAACGGAATGGAATG
+GAGAGGAGTGGTGTGGAGTGGAGTGGATTGGAATTCAATGGGATGGAATGGTATGGAGTGGAGTGGAATGGAGTGGAGTG
+GAATGGAATGGAATGGAATGGAATTGGAATGGAATGGAATGGAATGGAATGGAGAGGAGAGGAATGGAGTGGAATGTAGT
+GTAATTGGAAAGGATGGAATGGAATGGAATGCCATGGAATAGAATTAGGAATAGAATGGAATGATGTGGAATGGAATGGA
+GTAGAATTCAGTGGAGTGGAGTGGAGTGGAGTGGAATGTAATGTAATGGAATGGGATGGGATGGAGTGGAATGGAATGGA
+GTGGAGTGGAGTGGAGTGGAGTGGAATGGAGTGGAATGCAATGGGATGTGATGGAATGGAGTGGAGTGGAGTTGAGTGGA
+TTGAAATAGAATGGAATGGAGTGGAGAGGAGTGTAGTGGAATGGAATGGAATGGAATGGAATAGAAGGGAATAGAATGGT
+GAAATGAAATGTGAGCTGAGATTGTGCCACTGCACTCCATCCTGGGTGACAAGTGAGATTCTATCGAAAGAAAGGAATGG
+AACGGATTAGAGTGGAATGGAATGGAGTGGGGTAGAGTGGAGTGGAGTGTAGTGGAGTAGAGTGGAATAAAGAGGAATGG
+AATGGGATGTAATGGAAAGGAATGCAGTGGAGTGGAGTAAAGTGGAGTGGAGCGGAGTGGAGTGGTGTGGAGTGGAGTGG
+AGTGGAGTGGAGTGGAGTGGAGTGGAATGGAATGGAATGGAATGGAATGGAATGGAATGGAATGGCACGGTGAAATGAAA
+TGTGGGCTGAGATTGTGCCACTTCACTACAGCCTGGGTGACGGAGTGAGTTCCTGAAGAAATAAAATAATGTAATAGAAT
+GGATTGGAATGAAATGGAATGGAGTGTAGTGGACTGGAGTAGAGTGGAATGGAGTGGAGTAGAGTGGAAGGGAATGGAAT
+GAAATGAGATGGAATGGAATGGAATGGAATAGAATGGAGTGGAGTGGAGTGCAAAAATCCTCAATAAAATACTAGCAAGC
+CGAATCCAGCAGCAAATCAAAAAGCTTATCCACCATGATCAAATGGGTTTCATCCCTGGGATGCAAGGCTGGTACAACAT
+ACGCAAATCAGTAAACGTAATCCATCATATAAACAGAACCAACGTCAAAAACCACGATTATCTCAATAGATGCAGAAAAG
+CCCTTTGACAAAATTCAACAACCTTCATGCTAAAAACTCTCAATAAATTAGATATTGATGGGACGTATCTCAAAATATTA
+AGAGCTATTTATGACAAACCCGCAGCCAATATCATACTGAATGGGCAAAACCTGAAAGCATTCCCTTTGAAAATCTCAGT
+TACCTTTCTTTTGGATATGAGTGTGAGGAAAGAGTACCTACAAACTACCCTTTTAGTTAAATGCCATATACAATAAATAT
+TAATACCTACAGTCCTCATGTTGTACATTAGAGCCCTCAATTTGTTAATTCTACATATCTGCAACTTTGCATCCTTCGAA
+TTCTCTCTCTCCATTTTCTCTTCTCACCCCCAGCCCCTGGTAACCACTGTTTTATACTCTATCTCTGCATATTTACCTTT
+TTGCTGTTTTTGTCCTGAGAAGTTTATTGGGACTTTCAGCTAGGAGATAATATGTTCTGAGTCTTGATTATCTCAAATTA
+TATCAGGTAAGAAGTTTTACTTGTCTGGAGAGCAGAGAGCAAGTGCATAATTTTATGCAGAAGAGGGAATAAAGAGAATT
+CTCTAATTAAAATTTGAATCAGAAATATGGTTCATATTTTTATGTACAACAATCTCTAAGGCTTCTTGGAAAGGAACTAG
+CTGCCTGATTTTTTTTCATTTGAATCCTCTAATACATGAAGAGGATGATTCTACCATACCAATCACAAGAATGATATCTG
+TAGAATCAAAAGTTGAAATCCTATTATCCTAGAAGTTTTTTCAGAAAACTTGAACATGTATTGAAATGTCATGCTGGTCA
+TATATGTTACAGCAATGAGGAATTTACTTATTATATACTTTAGCTTTTTAGCCCTTTCCCTGCCCACCAGAAATGTTTCT
+TTTGAATCCAATTTGGTCCTCAGAGTTTTAGAATGTTAAGGAATGTATAGGGGTATCTTTTTTCTTCTTTTTTTTTTTTG
+TCAAGCTATTGTATATTTTTACTTTACCTGTTTGGGTATGTAGTTACTATTTGTTTGGACTGTCATACTAGAAAAAAATT
+ATAGTGAATTTTACATGTATGGAGTCTTCATTAATCTGTATAAACTGACAAGTTTAGAATATCAAAGTCATCACTGTAAA
+AAATAAAGTGATATTTTATCTGGTCTATCTCCTTGCAAATAAAAGAACAATAAATTCTGTACTATTATGAAATATAAGAG
+CTTGAAAATTTTTCAAGCGTCTTGTATAGAATGGCCTTCGAATTCAAGGTATAAACTACAACAAAACACTTTCGAGATCT
+AGTTTGATTGAAGAGTGCATTCTTACTTATTTATATATTCTCTGCAATTTCTGATTCAAGATAGTCCACAATTCTAAAGG
+AAAACCAAAATAGAATAATAATTATAAAACAAAATAAAAAGTAATTGAGGAAGATAATTGAGCAACTTATTCTTGACAAC
+TAGCTTTTTAACAAACAGAAAAGTGCACTAAAAATACTGCCTAGTTGTTTAAAGTAGGCTGAGATCTGACTGTATCCCCT
+GAGTGACATAAAGAAAAAACTACCTTTGAGGAAGCACAGTGATTAAATTTAAAAGATTTATTGTTTAACCTCAGGAAGAC
+TTTCAATAACTTTTTATAAATTGTATACAGTTAATGGAAGACATGCTAGGCTTCTTGTAAGAATTTATTCCAGATCACTG
+TGTATAATTAATGGCTTGTCTACATATTTTTTCAATAAAACTGTTCATGAAGAATTTTAAGTATGCACTTTCAGTAACAG
+AATGGATATATCTTTGTGAAGATACCATTTATGTACCATAACATCCAAAGCATAATGAGGAGATTTTGATACACTGCAGA
+TTTAGAACTGAGTTGGAAAACAAAACTTCAACATACTGTCTTGTCAGCACAAAGCAAATAAATTCATCACTGTCAGTTGA
+GATTTAAATTCACAAACTTATAAGCAATAAAATCAAAGTAATTTTGTTATAAGTTTTTTGACACCAGATTATATGGGGTA
+AATAATTTACTTTCTAATCAAGCAATTGATACATATAAACCTGAAATAGAGAGTTTTGCCTTTTAATTAGATACATTGTT
+ACGCAAAGAGTGGTTGAAGCTGCAGTGGAGAATTATCTTTTTCTCTTTTTGCTGTTCTTACAAGGACAGTTTCTAGAATT
+CTTTTCAAAACAATAGAACAGAAAATTTCACTTACTTAATTTAACTTTTGTTAAAGTACTTTGCCAAAATTAGCAAACTA
+TGAAAGGAAAGTTTGCCTTTTCAATATATATGTAAGTGGCAAGTAAAAGTTGTTCTTAAATCACGTGATCTCATAGAAAT
+AACATGGGATAAAAAGAGTTCTGAAGGTGTTCAGGAAATGCCAGTCCTAAATGTATTGCTTTGGTGTACTGATTATATTG
+AACTGAAAATATTTGAAAAATAACAACTGTGGGACATGCTTTCTCTGCACTCCCTTTACGTTCCTCAAGAGAGATCCTCC
+AAAAGGAATTTTCAATACCAGAATGAATGTATCTTTGTAAAGATACCATTTATCTACCATAACATCCAAAGCATGATGAG
+GAGATTTTTATACACTGCAGATTTAGAACTGAATTGGAAAACAAAGCTTCAAGATACTGTCTTGTCAGCACAAAGACATT
+CATCTCCTTCCTAGGAGTTTCATCAGCCAGGGAAAATTGACTCCTCATATCACAGGAGACAAGACTAAGAGTCCATTCCA
+CACCAAGGCAGACTGCCACAAACTATCATCTATGCTCCTAGGGGTCTGGTCATCTTTCCCAAAAGTCATTTGCTCTCCCT
+TAAGTTTCCTACATTCTTCCTCCTGCCTTCCCTACAAGCTCCTAAATCTCCCTGGGTTTTTTTCTTGGTGGTGTTTTGTT
+TGTTTTGTTTTGTTTTGTTTTTGATTCCACGTTTCTTTCCTGTGATTCCCCCATGCGTGTAATAAATGTGTACACATTTT
+CTTCTTTTGACATGCCTGTTATCCATTTATTCAATAGACTCAGTTATCATACCTTCAGAGGGTAGAGGGAAAGGTCTCCA
+AGTCCTACAGTTCCAAAAGAAAATAGCTAGTAATGCAAATGCAAAAGTCATATAAACTTTGAGACAAAGTTAGCGTCTTA
+TATGGTACGTTCAGAAAGAAGAGGTGTGTTGCACACCAAGGGACATTTTCCTGGTCTTAGCTACTAAGTTTCTCTGAGAA
+GAAGCAGTTACTACTACAGCAGTTAAACACCTGACTCCCAAATTATAAAAGCCTAAGTGTTCAGATTAAATCATGAGTTG
+TCACCTGTGAATCATAAAGTCTGAACTCTTTAAATTGCAAACACACATAGAAAAACGATTCTTACACAGGGATCCATGAG
+TGGAACTGCAAAGTTTTGTCAAAATCTATATATATGTTTTCTTTTTAAGGGTCATAGATTTCAATATATGTTAAAAGATA
+TCTGGGGCTAGTAAAGATTAATAACCATTTTCAGTTTTGTGCTGAAATTGGCAAATCATGGCTTTTCTCCCTCCAACACA
+CCCCCTCCCGCCTCAGCACACCACTGGTTAATCTTTATTTTCAGGAGAGAAAAAGATGTTTCATATTAATAAAGGTAAAT
+TATACACTTGTTTGTTTTATACTTTTCCACCTCTTAAGCTTAATCGTGTTTTAATAATGTATAATTGAATACATTAACAT
+TTAGATGTTAGTAAATGAATAACAAATGTTCATTGTTTCTCTTGAGTAGAGGAATAAAAATTTTGCACCTGTTGTATAGA
+ATGATTTTTAAAATCTCATTCAATCCTTCAGAAGTTTAAGATTTTTTTTCAAGAATTTTTATATAAAATTTTTACTCAGA
+AGCAGATAGCTCACAAAAAGTATGGGGGTTTTTTGGATTATAAATAAACTATTCCACTCACCAAGATGCTGAGACATCCT
+ACTTTGTGGTATTTTGAAATAAAAACAAAACACAAGAATGTGACGATGAAGCAAGTGAAAATTAAAAGGGTACAGACTTT
+AGAAGTATCATAAGCTAAAAATAAAATGAAACAAACCTGTAAAGGAACCAAAAGCCCAGGTAGATATAATACAGTAAATT
+ATGAGAAGCTAAAAAAAATGAGGAGGTGATAAACAATAAAGTGAAAATCTCTGATAAAGCCAAAACTACTTGAATGGCAG
+TCAACAAAACAGAAGAAAAAAATGACACTCATAACAAGGCAGAACTATGGAAATGCATCCCTGGGGGTAAGATAATAGAG
+TTGCGAATTAGTGGAGGTGGATCACAAATACTGGCCAAATAAAGCCCGCTAACATAATTCAAAGGACAGAGAAATTCTGT
+AGCTGAAGATAAAAAGTCTGAAATACATTATGGTATAGTCAAATGAGTGTCATTTCTAATAGACACACTTATTTTTCAAG
+AGTTGATAATATGAAGTAAAAATTTAGTATGCAAAGTTCAAATTTTCCATTTGTTGAGTAATTCATTTTTATTAAATATT
+TTCCAACAATTAAACTAAAGTGGTACCAGAACAGAGTAGACATTCTGAGTTTTCACTTTACAAAAGCTACACATAAATCT
+ACGTACTTCTTAAGGGTATAAACAAAGTCAGAGCCTGTGTGATATTTCAATAAACAATTGGAGAAAATTTTTTTAAGAAT
+AGATTATATACATATTGACCTCTTTGAGAACCTTATTTTTTTCTTTTTTTTGTGTGTGTGTTTGTTTTTTTTTGTGTGTG
+TGTGTTTTCATTTCTTTTTTAAATTATTATTATTATACTTTAAGTTTTAGGTTACATGTGCACAACGTGCAGGTTCGTTA
+CATATGTATACATGTGCCATGTTGGTGTGCTGCACCCATTAACTCGTCATTTACATTAGGTGTATCTCCTAATGCTATCC
+CTCCCCCCTCCCCCCACCCCACAACAGTTCCCAGTGTGTGATGTTCCCCTTCCTGTGTCCATGTGTTCTCATTGTTCAAT
+TCCCACCTATGAGTGAGAACATGCGGTGTTTGGCTTTTCATGCTTGCGATAGTTTACTGAGAATGATGGTTTCCAGCTTC
+ATCCATGTCCCTACAAAGGACATAAACTCATCATTTTTATGGCTTCATAGTATTGCATGGTGTATATGTGCCACATTTTC
+TTAATCCAGTCTATCATCGTTGGACATTTGGGTTGGTTCCAAGTCTTTGCTATTGTGAATAGTGCCACAATAAACATACG
+TGTGCATGTGTCTTTAGAGCAGCATGACTTATAATCCTTTGGGTACATACCCAGTAATGGGATGGCTGGGTCAAATGGTA
+TTTCTAGTTCTAGATCCCTGAGGAATCACCACACCGACTTCCACAATGGTTGAAATAGTTCACAGTCCCATCAACAGTGT
+AAAAGTTTTCCTATTTCTCCACATCCTCTCCAGCACCTGTTGTTTCCTGACTTTTTAATGATTGCCATTCTAACTGGTGT
+GAGATGGTATCTCATTGTGGTTTTGATTTGCTTTTCTCTGATGGGCAGTGATAGCATTTTTTCATGTGTTTTTTGGCTGC
+ATAAATGTCTTCTTTTTAGAAGTGTCTGTTCATATCCTTCACCCACTTTTTGATGGGGTTGTTTTTTTCTTGTAAATTTG
+TTTGAGTTCATTGTAGATTCTGGATATTAGCCCTTTGTCAGATGAGTAGGTTGTGAAACTTTTCTCCCATTCTGTATGTT
+GCCTGTTCACTCTGATGGTGGTTTCTTTTGCTGTGCAGAAGCTCTTTAGTTTAATTAGATCCCATTTGTCAATTTTGGCT
+TTTGTTGCCATTGCTTTTGGTGTTTTAGACATGAAGTCCTTGCCCATGCCTATGCCCTGAATGGTATTGCCTAGTTTTTT
+TCTATGGTTTTTATGATTTTAGGTCTAACATGTAAGTCTTTAATCCATCTTGAATTAATTTTTGTATAAGGTGTAAGGAA
+GGGATCCAGTTTCAGCTTTCTACATATGGCTTGCCAGTTTTCCCAGAACCATTTATTAAATAGGGAATCCTTTCCCTAGT
+TCTTGTTTTTGTCAGGTTTCTCAAAGATCAGATAGTTGTAGATATGCGGCATTATTTCTGAGGGCTCTGTTCCATTCCAT
+TGGCCTCTATCTCTCTTTTGGTACAAGTACCATGCTCTTTTTGTCACTGTAGCCTTGTAGTATAGTTTGAAGTCAGGTAG
+CGTCGTACCTCCAGCTTTGTTCTTTTGGCTTAGGATTGACTTGGCAATGCAGGCTCTTTTTTGGTTCCATATGAACTTTA
+AAGTAGTTTTTTCCAATTCTGTGAAGAAAGTCATTGGTAGCTTGATGGGGATGGCATTCAATCTATAAATTACCTTGGTC
+AGTATGGCCATTTTTACAATATTGATTCTTCCATGAGCATGGAAGGTTCTTACATTTGTTTGTATCTTCTTTTATTTCAT
+TGAGCAGTGGTTTGTAGTTCTCCTTGAAGAGGTCCTTCACATCCCTTGTAAGTTGGATTTCTAGGTATTTTATTTTCTTT
+GAGACAATTGAGATCCTTATTGTGAGATTCAATTCAGAATCTGGGATTTTGTTTTGTTTTGAGATGTAGTCTCTCTCTGT
+CACCAGGCTGGAGTACAGTGGCACAGTCTCTGCTCACTGCAATCTCTGCCTCCCAGGTCCAAGCGATTCTCCTGCCTCAG
+CCTCCCAAGTAGCTGGAACTACAGGCACGTGCCACCATACCCAGCTAATTTTTGTATGTTTTTTTAGTAGAGATGGGGTT
+TCACCATGTTGGCCAGGATGGTCCGGATCTGTTGACCTTGTGATTTGCCTGCCTCAGCCTCCCAAAGTGCTGGGATCACA
+GACGTGAGCCACTGCGCCCAGCCCAGAATCTGGGTCTTAACCAGATTTGCCATTATAAAATGAAATGAAACCAAAACCAC
+CAGTCAATATCTGTAGCAGTGTTATTGTTGGGGAGCATGCACGTTTGTTTGAGCTTACTTATATATACAGAGCTGTTTAG
+CTTAATCTTTAAATTGAAAATTTTTAGTATTTTTAGAATGTTTTAGATACTTTGATGAATAAATTTTGTGTTAACAAACT
+ACTATTAATAAAACTTATAAATTAAATATTTCTGAACACATTGGCAGAGGTAAATGATAAAGTTATATTCACACAATTAG
+TTTTTCTTGCACAGAATTGACATTTTGTCAAAAAGTTAAAATCTTTGAAATGTGTTTTCTGTCCTCATCTTTCCCCTTTT
+GAGCATTTTTCCCCCTGGGTTGGTTATAAAATATTCCCTAACTTCAGGCATCTGGTTAAGTTTTGGGTTTTCTCCCTTCC
+TACCAACTCATATGGATTGGTAATGATCTGATAGAAGACAATTGCCAAAAATCAATCAAATATGTTTTAAATGAAACTTC
+TCCAATATAATATTCATATTAATCCTTCTTTAATTGCTGAATGAGTAATAATTGTTCACTGACCCTTAAAATCCTAAAAC
+TTAATCAGCTCACTGGAGTGGATGAATGGATGTTTGTCTAGATGAGAATGCTTAAGGTCTACTTCAAGGAGTTCATAATA
+GTAAAAATGTCCTTTTCTTCCATCAGAAACTTCATCCTGGATCTTTTCATCCTTGTTTTTCTACTAGCTCTCCTTCTCCA
+AATACACTGACTTGGAACCCCAGTTTCAGTGGTGGACTGCGTGGATTTGATTCTAAGCTATGCTATAATGAGAAAGTAGC
+TCTAACCCTATTCTCATTTTCTTCACCTGTAATATAGAAATCATCATCAGACAATATACAGATAGGGGCCAGTTATTTCA
+CAGATAATAATAGGTGAAAATGGAGAGTATGTGGGTGTAGGCATGGAAAAGTGAGTTTAAATGGTTTCAGGAGTCTGTGG
+AAATTGTCTTTGGGTGATTCAATGTTCTCAGGGAAGCACACCCAAAAAGACATTGAGGATTTCTGGGGAAGTGTTAGGGA
+TTAGAGACAGGATAAGATACAAAACTAGGAAAATTAGAGAGTGAATAGATTGGAACATATGTAGTATGATTGATTGATAG
+GCGGCACTCAGATTCTCAGTTTCAAGTGGGATATTTTTCAATTTGATCCATATAGCTAGTTGCTCAAATATATGTGGGGA
+TTTGGTGGAAATTTGGATCTAACCAGGGTTATTTTCCTGGTTAGATTCAGCAAAGTGAAAGAGAAACAAGGAAGTTGAAG
+TTTCCTGGGAATGGTTGGCCACGGAATTTAGGCAGGAGAACAGGAAATAGAGTACATCAGTAGGTAAGGGTCATTATAAA
+TATGATAAAATAATAGTTTGGAGGTCTCAGAGAGATTGAAAAATTTTATGTCAGCGTATGAAAGGGAATGATTAAAGGAG
+GAGTTCAGAGAATGGGATGAATGAAATTGTGACTACAGAGGAGTTGCTATTATAAGTAACGACGATGCTTCACTCAGTTC
+CTCTGGAAGTGTGGTCCCTGTGCTAGCAGCAACAGCATCACCTGGGAACTTACTAGAACTGCATATTCTCAGGTTACACC
+CCCAGATCTTCCAAATATGAAATTCCAATAGCAGGTCTCAGCAATCTCTTATTGTAACACAACTGTAGGCAATTTTGAGG
+CACACAGTAGTTAGCACAACTGGTCTAGTGTATGTCTTATTTGGTCTTCCTCCCCGGTTCACAGTACAGAGCTCCTAAAA
+CTTGAAATTTCCTGATAGAGATGAGAGGACCATCCTTTACTAGTCATAAGTCCCTCTTAGCCATACCTGAGTTTATGCTA
+TTGACATGACATGAGTGGTGGTTGGAGACCCGTATGGCTTCAGGGTGGGTGCTTGACACCAGAAACATGAAGGCATAACT
+AGATGGTTAAAACTGTCAGCCCTCTCCTCCATCACCTCTGAGGTGCTGGGAGGGTGATACAATGATATACTTAGAGGTCA
+TATGGCAGCTTTGTGCCCCTTTTCACCATACCTGGCCCTATGTGTCTCTTCTATTTGACAGTTCCTGATTTGTATCAGAC
+AGTAACTGTAAGCAAAGTGCTTTCTTGGGTTCTGTGAGCCATCTTAGCAAATTATAGAACCTGGGGAAGGAATCCATGGA
+AGTGCTAATTTATAGCCAGTTGGTCAGGAGTATGGCAGGCCCACAACTCCTCTCTAATGGTGGGGGGGTGGCAGTCTTGT
+GGTCCTGAACCCTGAACTTGTGGGATCTGATGCTATCTCCAGATAGATAGTCTCAGGATTGAATTGAATTGTAGGACACC
+ATTTGGTGTCCTGAGAGTTGAAAAATTGATTGGGGTGAGACTAAACCAACAAATTTCATCTCAGAAGTTTGAGTAGAAAT
+AATTCTGTTGGAAAACATTATCGTTAGATCAAAGGTGTTAATAGAACTGAGAGATCAAAATGGCAAAGAGTCATCTATGA
+TTACACATTGTTGACATAAACTAGAATTAAGACAAGAGTATGTCAAAGAGAGTTACATTGAGCCAGGGATAAAAATAGTC
+AATTGATTGATTAGAATTTAGTAAATGCTATTTTTTGAAAAATTAATATTTAGTTAATTAATGGAGATACTTAATTATCT
+CCATTTTGACTTTAAATTTAAAACTAAAAAATTGATATTTTATCTATTTTCCATAAATGGAAATTGAAAAGCAATAGCAA
+AATGCTTCGACAGAAAATCTGCCCTTCTTATTCCCAAGAGGAAGAATGGCCAACTACCTTAAGAATAAGTTAGAAAAACT
+TTCATAATTTTTTAAGTTGCCATATTTTATAGCTTTGAACAAGTTGAATTATTAACTAGACTGCCCAGGGAATGGAGCCA
+TCTAATATAATTTCCTTACTATTTAGATAAAATCTTTGTTGTTGAAAATCTTAATCATTTTTAATGATCCTGTTTTCAGA
+GCCTGATATTAATGATTCGTGAAACTCATTAAATGCAGTTATGCATCTGATTTGAAAATCATGAGGGAGATTTTTGGTCA
+GATATGCATAGCAAATCTATGTCACTAGACATAAGTCCCTTCTACCCATGCCACAACCTTTATGTCTACTCTTGAATTGA
+GGTTTTGCTACGTTTTATTTTTGAGGTTTAAGAATTATTCATTTAACTGTCGGAAAATTGTGTTTCTCTTGTTAGAATAG
+AGCCCAATTTATCCTTTCTATTTTTCATGGAGACATGAAAGTATGTGAATTTGCAATCAGAGAAACCTGAGTTCCACCAT
+GCTTTCTTCCTCTTGCAAGCCATGAAATCCACAGTGGGTTAGTGGATATTACAAACCTCTGGTCCCACCTTATTAAGATG
+GAAAAAAATAAGACTAATTGAAAGAACAAATGTGGTAATGAGAAGAAATGGTATCAATAAAATTCCTGGAACACAGTAAG
+TTCTAACAAATGCTAGTTCTTTTCTTTCATAATTCAAAATCTAAATAGAAGCCAGTTGGTAATTCCAAGATAATAGTATG
+AAGACAAAAAAAATGTTTGTTAACTGACCTTCAACCATTGAGACCTTATCTTATAGGAAGCAAAATTAACAAATATTTTC
+TCCCTTCTACAAATGTATACCCTCTGAGAAAGAGAGACAGTGTTCCAATCAAGACGATGACAGTATTCTGTGCACACATA
+GGAAAGGAAGAAGAATCAGAGAGAACCTTTAAAAAGACAATTTTAGATAGGCATCCTATTTTTACCGGCAATAACATTTT
+TTAGAACTTACGGCATACAAAATAGACCTTAAATGGACTCAGTGTTGAGAAATTTTTATAAGGCTGCTTTATTTTCTTCT
+ATGGTAAATCTAGGAACCAATAAAATAGCTTTGATAATTCACTCATTACATTCTTTCCTAACTAAACTATTTACATTATA
+AACTTTGTTGATATGCTGTGCTTTTGCATTAATAATTCTCATGTGTCTCATTTGTGAATACATGAAAACTAGTTGTAAAA
+TAAGGAAAATGTAATTTTGAAACATCTTCAAAAACAGTTGCATAATTTAACAAATGTTTATAAATTTTCGTCCTTACTCT
+AAAACCATAGATATCCTAAAATTGACTGAAGGTAAAATGATTTTTAAAGAACTGGATTTCTCACCATGAGTTATTTTATC
+TCCATACAAACTGTGGTAGTGTTATACAATGGTTAAGAAGAAATTACTTAAAAATGAGAATAAAGATACAGCTTATTAAC
+ACTTCATTTTGTGGATGAGACCTGAAAAATTAAACTTTTCTTCTAGAGACAAGTTGTAGTTGTGTATATAGAGTGTGAAT
+AGATAGTGTCCATATTTTACTGTCATTATTTAAACAGAAACATGCAATTTGCACTTCAAAAACCTACTACAAATTAACTG
+TGTCATTTGGAAGTGTCAAAATTATCTCATTAATGATCAGATAGGCTTTTGGCTCATTGTAATGAGATGTTCCTATAAAC
+CTAATTTGAAAGACAAAGACAGAGTTTGGAGTCTTTAATTCTTGGTAAAAATCAGTAGCTCTATTTAGGGCTATATTTGC
+AAAATACTATTTCAACTCAAGCATATTTTTATTTACAAGGCACTTCACAAATGTTGACCACAAAGATGTTTCTACCTAGC
+AGAAATAATTATAGTTACATATAGTTGTCTAGGAATATCTTAAACATGGGGTATTAAAATAACTTTCAAGAACAAGAAGA
+AATGTTACACCAGGCATTTTTGCAACTAGTGCTCTATTTCAGTGCAATATTTGTGGCAATTAAAAAACATAAATGGCTTG
+GATATATAGCTAAGTTATCAAAGGAAAAATATGCAAAGGCATGATTATTTTATAAATGTGCCACATATGTGACTTGAGGT
+ACAAATCCTCTAAGTTTAAAATTGTCTTTTTAAAAAAGAAATCAAAACAGACAAGTGGGCAATGAGTTATGGATACTATT
+ATAGGGTACCCTGATAAACTAGTAAGAAGACGACTTTTGGTACTGAAAATTCCAATTAACCACATGCCCCCATTAAGGCC
+ATGATCACTGCCAATTTTTACTCACAAAGGCTGGAGCTAGAGTTATATGGAATTCCATATGTATCTAAACCATAGATTTA
+TCAATTATCACTTAAATACAACTTGGAAGACACTATCCAAAATTTGTAGGGCTTGGCCTTATTCTAGAGAAACATACACT
+GGAATAGGCAGTTGGATTTTAAATTCAGTCTTAAATCTGTTGAAAATAATGCATTTTTTTCTCAGAGATGAGATATTGTT
+CAACTCTTTTGACTTATTTTAGATTTGATAATATACAGTTCAAACACCATGGAGATTTACTAATCTGAATGAACACATAC
+CAAATTTGTCTTTGCTCCAGTCTCGAAATTTGAGATTAATTCTAGCTGCTGAAGAAATTTTCCAGTGTTAGGAGTATGCT
+TCAATGCTAGTGACCGGTACTACTTTCTGGTAAACTATAGATGACATGTGTGCTTTCATTGCCTAAACTCACTTTGTAGA
+ACACCATTAAATGAAAATCATCACCAGCAATTTGGGAGGCTAAGGCGGGCAGATCATTTGAGGTCAGGAGTTTGAGACGA
+GCCTGGCCAACATGATGAAACCCCGTCTCTACTGAAAATACAAAAACTTATCCAGGCATGGCGGGATGCACCTGGAATCC
+CAGCTACTTGGGAGGCTGAGGCAGGAAAATCGCCTGAACCCAGGATGTGAAGGTTGCAGTGAACAGAGATGTTGCCATTG
+CACTCCAGCCTGGGCAACAGAGTGAGACTTCATCTCCAAAAAAAGGAAAAAATGAGAGAGAGAGAGAAAAAAAATTATCA
+GTGTTCAGTGACTCTTTGTTTTTCATGCTGATAAACTATTACGCAGAAAAAGAAAATTTTCATGCTGATAAACTATTACA
+CAGAAAAAGAAATGTATCCATTATGGACAATTAAAAGGGCTATACAATCTCTCTCATTGTATGATTCTAGTTTTCTTTAT
+TTTTCTTTTCTTTTGTTCTTTTTTTTTTTTAATACAGGATCTCACTCTCTCACCCAGGCTGGAATGCAGTGGCTCTATCT
+TGACTCACTGAAACCTCAGCCTCCTGGGTTCAAGCTATTCTCCTGCCTTAGCCTCCTGAGTAGCTAGGAATAAAGGCATT
+CACCACCACGCCAGCTAATTTGTGTATTTTTTTAATACAGGTGGCGTTTTGCCATGTTGCCCAGGCTGGTCTTGAACTCC
+TGACCTCAAATGATCCTCCCACCTCGGCCTCCCTAAGTGTTGGCATTACAGGCATAAGCCACCATGCTTGGCCTGATTCT
+GTTTTTTAACACAATTCTTATTACTTCTGTTTCAATTTTACAAAGCCATAGGTTTTTAATCTGGGAAACCTTTGAGAAAA
+CATCCAGATAAATTTATAAATAGAAAAATAAATTCAGACACATATAATTAAGTTTATGATCTAACAAGTGGGTGACAACC
+GAAAACTTTCCAAGTTCCATAAACTGAAGTGAAAGGATCACTTTCCTGAAAATATTACAAAGGCTTGATAATTACCTGGA
+TTGTACTAAAACCTACACAATCTAGTATATTCTATAGCAGCAATTCATAAAGACAGCTTCACTTACCATGTAGAATTTAT
+ACATACATTAAATTTAAGGAATGCATTACTTGCTATTGGAGTAGGATAATAAACTCCCAGCAGAACTGATTTTCATGAAT
+GCCTATTAAACTATCATTGATGTAGAAATTAAACAATATATTAAGCTATTCAGACCCAGATTGTGAGAGACAGAGAAAAA
+CTTCAAAAAAGTTCAGCTACCAAATCCCTTCCTCAATAGCTCTTTGAGTCTCAAGACAGTTTCAACCACACACATCCTCA
+GAGTCTTATGGTCCAAACAACATTTGACTGCAGCTCTAAAGTAAGCAAACATTTGAAAAGAAACTCAAAGATCTATACCT
+GTAGTTATGGTCAAGGAAGTCCACCTACTGGTGGGTGAGCATGGCAAGGATAGGGACTAAGCCAAATGATGGGTTTACCA
+AAACAGAAACAAGAGAAGTCATTGGAAGCACAGTCCAGGTTATAACAGGAGCCCAGGCACTTGATGTGGGAACCCTATTT
+GTCTCATTCAGCACCTAGAGTTTTAACGTCCTATGAGGCACAGCTAATGCCCTTCATTATTTTTTTGTAATTGACTGCAT
+AGTTTAGAACTATGCCTTGAATTTAATATCTATCTGTTGAATAAATGAATTAATAATGATGAAAATCTGTCCACAGGTTA
+TGTGCAGTACTGAAGAGCAACTTCAGCTATTGAGGTATGTATCAGAAGTCTTACTCTCCAAAAAGCCTCTGGTTAGTTAC
+TGATATACGTTGCTGATAATTTCACTCTCACAATGTAGGGGAAGCAGGAATGTTATCTGGCCTCTTGGCTCCACTTCCTT
+TGCCTTCCACAGATCTCTCTGCTGGTTCACAGAGGAACACTTTCGTGGCTCTCAGTTCATTTTGCAATTCTGCTAGAATC
+TCATTCCATCCTCAGTTGTAACTACCTTGTCCTTCACCCAACGCTTATTTACGGAGGGACAGATAGCAAGAAGAAATCGC
+ACTCTGGATTACAAAGTGGGAAGACACACCCCTTGGCCAGAATGCCTGCCTCCTATGTTCATGAAGGATATTTCTAATTG
+ATCTCGGCACTCTTTTCCATTGAGCCCATGTCCCAAATTGGCCGTGAGTTAGGGAGAGGACTCTAGAGTGGGAAAGCTGT
+ACCCAAAACTTCCCTAATCTGACTATCAAGCCTAAAAAATTAAACTTTCAATTCAACATCCCTTGTTTCTCCAACTTGGA
+GTGCCTGATGGTGTGGGAAGCCCCTACTCATAAAAATTCATAGTTGAAGATTGTTGGTAATTTTGCCCAACCTCCTTCTT
+TTCTTTTTATTCTACTTTAAGTTCTAGGGTACATGTGCACAACGTGCAGGTTCATTACATATGTATACATGTGACATGTT
+GGTGTGTTGCACCCATTAACTCGTCATTTACATTAGGTATATCCCCTAATGCTTTTCCTCCCCTAGCCCGTCACCCCACA
+ACAGGCCCCGGTGTGTGATGTTCCCCTTCCTGTGTCCGTGTGTACTCATTGTTTAATTCCCACCTATGAGTGAGAACATG
+TGGTGTTTGTTTTTTTGTCCTTGTGATAGTTTGCTGAGAATGATGGTTTCCAGCTTCATCCTTGTCCCTACAAAGGACAT
+GAACTCATCATTTTTATGGCTGCATAGTATTCCATGGTGTATATGTGCCACATTTTCTTAATCCAGTCTATCATTGTTGG
+ACATTTGGGTTGGTTCCAAGTCTTTGCTATTGTCAGTAGTTCCTCAATAAACTCCTTCTTTCTTTGCCTAGTTACACCTC
+AATGCAACCTGCTGGCAGGCCAGCCTTAAGCCCAGCAGTCCAACCATGCATCTAAGCTGAGTTCTCATCAGAGCCAACTT
+CAATTATAAACAACTGAGATGATGCCAGAGGCCAGAATCCCTGAGAACCCAATTTGCTGTCATCACCACAGCTGAAACAA
+GGCGCTTGGCATCTAGAAAAGCTGATCTGTAGGAAATTTCCAATAGAGTTCAAGTTTTTCTAAGTTTAAAGTGTACAATA
+TCATCAAGGTGAGACACTCAAGATAAAACTAGGCTTGTGTTTAAATATAATAAAAATATTTGAAGAATAGTAAGAGTGGT
+GACTACTTTTACACAGGAATGTTTCAAATAAATTGTTTTTTTCTTGTGCCTTTTAAATTTTGTGATTTGTCAATGAGATG
+CTGACATGTTTTTAATGAAGCATGCCTTGTAAAATAATAAGTGAATGAAAACTACACAGTAATATTTACATCAACAAAGC
+TAGAAGAAGACTGTATGTCTCAAGAGGCTCCATCATCTACCCAGTGGCCACTTCACCAAATTAAGATCCAAATACCAAAA
+AGAAAAGTAATTAAAATTTGTGTTGTATGCCTTATAAGACCAAACCTATAAAATGAGGGGAAACTTCACAATTACTTGTA
+ATTTGCTCTGTGGAATTGTAACAGATGGATTATAAAGATGATTCCATGATAAACATGTAAATGTGGTTACTACCTTCTGT
+GCTTTCTTGGTTCATATTTTTATTGTTTTTTTCTTTATAAACAGATAATTCATTCTTATCTATGGCAATGCAAACTTTTC
+AGATGTATTGAATATGTCATAGTGTTTTTCTATTGTAGGCATCTCTAAAAAAGTTATAGAAAACAAACAGAGAAAAGTCT
+GGTGTTGGAAAGGAGAGAGAAATAAAGCTTGTAAAGGAATGAAGATTAGAGCTTTCTGGAGATAAAAATTCTTAGAAGAT
+ACAAAAGCTAGTTACTGAATTAGATGAAGGTTCTTTTTCAGTTTTTCCCAGTTGTTCACCTCTTAGAAGGCACATATTCA
+CAGCAGAAAAGAAAAATGTGTTAAAAAGAAAATACCTCGGTGTAGTGAGACTGTAATTTCATGAGGGCATTACTGAAGAC
+TCACTTTTTCCAGCATCACTCAGCATGGTATTATGCATGTGTGTAATGTAAAGGAAAGTGAATACGCATTCTTGGATGGG
+TGATCAAATCGTAATTCCAGAATATAATCAGAAGCTGATTCATTATCCTCTTGAGATTTCACAATCTTTTCATCAGGAAT
+TAAAATTCAGAAGAGGGTAAATGTATCTAATCAGTTGAAAGTTAAAAATTAGAGGATGGAGGCAACATGGCCGAATAGGA
+ACAGCTCCAGTCTACAGCTGCCAGCGTGAGTGATGCAGAAGACAGGTGATATCTGCATTTCCAACTGAGGTACCAGGTTC
+ATCTCACTGGGGAGTGCTGGACAGTGGGTGCAGCGCACCATGCATGAGCCAAAGCAGGGAAAGGCATCACCTCACCTGGG
+AAGCAGAATGGGTCAGAGAATTCCCTTTCCTAGTCAAAGAAAGGGGTGACAGATGGCACCTGGAAAATCAGGTCACTCCC
+ACCCTAATACTGCGCTTTTCCAACAGGCTTATCAAATGGCACACCAGGAGATTGTATCCAGCACCTGGCTTGGAGGATCC
+TACACCCACGGAGCCTCACTGTTTGCTAGCACAGCAGTCTGAGATCAAACTGCAAGGTTACAGTGGGGCTGGGGGAGGGG
+CTCCCGCCATTGCTCAGGCTTGAGTAAGTAAACAAAGCAGCCAGGAAGTTCGAACTGGGTGGAGCCTAACACAGCTCAAG
+GAGGCCAGCCTGCCTCTGTAGGCTCCACCTCTGGGGGCAGGACACAGACAAACAAAAGACAGCAATACCCTCTGCAGACT
+TAAATATCCCTGTCTGACAGCTTTGAAGAGAGTAGTGGTTCTCCCAGCATGCAGCTTGAGATCTGAGAATGGGCAGACAG
+CCTCCTCATGTGGGTTCCTGACCCCTGAGTAGCCTAACTGGGAGGCACCCCCCAGTAGAGGCAGACTGACATCTCACATG
+GCCGGGTACCCCTCTGAGTCAAAACTTCCAGAGGAACGATCAGGCAGCAGCATTTGCAGTTCACCAATATCCACTGTTCT
+GCAGCCACCGCTGCTGATACCCAGGCAAATAGTGTCTGGAGTAGACCTCCAGTAAACTCCAACAGACCTGCAGCTGAGGG
+CCCTGACTGTTAGAAGGAAAACTAACAAACAGAAAGGACATCCACACCAAAACCCCATCTGTATGTCACCATCATCAAGG
+ACCAAAGGAAGATAAAACCACAAAGATGGGAAAAAACAGAGTAGAAAACCTGGAAACTCTAAAAATCAGAGTGCCTCTCC
+TCCTGCAAAGGAATGCAGCTCCTCACCAGCAATGGAACAAAGCTGGACGGAGTATGACTTTGACGAGTTGAGAGAGAAAG
+GCTTCAGAAGATCAAAGTACTCCGAGCTAAAGGAGGAAGTTCGAACCAGTGGCAAAGAAGTTAAAAACTTTGAAAAAAAA
+TTAGACGAATGGATAACTAGAATAACAAATGCAGAGAAGTCCTTAAAGGACCTGATGGAGCTGAAAACCATGGCACAAGA
+ACTACATGACAAATGCATAAGCCTCAGTAACCGATGTGATCAACTGGAAGAAAGAGTATCAGCGATGGAAAACGAAATGA
+ATGAAATAAAGCATGAAGAGAAGTTTAGAGGAAAAGGAATAGAAGAAATGAACAAAGCCTCCAAGAAATATGGGACTATG
+TGAAAAGACCAAATGTATGTCTAATTGGTGTACCTGAAAGTGACGGGGAGAATGGAACCAAGTTGGAAAACACTCTGCAG
+GATATTATCCAGGAGAACTTCCCCAATCTAGTAAGGCAGGCCAACATACAAATTCAGGAAATACAGAGAATGCCACAAAG
+ATACTCCTCGAGAAGAACAACTCCAAGACACATAATTGTCAGATTCACCAAAGTTGAAATGAATGAAAAAATTTTAAAGG
+CTGCCAAGAGAAAGGTCAGGTTACCCACAAAAGGAAGCCCATCAGACAATCTGCTGATCTCTCCTCAGAAACTCTACAAG
+CCAGAAGAGAGTGGGGGCCAATATTCAACATTCTTAAAGAAAATAATTTTCAACCCAGAATTTCATATTCAGCTAAACTA
+GGCTTCATAAGTGAAGGAGAAATAATATACTCCACAGACAAGCAAATGCTGAGAGATTTTGTCACCACCAGGCCTGCCCT
+AAAAGAGCTCCTGAAGGAAGCACTAAACATGGAAAGGAACAACTGGTACCAGCCACTGCAAAAACATGTCAAATTGTAAA
+GACCATCAAGGCTAGGAAGAAACTGCATCAACTAACGAGGAAAATAACCAGCTAACATCATAATGACAGGATCAAATTCA
+CACATAACAATACTAAGCTTAAATGTAAATGGGTTAAATGCTCCAATTAAAAGGCACAGACTGGCAAATTGGATAAAGAG
+TCAAGACCCATCAGTGTGCTGTATTCAGGAAACCCTTCTCACCTGCAGAGACACACATAGGCTCAAAATAAAGGGATGGA
+GGAAGATCTACCAAGCAAATGGAAAACAAAAAAAGGCAGGGGTTGCAATCCTAGTCTCTGATAAAACAGACTTTAAACCA
+CCAAAGATCAAAAGAGACAAAGAAGGCCATTACATAATGGTAAAGGGATCAATTCAACAAGAACTAACTATCCTAAATAT
+ATATGCACACAATACAGAAGCACCCAGATTCATAAAGCAAGTCCTTAGTGATCTACAAAGTGACTTAGACGCCCACAAAA
+TAATAATGGGAGACTTTAACACCCCACTGTCAACATTAGACAGATCAACGAGACAGAAAATTAACAAGGATATACAGGAA
+TTGAACTCAGCTCTGCACCAAGCAGACCTAATAGACATCTACAGAACTCTCTATCCCAAATCAATGGAATATGTATTCTT
+TTCAGCACCACACCACACCTATTCCAAAATTGACCACATAGTTGGAAGTAAAGCACTCCTCAGAAAATGTAAAAGAACAG
+AAAGTATAACAAACTGTCTCTCAGACCACAGTGCAATCAAACTACAACTCAGGATTAAGAAACTCACTCGAAACCACTCA
+ACTACATGGAAACTGAACAACCTGCTCCTGAATGACTACTGGGTACATAAGGAAATGAAGGCAGAAATAAAGATGTTCTT
+TGAAACCAACAAGAACAAAGACACAACATACCGGAACCTCTGGGACACATTCAAAGCAGTGTGAAGAGGGAAATTTACAG
+CACTAAATGCCCACAAGAGAAAGCAGGAAAGAAATAAAATTGACATCTTAACATCACAATAAAAAGAACTAGAGAAGCAA
+GAGCAAACACATTCAAAAGCTAGCAGAAGGCAAGAAGTAACTAAGATCAGAGCAGAACTGAAGGAAATAGAGACACAAAA
+AACCCTTCAAAAAATCAATGAATACAGAAGCTGTTTTTTGAAAAGATCAACAAAATTGACAGACCACTAGCAAAACCAAT
+AAAGAAGAAAAGAGAGAAGAATCAAATAGATGCAATAAAAATTGACAAAGGGGATATCACCACCAACTCCACAGAAATAC
+AAACTACAATCAGAGAATACTATTAACACCTCTACACAAATAAACTAGAAAATCTAGAAGAAATGGATAAATTCCTTGAC
+ACATATACTCTCCCAAGACTAAACCAGGAAGAAGTTGAACCTCTGAATAGACAAATAACACTCTCTGAAATTGAGGCAAT
+AATTAATAGCTTACCAACCAAAAAAAGTCCAGGACCAGATGGATTCAAAGCCGAATTCTACCAGTGGTACAAGGAGGAAC
+TGGTACAATTCCTTCTGAAACTATTCCAATCAATAGAAAAAGAAGGAATCCTCCCTAACTCATTTTATGAGGCCAGCATC
+ATCCTGATACCAAAGCCTGGCAGAAACAAAACAAAAAAAGGAGAATTTTAAACCAATATCCTTGATGAACATTGATGCAA
+AAATCCTCAATAAAATACTGGCAAACCAAATCCAGCAGCACATCAAAAAGCTTATCCACCATGATCAAGCGGGTTTCATC
+CCTGGGATGCAAGGCTGGTTCAACATATGAAAATCGGTCAATGTAATCCAGCGTATAAACAGGACCAAAGACAAAAAACA
+CATGATTATCTCAATAGATGCAGAAAAGGCCTTTGACAAAATTCAGCAAACCTTCATGCTAAAAACTCTCAATAAATTAG
+GGATTGATAGGATGTATCTCAAAATAATAAGAGCTATCTATGACAAACCCACAGCCAATATCATACTGAATGGGCAAAAA
+CTGGAAGCATTCCCTTTGAAAACTGGCACAAGACAGGGATGCCCTCTCTCACCACTCCTATTCAACATAGTGTTGGAAGT
+TCTGGCCAGGGCAATCAGGAAGGAGAAGGAAATAAAGGGCATTCAATTAGGAAAAGAGGAAGTCAAATTGTCCCTGTTTG
+CAGATGACATGACTGTGTATGTGGAAAACCCCATCGTCTCAGCCCAAAATCTCCTTAAGCTAATAAGCAACTTCAGCAAT
+GTCTCAGGATACAAAATCAATGTGCAAAAATCACAAGCATTCTTATACACCAATAACAGACAAACAGGGAGCCAAATCAT
+GAGTGAACTCCCATTCACAATTGCTTCAAAGAAAATAAAATACCTAGGAATACCACTTACAAGGGAAGTGAAGGATCTCT
+TCAAGGAGAACTACAAACAACTGCTTAATGAAATAAAAAAGGATACAAACAAATGGAAGAACATGCCATGTTCATGGGTA
+GGAAGAATCAATATCGTGAAAATGGCCATACTGCCCAAGGTAATTTATAGATTCAATTCCATCTCCATCAAGCTACCAAT
+GACTTTCTTCACAGAATTGGAAAAAAACTACTTTAAAGTTCATATGGAACCAAAAAGAGCCCACATTGCCAAGTCAATCC
+TAAGCCAAAAGAACAAAGGTGGAGGCATCATGCTACCTGACTTCAAACTATACTACAAGGCTACAGTAACCAAAACAGCA
+TAGTACTAGTACCAAAACAGAGATATAGACCAATGGAACAGGACAGAGCCCTCAGAAATAATGCCACATATCTACAACTA
+TCTGATCTTTGACAAACCTGACAAAAACAAGAACTAGGGAAAGGATACCCTATTTAATAAATGGTGCTGGGAAAACTGGC
+TAGCCATATGTAGAAAGCTGAAACTGGATCCCTTCCTTACACCTTATACAAAAATTAATTCACGATGGATTAAAGACTTA
+CATGTTAGACCTAAAACCATAAAAACCCTAGAAGAAAAACCAGGCAATACCATTCAGGACATAGGCATTGGCAGAGACTT
+CATGTCTAAAACACCAAAAGCAATGGCAACAAAAGCCAAAATTGACAAATGGGATCTAATTAAACTAAGGAGGTTCTGCA
+CAGCAAAAGAAACCACCATCAGAGTGAACAGGCCACCTACAGTATGGGAGAAAAGTTTTGCGACCTCCTTATCTGACAAA
+GGGCTTATATCCAGAATCTACAATGAACTCAAACAAATTCACAAGAAAAAAACAAACAACCCCATCAAAATGTGGGCAAA
+GGAAATGAACAGACACTTCTCAAAAGAAGACATTTTGCAGCCAAAAGACACATGAAAAAATGCTCATCATCACTGGCCAT
+CAGAGAAATGCATATCAAAACCACAATGAGATACCATCTCACACCAGTTAGAATGGCCATCATTAAAAAGTCAGGAAACA
+ACAGGTGCTGGAGAGGATGTGGAGAAATAGGAACACTTTTACACTGTTGGTGGGACTGTAAACTAGTTCAACCATTGTGG
+AACTCGGTGTGGTGATTCCTCAGGGATCTAGAACTAGAAATACCATTTGACCCAGCCATCCCATTGCTGGGTATATACCC
+AAAGGATTAAAATCATGCTGCTATAAAGACACAAGCACATGTATGTTTATTGTGGCACTATTCACAATAGCAAAGACTTG
+GAGCCAACACAAATGTCCAACAATGATACATTGGATTAAGAAAACGTGGCACATATAAAACATGGAATACCATGCAGCCA
+TAAAAAATGTTGAGTTCATGTCCCTTGTAGGGACATGGATGAAGCTAGAAACCATCATTCTCAGCAAACTATTGCAAGGA
+AAAAAACCAAACACCACATGTTCTCACTCATAGGTGGGAATTGAACAATGAGAAAACATGGATGCAGGAAGGGGAACATC
+ACACACTGGGGACAGTTGTGGGGTGGGGGGAGGGGGGAAGGATAGCATTAGGAGATATACCTAATGTTAAATGACGAGTT
+AATGGGTACAGCACACCAACATGGCCCATGTATACATATCTAACAAACCTGCACGTTGTGCACATGTACCCTAAAACTTA
+AAATATAATAATAAAAAAAATTAACAGGAGACTTTTATACAGATAAATTTGCTTTGCATCATGATCAGCACTAAAGAGCA
+TCATAGATTTAAACCATATTTTCACAGTTAAAACAATGTAACCAAATATGCAATATATTATGTAACATTCTACAAGATGT
+TTTCAAACATGCAAACTGACCTTTAAAACAATTTTGCATAAAATAACATTTCTAGGTAAGAGAAAATACAGACATGTGGT
+TAGGGCAAAATATTTCTGTTCAAATTACAAAATACTGAATGACAAATCACAGGATAGAAAAGAATTATATTGTACCTGGA
+ATTTGAAGTCTTCAAATTTATTTAGATATACGTATCTATATGTATTAATAGAGACATATTTAACTGATTATAAGTTTAGA
+ACATTTATATTTTAAAGATGTCATTCTTTAAAAATTATTAATATCTACAGGATTGATACAGATATTTTAATTTTAAACAA
+ATATTTTTCACTAATGTGACCAGTATTTATGTCCTATTTCATTAAACAGACATTTCAAGATTTTTTAGAGTTAATTTTTA
+AAAATTAAGTATCACAAATATTTCTGAAGTACTACCTTAAAAAATAAAACTCTTTCCTAGCTTTTCATATAGTTAAACAT
+TTTCATAATGACTAAAAAGATAAAAATAAATTAGCAACCATAATTAAATTAGAATTCTACTTTTTATATCAGATGAAATA
+CTACATAAAATGTCCAGAAAATCTAGTGTTAATGAGGCTGTGCAGCCTGGACATTTGATGTCTCATCTGTGGGAATGTGA
+ACTAACCCAACCATTCTGAATAATTATTGACTCTGTGTGTTAACAGTCTAGAGTCCTGAAACAAGGATCTTAATTCTAAA
+ATAAAATCCTATTCTAAGAATATACCAAGGGTTGATAATCAAGGAAGTTTCAAAGTTATAGAGAAGAACCTTCATCCTAG
+CATTATATATGTAATCATCCATATAAACCACATAGTGTTGAATGAAATTACAGAAAAACAAAGAGCCTACAGAATATGTT
+TCTCTTTGTGTACAACAGTAAATGTACACATCTATCCATGTAAAATTTTACATCTCAGGAAGATAGGTACCAACATGTTA
+ACATGCTATATCTATAAGATAAGAAATTTTACTTTCCTACAATTTCATACACACACACACTCTCAAACATACATACCCAC
+ACACCGAGGATATATACACACACAAATATATATATAAACATACATGTTTATATAGGTACACACACACATACATATACACA
+GATGGTCAACAAATTATAATGGTTCAATTTATGATTTTTTTTACTTTATGATGAAGCAAAAGTGATAGCTTTGAATAGAA
+TATAACCATTCTGCTTTTCACTTTCAGTACAATATTCAACAAATTACATAAAATATCCAAAACTTTATTATAAAGTGGGT
+TTTGTATTCAATGATTTTGCCCAATTGTCAGCTGATGTAAGTGTTCTGAGCACATTAAAGGTATGCCAGGCTAAGCTGTG
+ATATTCAGTAAGTTAGGTATTAAATACATTTTTAACTTACTGTATTTTCAATTTATAATCAGCTTATCAAGACTTAACCC
+CATTGTACATTGAAAAGCATTTGTATTGCATTAAACATGTTATATATATTTTTAATTGAAATTACTGGCAACTTTTGAAA
+AAATGTAATGAAATAATGTGCTTTGCTATTGGTTAAAGTGTTTTTGGGCAGGGTCAGTGGCTTACACCTGTAATCCCAGC
+ACTTTGGTAGGCTGAGGTGGGCAGATCGCTTGAGGTCAGGAGTTTGAGACCAGCCTGGCAAACAGGGTGAAACCCCATCT
+CTACTAAAAATACAAAAATCATCTGAGTGTGGTGGCATGTGCCTGTATTCCCAGCTACTAGGGAGGCTGAGGTGTGAGAA
+TTGCTTGAACTAGGCAAGCAGAGGTTGCAGTGAGCCAAGATCAAGCCACTGCACTCCAGCCTGGGCAACTGAGTAAGATT
+CTGTCAAAAAAAAAAAAAAGAAGGAAAGAAAAGAAAAAAAGTTTTCAGCTTCAAGTAACAGAATAGTCAACTAATTAAAG
+AAAAAAACAAGGAGCAACTTTTACCAAATCGCACAACAAATAGATGGTATCAATGCTTACTTAATTCAACATGATCCACT
+TTGTGATTCTCTTATCTTTCTACCTATGTTTTCAAGATGGCTATGGCATTCCCAGCCTTACAACTTCATGTGACAAAATC
+CAATGGCAGGAAATAATAAGTGACGGCATTTTGGAACTTTAAGGAAAAACTTTAAGGATTGGAAAACAGGGATGCCATAT
+GTCCAGTAACATGGGATAGCCGTTCCTAATTAAGGATTGCTGTATGTCCTGAATGACTTTCTAATGTAATGAAGGTGAAT
+AACCTGCTTTGTTTAATTATACAATCCCAAATGTAACTGCCTAAGTGCCAAGTACTTTTGCATGGTTTTAATATGCCATG
+AATTTTGCAAGAATGAAACTACTATGTATATGAGAAAAGATGGTAGATTTTTTCATTTAGATACTCATTTCAGAGTCTCA
+ATAGGAAAAACAGAACACACTCAAACTAAGATAATTTAACATGGGTTTATTTCCAAAGAGACTACTTACGAAAATGTCAG
+GGGAGAAGGGTGAAGGAATCACAGAAATCACACCCCACAATAACTCTGGGTTAGTAGCAGCAGGCAGAGTTCTGGGGAGT
+AAACTTCCCTGATCACGCAAGGAAACACTAATTTTCTTATGAAAAAGATTGGTCAAGGAGACCTAGCAGAGAGGGAGTTT
+AGGCAATAACCCCCAATCCCATTTCCTCCTTCCCTTTAATCTCATGAACATGCATAGAAGGCAATGAGCAAGGGAACAGA
+TTGAGATATTCAATATGGACCATTCCACTTGGACAAAAGCCAGTAAATAGGGGAGTATAACAGGGGACAAGGGCCAAACA
+GAAAGTATCTATTACATTTACCAATGCAGATCAATCATGTCAAAAACAGCAATGGCAGTTTTGGCATTTGGTTATTCAAT
+GAAACTATATTTGTAGTTTTTTATTTACAATGTTTCAAGGTATGGGCGAAGATATGAAAATATCATTATGTTCTATTTTA
+TTTGATATGTAAATGTTCTGGCACAATTATATACATTTTGGATCATATATTGACAGTAAATTATTTCTCCTTTGTTAGCT
+TCAGCATAACATTAATATTTTCAACAACTTGAATGCATACATTGTGTTCTCACTAATAGTTTAGAATAGTGAAGGGGTCT
+TTGAAGAATTTCTTATGAAGTAGAGGCATTGGGTCAGACAGAGACGAAAACCATTGGCTTAGATTAATCACTATTTATCC
+TAGGGCTGAAGAGGGCCAATTGAAGACACAAATGTCCAATCCCTTTATGACCTTGGAGTTCTATGAACCAGAGAAATAGA
+GACTGGTTCTTAAGGAGGTAATAAGTGTGCCTTCTATGGTTTATAATAATATGCATTGTCTTAACAATCGAATACTTAAA
+AGTTAATGCCCTCATAATATGCAAAATTTTTAATAAGCACATTGGTCCCTGTGTTCAAGTAGTTGGTAATCAAATTAGCG
+AAGTATGGTAAGTCTTTTGAAGAGCATTAAACAGCAGAGAATGTAAGAAATTTGTCTCTATAATAGTGTGGGCCTTCTGT
+GTTTCCTAAGGGCACATTCATATGATCTAATAACTGAGCAGAGAGAAGCCTAAAGGATAGTTTTTGGTTTCATTTGTTTG
+TTTTTCCAATTAACATACATTTCTAGATTAAAGGTCAACTATCTTTTCCTTGGGAGTCAACTATAGGAGAAGATAGTGAA
+GAGGCAAGGAAAATTACAGCAGCTCAACAGCTTTACCCTGGAAATATCTGTATTGTGTACCGCTATAGCCTACCTTTATC
+AGTCAGATAACAATGTCAAAGAGGCAAGCTTGGCACCATTGCTCTTAGTGAATCCCTGCTGCCAGCCTTTTATCCTAAGT
+GCTCTCCAAAGACCCATTTAATGGACCTTCCTGAAATTGTGCTCAGGATCAATATGAGGCTCATTGATACAGCATTTTTG
+GAACTCACCATTTGCTTATTTATTAAACCTGGGATAGTTGCCCGACTTCTATCATCTGAAATCCTCTCCATGACTTTCAA
+AGTTACCAACATTGGTTCCAACATCATTCCTAGAAAACATTTCAGTGGAGTATATTTATCTGACATTTGACTGTATTTAT
+AGCAGTAAGAATCTATGATGATTCCTCCCAAAATAATCCCACAATTACACATTTTGGGTCACCTTGAGGGTCTTATCCTT
+AATATCAGAGATCTGTTCTATACATTTGAGTCTTAGAAATAATTACTAGCTTCTCTCTGAATTTAACTGAGGTTCCTTAC
+TTTTATTCAGCCTGTTACCTAAATTACATAATGTATTGAAGGCTATAAACTGTCCAAATTTCTGATTACATTGAGTTAAA
+AATGGGAATATGACTTTCTTTTTCTAACTTGGCAAAGATTATGTTTTTTTTCATGAAGAAATATAGGCATATTTAAACAC
+TCAGGATCTGAGAATTTGAAATACATGAAACTTAGCAAGTGGTCTTTAAGTAGTTTTCATTTCTAAATTATAGTACATAC
+CATTGAATGCTACAGAAGGAAAGACAATGAGGGATTACTCATAAACATTGCCTTTATGATATCCTTCCAAAGAAAACCAA
+GTGTCCCTGTGCTTGGACTCATAAAGCAGAAATGCTTACCTTGCTTTTATGTCACTAACAAACAATTGTGCCTAGTCATT
+TAATTCTAACGCATTTAAGCCATCTAGAGAGTGGAGCCCCTAGGGAAAACTACAAATGGAAAATTCATTGATTTTTGATT
+TTGCTTTCCTATGGATTAAATTTAACACTTTCTTGTAAATCTCTCCCTTTACAGATCATATGTGATAAATAGTTATCCAA
+CTTTCTAGATGTCCACCCAGAGCTGCTTCCATATGAAGAGGTGGAAACGGATTCTGTGCATGATACTGGTTAGGATCGGG
+AACTGTTTTTTTTTAAAGGAGATTTTTAAAATTTGTTTTCCTTGTACACATAGCCTTAATATTGTGTTGCTAACTGCCCT
+CTGGGAACAAAGTCATACCAATCACAAATACATTGCCTAAAGTGGGATGGTTATTACTCTGCTGTGAATTGCACCTCCTA
+TTATGTTTTAAAGATGCTACAAACTTGAGACATTACTATGTAGATGACATTCCCAGCAAATCCCCAAACAACACTTAATG
+AGATTTCAAATACCAGGATCATGAAACAGTAAGAGATATAAGGAAATAAAGAAAAACACAGTATATGTGTAGAAGGCATT
+AAAACACAAATTTTATAATTACGAATGCATATAATAAGGAACAAGTTTTATTCTAAATAGCAATTATATCAGAACTACGA
+TACAGGGAATAAAAGTGAATATTAAGCCATTGTACCAACAATAACAGAGCCCCTTTTCTGCTGCGTGATGGAATTTAGTG
+TTTACTAAGGAAGAAAAAAATGCAAGATATCTTAAATTTGGTACTTTCCAACAAATTTAAGAAATGTGTACACACTTGAG
+GAGACTTAAGAAGTCTCTGACTAAAACACTAGAAAAGAAAAGCAGTGAAAAACCAAATAAGTATTACTCAACAATAACAA
+CATTTGTTACTATTATATTAATATTAAACACACTATTAAGCAACAACATTTCAGTTACTCACTGAGCCCAAAGTCATTTT
+TCATGAACCATTCACAACACTGATAACCTATTGAAAAATGATTGAAAACAAAAAAGCTTATTTCTTTCCAAGTCCTTTTA
+AAGCCTTCTCTTGTGTCATTTCATATAATCCTTATAACAAATCTATGGAGTATATACTGACTTTATCTCTGTACAGAAAA
+GAAAACTGAATCACATTCAGAATAAGTTGCAAAGATCCCCAGATAATCAGTGTCATAGGTGAAATATGATGTCACAGTGT
+ACTCAAGAGTAATTTAGGGAAGCAGTACAAATGATGGTTGAAAGCACAGGTTCTGGGATAAATAGCCTGAAGTCAAAATC
+TAGTTCATATCCTCAGTGGCTGTGTGATCTTGGGAAATTTCCCTTAACTCTCTGTGTCTTGATTTTATAGTTTCATCCAT
+TTTTGCCTCTCATATGATCTGGTGAGCGTTACACTAGTTTATGCTTCTAGGACACTGAAAACAGTGCCTGGCTCTTGATA
+AATATTAACAATTTTGTTTCATTTTCTTCACCTTGAGTATTTCATGGCCAGATGGAAATCAATGTCAATATGAAAAAATG
+TTCATAATTCAGCAGGAAGAAATACAAAACCATATAAACAACTTTGTTTTATTGGCAAGTGTATTTGGAAGCTATGTTAA
+ATTGAGAAGGGGAAGGGAAAGCCTCTAAGGATATCGATTAATCAATGACTGAAAGCTGATGAGACTTGTCTGGAATTTTT
+TTTTGGAGACTCTGAGACTTAAAATTTTGACAAAATAAGGAGATGAATTGAGTAGTGTGGAAGAGACATTTTCTGAAGTC
+AGCAGAGAAAGGAGAGACAAATATGGTAAGGGGAAAATGGTATGAAGTAATAATATGAATGAAGTGCTTTCTAAAGTATA
+ACACATGTAAGTTGTGTTAAATGTAAACTATCATGATTATATCATTACTGTCTGACTGATACAACTATATCCACCTCAGT
+TAGCCACATAAGATCAATTTTCCTATATCTTAGAGAATACCAGTAACCTCCAGTTCAGAAACAGCATAAATATCTATCCA
+CAGCTGAGCAGACACATACAGCTAGCTACCAGATGAAATGGGCTATATCTTCAAATACTATTGTTTTCCATTTTTTAACT
+GTGGCAAAACACACTCAACAATTTGCATCTTAACCACTGTTAAGTATACAATTCAGTGTCATTAAATGTATTCACAATGT
+TGTGCAAGCATCACCACCACCCATCTCCATAATTCTTTTTGTCTTGTAAAATTGAAATCCTGTATCCATTAAAAAGTGAC
+TCCCCATTCCCCGCCCTCCCAGGGCAACTACAGCTTTACTTTCTGTCTTGATGAATCATACAATATCAAATGGAATTTTT
+AAGATATTTAAAATCTTTTGTCTGGAAGCGAAGTGGAAGTTAATATCAATGGCTATGCCTTCAGTGAGTTCTCAGACCAC
+ATTGCAATGTAGTTAACTATTTATCTTTTATTACGTTCTAGACCTATTATTTCTTTCTGGCACAAAAATATCTTCTTTGT
+AGATTATGGCATTTACCTCTATCTCCACACCTTTCTGCGTCTGTCATCCACAGATCATCAGCCTCTTGCTTAGTCTCCAT
+TTAATGACAATTTCTATTTAGTATACAAGAGTCCTTCCCCCCGCAATGCAATTTCTTCACTCTGAAAAGCTCTGACTCTG
+GCTACTGGTCTCTGTTATGTACTCATCCCTAGAGCCATGTCTTCAAATATTTCTCAGAACTTCTTCACATCTAAAAATCT
+AGGATTAATGTATTGCTCTCTGATTATATTCTTCTGGTATTTCAAGTATCCTGTATAACTTCTTAATCTGCCCTTGCTTT
+TCTTCTACATCAAGATGTCAAGTTCATGGACTTCCCAATTAATCATTTTAAAAATTTATTTCAAGCCTTTATGAGTCACT
+ACAATGTACATATGTTCTCAGGAATTATGACCCTACAATTTGTATATGAGAATAGTTCCTGTGTCAGGCATTATGGACAT
+AAGAGTAAATAAATTCTAGCCCCTGCCTACAAGGAGCTAATTCCCAGCCTAAGACAGATTGTAAGCCAATAATATGCAGA
+GTTGTAAGTCTAAGGAAACTGATTGGGATGTACCTAATTCGAGCTGGGAATATCAGAAGAAAAACTTTTTGAAAGTCATG
+AGACATGAGCTGTGCCTAAAAGAATAAAAATAGAGTGAGAAATAAGAATAAATAAATTAGGGAATGAGATGGGTATAATA
+GTAGAAAAAGGAAATCTCTTTAGATAAAACAGGGTATGAGAGTATATAGGAGGCTTATTAACTCAGTGTGGTTGGAGAAA
+ACAGGTTGAAAGGAGAAATAGGGGAGAAAGTTACAAACGGAATCAAATGTGTTAGTAGTTGAGGGAAAAGCAAATATTGA
+ATACGACCCATAGATCTTGGATAAACCAGTGTTTGGTAGTAGCTTACATAAAAGGTTGTGACAAAATAACCAGAGATATG
+TGGAGAAAATAATAACTGAGTGTTATATTTCCTCTCAGTAGAAACCTAAAAAAAAAAATGTAATCACATACTGATCTTTT
+AACTGACCTCCACTTATCTTGCTGCCTGGATTGGTATATATAGTCCTCTGCCTGAAAAATTGCTCAACACTTTTGAAAGG
+TTGTTTCTCACTGTTACTCCCAGGCATTTCTGCTAACACCAGCTGAGTTGTAAGGGGCCCTCCAGGAGGCCTAGGTTGTT
+CTCCTTATCACATATATTATTTTCATTTATCAATTTACCCAGAACATCTAATGAAATATTATGGACACCAATAACATGAA
+TACAAATACAGCTATAGTCCCTAAAAAGTAAATTGATGTCTCTTGAAAGACTGTCATGTCTTAGATGCTAAATTTGGAGT
+ATGTGTTTCTAATAAATAAATAGCTTGCTTTAATAATTCTGATTTATGAATGTAATATAGGGGAGCAGCTTTTATTTCAT
+CCTTAATCTTAAAAAACAATTAGCCGGGTGTGGTGGCGGGCACCTGTAGTCCCAGCTACTCAGGAGGCTGAAGCAGAAGG
+ATGGTGTGCACCTGGGAGGCGGAGCTTGCTGTAAGCCAAAATTGTGCCACTGCACTCCAGCCTGAGTGACAGTGTGAGAC
+TCTGTCTCAAAAAAAAAAAAAGATCATACAAGGAGTAAATGATGATTTAGCATAAAACACAACTTGAGCCTAAAACACAG
+ATCATAGGGCTCAAACTTTGTTATTTTATGGGATTATACTAACCAGTTAATATTCAAAATTTACCATTTCAAAAGAAGAA
+ATATTGGTCTGGGAAAATTGCCATTGCTTATACAGAAGAATGGAGGAATCTTTTTGAGAACAACAATTCTAATGATCTTC
+TTCATCACTGGTTCCAAAACCAAGCCCATGGAAGGCCTGCATAGGGATAATGTGGAGAACATGCAAAAGAAATACACATC
+ACCAGCTCCTCAACTCAGAGCTCCAAGGGAATGGCCAAGGAAGCTGTATTGTCCTAAGAAATTTGAGACTGTTTTGACTT
+ACAATAAATCCTGCTCAATCATATGTATGTATACACACACACACACACACACACACACACACACACACACACACACACAG
+AGAGAGACGGTGTTGTCAGTGAAGCCTCTAACATTGCATCTTGACTGGACAGATTTGACAATTAAAATGGCTGGAAGAAA
+ATAGGTTTCATAAGTGTCACTTACTATTCACATTACACATTAAAAGGACATCTCCTAAATTAAATCTTGTCTATGATAAA
+AAGGCAGAAAATCATGACCCAAGGCTCATTTATAACTGCAGAAATGTTCAAGATAGACCAATGCTTGCTATGTTTAGGAA
+GTGACAAGAAAGTACAGGTCTTTATAATCTACCATTTTGCCCCTTTTGATACCAGGTGACCTTCATAATGCAGGCTATAG
+AGAATTACAAGATTTGTCAGAGATTTCAGGTCTTTGGAAAGCTATGCTCTTTCACAAGAACAGCCATGGAAACAGGAAGA
+AAGTCAATTCCCACTTAAGGTAATCAGAATTTACAAATATTCCTTTCTTCATTGAAATCTCAATAGGCACTTCGATATTA
+TATTTAATTAATTCTTTTGTTACAATCATGCATCACATAACAACATTTTGGTCAAGGATAGTGCACTTGTATGACAGTGG
+TGCAATAAAATCATAACACTGTATTTGTACTGTACCTCTTGTATGTTTAGACATGTATAGATACACTTACTGTATAGATA
+TAAGTAAGTATAGATATACTTGACTGTTGTGTTACAATTGCCTACAGTATTCAGTACAGTAATATGCTGCACAGGTTTGT
+AGCCTAGGAGGAATAGGCTATACCACCTAACCTAGGTATGTAATCAGCTATAGCATCTAGGTTTGTGTAAGTAAACTCTG
+TGATGTTCATACAATAATAAAATTGTCCAGCAATGCATTTCTCAGACACACCCCTGTCATTAAGTGACACATGACTGTAT
+TAGTTAACACTATGGGAACTTCATGGGAAATGGAAACTCTAGCTCCACCATAAATAGAAACTGTTTATGTCAAAAAACAC
+ATAAAAGCATAGTTGCAAGATTACAATTCCTTCCAGATATTTCATTTAGTACCTCCACAGATCATTAACTTCCTGAATAG
+CATTCTTGTGTTTCCACTGCCTCACATTTGATTTCAGGTCATTCCATCAATTGTCAAAGATGCATCCAACATTAACTTTC
+CAACTACTGGTTTGGATTGTGTCAACATTGTCATTCTGGGCCACAACAACACAAAAGGCTAACTCAGTGAGGTAACATAG
+ACACTGATCAGTGAGATTTTGCTGATGGGTGGTACCATTTTTTACCAAGATCAAGGGTATTCATACCTTCAGGCATCTAG
+ATAACATTGAAAAGGGCAGGTTCATAGATAAAAAGGGAAAGAGGAAGGGTTTTTGCAGTATATATGAGCTACCATGTCTC
+CTGTTTAACCTGAGTCTGAAGAGATTCAAAGTGCAGGTGCCATGAATGCCTCTTTAATAGCTCCATGTCGATTGATTTTC
+TGGGCTACACTTCACAGCATTCAAATTGCTAAATGCATATAAACTAGTAGTGGATATTTATTACCTGGATTTGTGCTTCT
+GAGTGACCTTATCTATAGTTATGTGCTTCAGTGATCCAACTTGGGGTGCACTCCAAACTTTGATAAATTAAAGACCCTGC
+ATATTCCTTGAGTAATGCTACACTGACAACTGATTTAGTGAATGCCACTGTGAAAATAATAAACCTCTTGGTGAATATAT
+TTAGACATGTCTTGGAACCTGTACACATCTTAACCAGGGATGTTACTTGGTCTTTTTATGTTCAGGAGTGTCTGGAGATA
+GAAAAACATACAGGATTGTAATTGTTGCCTGTGAGACTAACAGAAAATTCAAGCTAGTGAAGTATGTTTATGCATGGCAC
+ACTTCTCACCAGCAATTTTAGGAAGTTCTACCCAAAACCCATATGTCTTTTGATATGTGGGTTACCTTGGAGAGAGAAAA
+AGAAACTCTGAACACAAGCATGAAATACTTTCGAATTTTATCTAGAGTTGGACTTGTCCAAAAGCTTTTGTATAAACTTG
+CTTGATGAGGAACTGTTGTCCACTGGTACCTTCACCGTACAATCCACACACAACCAAACTTCACAACTGGACACCTAAAT
+TTGCACTTTGTTTGGTAAAGTTGAATGAAAAATGTTGTCTTTTAAGCTTATGTTCTCTTATGAAAGCTGTTATTAGATTA
+ATTTTACTCCAAGATATGCCAGTTAGGAAATAACTTCCCATTTAATTTCACCAGCTCCCTGTGGGTGGACTAATTACTCA
+TACATCACACTTACTCTGGTGGTGAAATGCCTCAAAAGAAGGTCTCCAGAGTGTTGCTTTACCTAGAAGGACAAGAATGA
+AGAGACATAAAAATCATTCATTAAGAAGGAAGAAGATTTTCCTTGCCTAGATTTTCACTTGTAGGATCCAATTAATTTTA
+TCACTTAATATTTTGTCCTGGTGGGCATTCACTTTACAAATTTGTAAGGTCAGTAGAAATGGAAACTTTTGGAAATCAAA
+CTGTAAATGTTCTGTAGACGTTGACAGAAATAGCTATTTGGAGCTGCGCTTTTACCAACTAGACACTCTGAGAAAATCTG
+TGAGAGGATATTTTCAGGAAGTCATTGAACCCCCTAAAAATCAAGTTTATTGCTTTTACCATAAATATAAATAATTTATT
+TATTATCAACCACTTTCAATATTCGAATAAGTAACTTTATTGTTTATTTCATTGACATGATCTACATAGACATGCAATTT
+ATCTTCTTTTGCGATACATTCAGAAAATTTGCTGCCTGAAAAAGCCATGTTTTGAAATTACCACCTGTTTGATGATTGTT
+TTTTTATACTTTAAGTTTTAGGGCACATGTGCACAACGTGCAGTTTAGTTACATATGTATACATGTGCCATGTTGGTGTG
+CTGCACCCATTAACTCGTCATTTAGCATTAGGTATATCTCCTAATGCTATCCCTACCCCCTCCCCCCACCCCGCAACAGG
+CCCTGGTGTGTGATGTTCCCCTTCCTGTGTGCATGTGTTCTCATTGTTCAATTCCCACCTATGAGTGAGAACATGCAGTG
+TTTCATTTCCACTTAAGTATCTATCTCTATTTGTCCTTTATTTTCACTTCCATCCTAGCTCTATCCCTTGTCTCCTTCGT
+GTGTCTCAGGATAAGTGCCCTATAACATTTTATACTTCTGGAAACCCTTCAATATAATGTTATTACATGTAAAAATAAAA
+AGTCTGTTTTAGCGAACTCTTTCTTCACAAATTATCTTTTCCTAGTTCCCTATTAGACAATTTTTGTAGGCTCCATATGC
+TATTAATTTATATTCACTTTTAGAAGAATTTTCATCAAAAGCAAAAGTCTTATGTAAATTGTTTATATGCTATTTGTACT
+TCTGTGTATATATACACGCATAGCCACATACATATATGTAACTCCAGAGGCCTGCCTGGACTGTTAAATAACTGTATTGA
+ACAAGAGCAAGAAAAAGCTGTTCAAATTTCAAACAACAAGTTGAACTGAAGAGCTCACATCTTTGAATGAAATAGACACT
+AGTAGCCTTTAAAAATTAAACATTTCTACTGTTTTAAAAAATTATAGGCATCATAGTATGAGGAAAAAATGTAATAATCC
+TTTTTAATTCTTGCCATTTTATTAACCACGGACTCCTTATGCCCTGACTTCATTGGTACCCCTTTGAGATTAAACAGAAC
+AATTAATTTAAAATTCTGATAAATAAATAAATTGCACCATTTACTTGAGGTACTCTTTCACCTTTCAAGAAATATTTTGA
+GAATTCATTAGCTAAATATTGCTATAATGCAGGCTAGTAATAAATAGAAATAATGATTTCATTGTAAAGTTTAAACTAAT
+GATGGTATGATCTGGAATTTTTGTTAACCTAGAAGACATAATTAGCTCTACAGGTGAAATGAAAGACAAAATCACTTCAC
+ATATTAGTAAAAAACAAATAATATAACTTTATAAGCTACTGCTGTTCTTTTGCTGTTAAGAACCAATGACATCTGTAATT
+AGGGAAGATTATGGAAAGGTTAAAGGTGAGAATACATGCAGTCTATATACAGGAAACCCAAACTGTTTAGGTTAAGGAAA
+TGTCTGCAAAAGTCATATTAATTAGCAGGTTCACAGGAAACTAACATTTATTGTATAAATTCCATGGTCAAATGTATTAG
+GGCTCCACGCACACAACTGTGCGCCATCTATGTTACCACACAGTGCTTACTTCTTTCAGTCTTTATTCATTTCTCTCCTA
+ATGGTCTGTATGGTATTATTACATAGTTGTAAAAACCTTCCATGAGGCAGAAATTTCATTTAATGATACAGCTTCAAAAA
+TAATGAAAACCAGTAGAAACTTTCTATTTTTTTATGAAGCAGAATTATTGAATTTTCAGTTTCATTTATTTAATGTTACA
+AGAGGACAAACATATTTCTTTAATTTCTTCTTGTTATTTTTGTCTAACATCAACCAAAGCCATTTCTTTCATTTGAAGGT
+GAAAACTGCCCATCAAAATCCAGTTTAAGCTGAGAAACAAAGAACTAAATTTTTCATGAGACATTATACACTGCATAAAT
+ATAAAACAGGACCAGAAACAATGGTAGATTAGCAATTTTTAAAAAGACAAAAGCAGGTTATCATCAAATCAATTGTTACC
+AACACCCCAATTCCACCTCTAGGCCAGAAAATTAGCCACTTACCCACGGGAAAACAATGATTGAAGACAAAGTTCACCAG
+TGAAAGTTGGTTATTTTTTTCCATCCTTGCATCAACTAAAATGATATCTACAGTTCCTCTTAACACTTTTCTTACTTTCA
+AATATGTGGGTATGCTTCATTAGCTGAGGCCATGTTCCATATGTACATCTGGCTACAAGGGAAACTGGGAATGTGAGCCC
+TGATCCTAATGTTTGTAGAGTATTTAGAAGTTTGAGAATTCCCTCCAAATAAAGAGTTTTCTAAAACATTGTTAGACAAA
+AAGTTTGAATTACAAAATATGGTAGTAGCTCAGGTCTAAACCAAATGCAGGGAAAAAACTATTAACTCTAAAAAAATTAA
+ATACACATTTGATGGTAATGAGAAACAGAAAACAGGCAGAAATAGTAGAATACTCCACTAATGATGAAGTATTTGACCAT
+TGAATAAAGAAAATTGCAATGATTTAAATTTGCATCAAGGCAACTTCACACCTGATGACGCTTCCCAGTCTGTGCAAAAT
+TAGATGTCTACGAGTAAAGCGGTGAGTTTTACTAGCTTGAGGAATAAGAGCACAGAGTTCCAGGCTGACAGAAAAGAGGA
+ACTGGGAAATTTGAATGACATGGGAGGACATCTCACACAACTGAAAGTCACAGAGGAGAATATCACAGAGTAAAAATCTA
+AAATCAGCACTTCAACTTCATTCAAATATATGATGGCTGCTACATTTCACATTCATAAAAGGAGACTCCATAGAATCCAG
+CAGAAAACAACAGCTAAATTGCTAGTACAGAGCAGAGATTTCAACCATTGCATATAGCTCAGGAGTGAAAGTTTGGTGTT
+TGACTACGGAAAGAAGACTAATGTTAGAAAAGAGTCATTCTTCAAAGGAAAATAAACGAACCTATCTCTACAAAATATCA
+TACACATAATCTAACATATAATTTAATCTGCCTAGACATAGAACCAGGAAAATATGACACACAACAAAAAAAGTAAACAA
+TGGATTAAGACATTGAAATGGCCCACACGCTGGAATTAGAATATAAGAAATTTAAAATAAGCTATTATAAGCATATTCAA
+GGATTTAAGGAATAGATGGTCATAAGAGGGAATATATGGAGAATCTCAACAGAGAAGTAAAAATGATAAAGAGATAATAG
+GACAAATTTTAGAACTGAGAGACAGAGTATCTTATATAAAAATGTCATTTTATGTACATACCAGTAAATTAAAGATGGCA
+CAAAAAATTATCAGTGCACTTAAAAACAGATCAAGAACAATTTCCCAATACAGTTAATACAAACGAAGAAAAATAATAAA
+AATAGGGCCAGTCTAGCACTAATATAATTGGAGTCCTAGAAAGAGTAGAGAAGTAAATGAGACAGAAAAAGCTATTTGAA
+AAAGTAAAGACCAAAAGTTTTCCTAATTGTCAGCATATATCAACTTACAGGTTTAAGAAACTCAAAGAACAGAATAAAAA
+TAAAGAGAACCAAATGTAGACATACCATAGTCAAACCACTGGGCAAAAAAAAGAGTAAATCTTGAAAGTAGCTAAAGGGA
+GGGAGAAAATAATTTACATCCGTGGAAACAAATAGACAGATGACCTGTCATCAGAAATGACACTTTAAAAAGCAATGAAA
+CTACATCTTCAAAAGAAAACTGTCAACTCCAAATTCTATAACTAGATAAAGTAATTCTTGAGAAAAGAAAGATTTATTCA
+GATAAAAGAAAGCTTCAAACAATTGTCATTAGCAGACCAAGATACAAGAAATGCAAACGGAAATTTTTTAGGCTAAAGAA
+AGATAACAACAGACGGAAATTCTGACCTACAGGAAGCAAGGAGAAGCTCTAGGAATGGCATGTGCATAAACGTGAAAAAC
+TAAGGCTTATTTTTTTAGTTTTATAACAAACAACTGATGCTTTGAATAAAAAATTAAGTGTACTATTGATAATGTGTGTA
+AAATATTCTAAATTAATAGCTCTGACAGGAGACTAAACGCAACAATTTTGCCGCAACTTTTCCTTATGTTACATGAAAAC
+GCTGACTATTAACACTAAGTGGACTGCGATAAGCCCAGGATGTTTATTATAATCCCTAGAGAACCACCACATTATATGAA
+GATATTCTTCTAAAATGCCAATAAAGGAATTAAAATGGAACGCTGAATATTGTTCAGTTAATATAAAAAGGCATGAAAGA
+AAGAGGAGCAAAAAATGATGGAGCAAATAGAACATAAGAGCAAAATAGGCTGGGCGCAGTGGCTCAGCCTGTAATCCCAG
+CACTTTGGGAGGCCGAGGCGGGTGGATCACGAGGTCAGGAGATCGAGACCATCCTGGCTAACACGGTGAAACGCCGTCTC
+TACTAAAAATACAAAAATTAGCCGGGCATGGCGGCGGGCACCTGTAGTACCAGCTACTCCGGATGCTGAGGCAGGAAAAT
+GGCGTGAACCCAGGAGGCAGAGCTTGCAGTGAGCCGAGATCCCGCCACTGCACTCCAGCCTGGGTGACAGGGCCAGACTC
+CGTCTCAAAAAAAAAAAAAAAAAAAAAAATAGTAGGCTTAAATCCAACCTTTTCAGTAATTATTTCAAATGTAATTTAAA
+TACTCCAAATAAAACACAGATTGTCAAACTGGATAATAAAAGTACCTATAAGAGATGCATGCCAAATATTATGGTATAGA
+TAAGTTGAGAGTAAAATAATTTCCAAGTATACCAAGGAAACAACAAGGACAAGAAATCTTATGTGGCTATATTAATATAA
+GAAAAAGTAGACCTCCAAACGAGCAATATTACAACAGACAGCTATTTCATAATGACAAAATGTCAAGTAATTATGAAGAC
+ATAATGCTGTATTGCTGACAGAATAACTAAAGAAAATTAAGATAAAATAATTTTGACAACAGCTTGACCTAATCGATATT
+GACCAAAACAATAGAATATATGTTCTATTATGCTACACATGAAACATTTATCATTAGGCTATAGACCACAAAATATCTCT
+CAAGAAGTTTCAAAACACTGTAATCATGGAAAGTATGCTTTCTGACCATAATGAAAATGAGTTGAAATGGGTAAAAACAA
+GCTACCCAGGAAAGTCTACACTATTGGAAGATTTAAATACACCTTAAAATGCCCTTTAGCTCAATGAAGAAATCATAAGA
+AACACCTTTAAATACATTGAACTGAATACAAATAAAAATATACTATATCAAAATCTATGGGATAAAGTTAAGCAGACCCA
+GAGTAAATTTTTTGTATAAATGCTTATTCTAGAAAAGAGAAGTTCAAAACAAGTGAACTAATTTCTACCTTAAAAAGAAA
+ATCTAAAACAAGAGAGCAAATTAAGTCCAAAACAAGTAGAGGAAAGGAAATAAAACAGAAATTAGAAATCAATGAGACAG
+AAAACAGAAACAGGAGAAAATCAACATGGCCAAAAGTTAGTTCTGTGAGAAAGAAAGAAAATGCAAATTATACATATCAG
+GGATTAATGAGATTGTACAGTTGTAGACACAAGAGACATTAACAAGATAATGGAATATTGTGAAACATTATATACAAATT
+TTCATTACTTGGATGAAAGGGTGACTTCCTTGAAAAAAACTTATAAAAAATTTCACAAGATTTAATGGAACATAGGAAGT
+AATTGACATTTATTAAAGTAATTAAATTAATTGTAAAATACCTGCACATAAAAACATAAAATTAAATAAATAAAAAAATA
+AGCAAACCCCAGATCTAAAGAATTTCACTGGTGAATTCTTTCAAACGTTTAAAAAAAATAAAATTTTTAAATTATTTCAG
+AAATAAAGAAGGGGGAAATTCCAAACTTGTTTTATGAGTCAGAATCCTGATACCAAAATTACAAAACCTAGGAATGCAAA
+ATTGACTTCAACTTAAAGAGCTATCACCATTCTAATCTGTGATTCTAATGAATTTGGCTATCTTAGATACTTTATACAAG
+TGGAATCATACAGTTTGTCCTTCTGTGACTGATTTACTTTACTTAGCATTAATGTCCTCTAGGTTCATCCATGTTGCATA
+TTGACGGGCTTTCTTGTTTTAAAGCTGAATAATATTCCGTTGTATGAATATACCACATTTTCTTTATCTTTTCATCTGCC
+AATAGACATTAACCTTATTCCCACATTTTAATTAGTGTAAATAATGCTGCAATGAGCCTGAGAATCATCCCAATCTCCAT
+TCTTTAAGATAAATAACCTGAAGTGAGACTACTAGATCATATGCTAGTTCTACTTTCTTAATTTATTTGAGGAACCACCG
+TACTGTTTTCCATAGAGGATGCACAGTTTTACTTTCCCAGAAACACTGTAGAAGTGTTCCAATTTCTCCCCATTGTTAAC
+ACTCGTTATCTTTTTCTTTTAATAAGACCATTCTAAAGGGCTTGAGGCAATATCTCTTTGTGGTTTTGATTTGCATTTCC
+CTGATTAGTGATGCTGAGCATTTTTTTCATGTATCTGTTGGTCATGTGGATATTTTCTTTGGAGAAATATCTATTGCTGA
+TTTAAAAAAAAAAAAACTCTCAGCAAGCTAGATAAAGAAATTTTCTCGAAGTGATGGAAAGCATCCACAACAAAACATAT
+AAACAATACTATATTTAATGGTGAAAGCGTAAATGTTTAAACCACATAGATTAGGACAAGAAAGGGATGTGTGTGTTCAT
+CACCTCTATTCAACATTGTACTGGTGATCCTAGATTTTGAGTATAAATATTTAGGGGACAACTATTGTCACTGAAGCCCA
+AATCTTGGTCATCCTCAGAAAAAAAAAAAAGTTGATTAGGTTGTATATAATCTAATTTTACTTGCTAATTTAATTTCTCT
+AAAATTGCAGCTTGTATCAACTCTAGAATTTATTGGGCACCTCCTAGGACATAAACACTGGAATTTGGTGAGAGACGTCA
+AATAGGAAAGAACCTGGCTCTGACATAAATTCAACACACGGAGGGGGACACATATTATGAGACTGACCTGGCCTCTTCAT
+CTCATAAAAGGGGTTCTTGTTGCTTGTAACACAGATTAAAACTATTTCAATTACATTCAAGATAAAAAGATTAACAATGG
+TATGTAAGATGAAAAAATCACCCCGCAGGAAGACGAAAGTCTCACAAAAGGATATTTAAGCTAGCCAACACTTTGAAATT
+CAGGCAGAGATCGTGATTTCTGGGTGAACTAAAGTAGCAAGAACAAAGTAGAGGCTCCAATTCTAGGAAAAATGGGTCCT
+AATAAGGTTTACAATCCAGAAACTCAAGATATCCAGACAGAAGGATGCAGTCTCAGCTTTCAAGGCAGCAGCAGTACCTG
+GATTACTAAGCCAATCCCCAACACAATCATAAACACAAATTTGATTGAGGAAGAAGCTTGCCCACCAGAAGATTTAGGTT
+ATTACAAGGTAGAATGTGATAGAGAAAATGAGCATGGGACTGGAAAGAAAAGGAGGTAGCCCCATGATTACAACTGGAAT
+ATATGTGTCAGCGATGGTGCGGAAATAAGACTGAAGAGAGAGATCCTTAAACCCCACGTGCCTTACATCAGGACTAATCC
+TGGACACAGGCTGGAAAGCATAGCCTACAGGTGGTGAGGCAGGAGGAGTGGGCTCAGCTGTGAGAAGGAGAAGGAAATAT
+GGCTGAAAACTAGATAGGGGTCTTGAAATCACACCGAGGATTTGGGCCTTTGCTGCTGTCTGCCAGCAGCTGCCAGTAGT
+TCTCACACTTTGGCTGGCAACAAAATCACCTGGGGCAGTGGTGCGGAAGCGGGGGGAGTGTTGTAAAACCACAAGTGACC
+AGGCAAAAAACACCTGTATGTTTTCCAATTCAGTAAGTACAGAAATATTAATTGGAAAAAGGTGGAGGTCAGACATTGAT
+AGTGCTGTGGACTGCTCCAGGGACGTAAGCATGATCTTTAGAGAGGTGACTCTAATCAATTGAGGGCAACCACTGGACAG
+AAAGAGGTCCAGACCAACCATACACAGAGACATCATACAACTACACTTTAGCAACCTCTCCAAATAACATGTCTCTTAGT
+GAAACTTGGGAGGTTGAAAGTTAAAAACATAAAATCCAGTGGCATTTATGTATCCTAGGCACTTACATTTGTCTGATTCT
+TCCAACTTGCCTTTGCTTGGTTAGAGTTTTGGGTAGATAAGAGGTGGATTTACATGTGCTAGTGTGAGAACTTTGATACA
+CTCATTTAGACATTGACCTACTATCTTGTTTGAAAGCTAAAACCCAAAGAAATTTGTCTTTTTACATAAAACAAATTCAG
+CCCTTGCCTTCCTTGTAGATTACATCTTCCACTAGCAGTAATAAAAAAGTAACAATATGCATAAGTCAAAGTATCTTCTT
+AGATCCTCTGTAGTGTTTTATTATTTAGTTGTGTTAATTAAAGTAACTGTCTCAAAATTTCAAGGAATGCCTGAGAAGAA
+ATTCATGTTCAAAGGCTGCCCTCTTGTGACAATGTGTTGTATGCTTTACTGTAAAAGTAACCTTATTTTACCTTATAACC
+TCTACAATCCAATTCATAACAGCATAAAAAGGAAACAAAACCTTACATAATTTTGAATTTTAAAAAGTACCTTGTTTATA
+TGGCTCCTTTAGCTAATGAATAGTCAATTTGGTAAATATTCCAGTGAGTTGAAGGTTTGAATCTATCTCACTTAACTAGC
+TTGATGAATGTATTTCTAAACCTATACAACCCACTCCTCTGCTTTTAAAAAATTAAAGTTAGCTGTAGATTGAGATGTCA
+GTGACACAGTTTATAGAACATAACTTAGATTGTCATCTACATTACTGTAACTACAAATACCACCCTCAGATGGAAGAATA
+AGTTTTATCAGTGAACATCTCTAATTGAACTATAAATGGTGTATGTCTTCTGGTTTTTACAAGCTCTTGGTCTAACACAG
+GGGATATATGATGTAAAAATTACAAAGCAAGGTCACTCACGGTGGCTCATGCCTGTAATCCCAGCACTTTGGGAGACCAA
+GGCGGGCGGATCACAAGGTCAGGAGATCGAGATCATCCTGGCTAATATGGTGAAACCCCGTCTCTACTAAAAATACAAAA
+ATTAGCTGGGTGTGGTGACACACGCCTGTAGTCCCAGCTACTCAGTAGGCTGAAGCAGGAGAATCGCTTGAACCCAGGAC
+GCGGAGGTTGCAGTGAGCCGAGATCGCACCACTGCACTCCAGCCTGGTGGGAGAACGACTCCATCTAAAAATAAATAAAT
+AAATAAATAAATAAATAAATAAATAATAAAATAGCAGTGACTATAATGTTTTGTGATGTTAAACTTTGAGAGCTTTTTTT
+TTTCTTTCTTTTCCCAAGTGCCTTTCCCAGTTCCAGGAGCAGAGTTATTCTAAGCCCACTGATGTAAAGGAATAGAAAGA
+AAAGGTTTGTTGGAAAACCTAATAACCTGCTTTCTGTCTTTTGTTTTTTAAAAGCTTGAGCATTTGGGAGAATTTGGAAA
+GATTGTGGAGTAAGTGCAAAGAAGGAATTTGCTAACAAAAATCATATAAGGTAAAATGAGTTTTTTCCAGGTTAGAAAAT
+ATCCACTCCCTACCCTCCTACATTCCTTTCCCATGGTTAAGAAGAGGAAAAAACAAAGGCCTCTTGGTGAGCAGTGGTGA
+CTTAGGCAGTTTCTTAGAAATATTCTAGAAGGCATAGTCATCTTTTAAAAAAAAAATAGCTACAAGGATATGTCTAAGCA
+GAAGGGTCCATGGGCCAAATTACGTGTAGATTTTTGCATTCCAAATATGGTAAAGAAGAAGCAGGAAGCTGGGGCTCCTT
+AACAGGTCACACAGAAATGGACAAGGAAGAGGCCAGCAGCAGCTTGTGGGGACAAGATGTCAAGCCCCAAATGTTAAACC
+CACCAACCATCCTCCAAATTCTGGCTCTGTTTAACAAGGCTGTGGTCTGACACTACATGCCGCCTCAGTGACTAAAGCAT
+AATTTCCCGTCTCCTGGGAGTGTTGACAGCTGACTCCTGGCAACAATACTCACAGCACAGTAAAGTTCCTTCATCCAAGT
+TCATGTCCCTTCTCAAGGCATCCCACATCCGAGAACTGCTTGGTACAGAAATATAATGGCCTGGTTTTCTTGCTCCAATT
+CGAGGTCTTTAGGTAAACTCACCAAGATCTTGTAGAGTGGACTGCGGCCATGATAGTGATTGCATTCCAGCCGACTTCCT
+GCTCCACCCAATCCTATTGCTTTCACTCTTCCACAGGTGTTGGGAATATCATTTCAACCTCCTTGCATGCAAATCTCCAA
+CTCGGAGTCGGCTTCCTAGGACACCTGACTGGTGATATCTCTATCACTATCACAGTACTTAGAGGGGAGCATCTTAAAAT
+GATTGAAGGCTAACTGCCCTAACAGCACAGACAGATGATGGCTTAAAATAGAATTTAAGTGGATTTAAAAAAACATGAAA
+AAAGTTGACATTGCACGCTCATATGAGCTTATGGATCAAACCATGTGTATGATTTCTAAGATCCCTCGTGCAGTATATAT
+TTGCACTGTTTATAAATGACATCCCCTTGAATTGAATTCAGTGCAATTCAAAACAGTAATTTGTGGGAAAAATTAGATAT
+GCAGATAGCCTGGACTCTAGAGAAGCACACATCATTTGGAGAATAATAGTGAACCGGCTAGTCTATGAGGGAAGAAAACA
+GAGTGAATATAGACTATTAGTGAACAAGGATATTTTCCCATGTATATTCAAATTAAGGTGAACTTCTTTAGAGAATTGTG
+CCTTAGAAAGAAAAAGTATTTCCCTGTATTTGAGTCCTCACAAGTTTTCCTACGATGAGTGCTTTTGTGATTATTTTGAA
+TAACAAAGTAATATTTAAAGAAACTCTCTTGCCCTGAATGTACTTTAATTTATGTCAATTGATGTTGCATATAGAGATCA
+TCAGAGTCAAGCTACAAGAAAAAGAGGAACATAACTTAGCAAAGACTTAGAGGATTAGTCCTAATAGTGTGAAATCAAAA
+TTTCAAGTGAGGGAATAAATGCAGCTTTTAATGATTACTTAAATGAAGTTTAACTCTAGCAGGATCTACAAGAAATTGGC
+AACCTTTGGCTTCAGTAACAGAAACTCAGGATATATGCCCTTCAGTGAGTTTGCATTCTGCATCATACTGAGAGACACTC
+CTTTAAACAGCAATAAGAAACTTCCTGTGACAACATAATAAATTCAAAAAGTTCTGTAACTCAGACAGTTTAGATAGAAG
+TGATAACTTTGGCCTATAAAGCCCTGTTCACCCTAAAGATGGATTAAACAATGAAAAAGATATTGATATCAAAAGTTCAA
+TCAGTAATTTGATTAAAGCATTTCATTAAATGTGATAATTTCTTAGTATATTATCTTACATATGCAATATTCATGTGTAA
+CAAATTATATACAAGTAAACATTTGACCATATTATCTGCAGCATAATTTACATATCAATTTACATATTCAGTTTTGCTCA
+CATGAAAACTTGGATCTTCTCAATAAGAAAATTTGTGAAATCTTTAATTCCTCATCCCCAGGAATGTAAATAAATAAATA
+TAAATACTGTAAGTCAGTGATTTTTTCCTAGGAAGACAGTAAAAATACTTTACACGCCTTCTCATTGTAAACCTAAATAA
+CAAACAGAGAAAGGCTCTCTAAAGGAAAAAGACACATCTTAGGGAGGAGGGCACTGCAATGGGAATATGCATGACAAAGC
+AAACTATATAGATTCAAATGGTAAAGGAAGACAAGGACTTTAAAAGAAAAATGATCAACCTGGAAGTATGGGGGAGTAGA
+AAAAAATAAATAAGGGAAAATGAAGAGGCTTCTATAATTGTTTTGATATAGTTACCTTTGACTATAAAGATCAACAACAA
+GGTCAATATCAGTTGAAGTTTGGGCAGGCGGTTGCTGGATAGATGTCCTCACAGAAGTGTATTTTGTGTAAGGTTGCTAT
+GGCCTTTGTGCAAGGTTGTGGTTTTTGTGGTATTTTGTAATAGTTTTTATCAGGCCTAGAAGCATGAGAACCCTCTCTTC
+AAGGCCTTCTCTGAATCTATTTGTTCAGGTTTCTTCTTCTCTTTTTAAACATTAGTGACAGTTTTGATTCTGATAACTTT
+CATATCATGATCCCTAATTTCAAAAAAAATTAAACGTGTAACTTATTTATGATGTATTTGATAGGCTTGGATTTGTGAAT
+CACCTCTACTATCTATAGTGGTAGATATATCTGAGCCTGCTTACACAGGACTTCCATCAGATTCTCTCTTCAGTGGAATT
+GGAAGAGGGGGTTTTGGAGAAGGGAATGAAGGAATTGTGCTGCATCAGAGGTCCCCACCTAACAGCACAGGGTCTTTTTC
+TGTGCTCTTGCCAGGCCGTTGTATTGGCTGATGCAAAAGTAATTTTGGTTTTGTCATTGAAAGTAATGAGACCATCTGAC
+TTACAATGTCTGTGCCATAACCAGCTCCTAATAGCCTCCGAGGACAGCTTTGCCCTGACATATACTGTCCTGGGACCACG
+TCTGCTTTGTAACTCCTGAGGTAGCTGCCACCATGACCAATGCCTTCTCATCTTTACTCTACATGCAGGTAACAGTTATA
+ATTATATCTCTGTGTATTCACAGAACATTTAGTACATCTGGGACTTCTACAAAATTTCCCTAGCTGATTTTGGTGTTCTG
+GGGTACTGGCAGCTCTAGCCATGAAGGGATGATGCTTTATCAGCATTTCTGTTGAGTTTTTTGCTTTTTCTCAAAAACTG
+ATTCCACTTTTCCCACATACGTTTGACAATTCATTTGAACTCATTTATTCTAAACCCATTATTTCTGGTCTATTATTTAA
+TAGGAAGGGAGGCGAGGGATAAAAGATTACAAATAGGGTGCAGTGTACACTGCTCGGGTGATGGGTGCACCCAAATCTCA
+CAAATCTCCAGTAAAGAACTTACCCATGGAACCAAATACCACCGTACCCCAATAACCTATGGAAAAAAATAAAAAATAAA
+GTAAAAAAGAGTCAGAAAGAAAGAAAATAATCACTGTACTTCCCAAGACACTTAGAAATGGTGACTAGCATTATTTGTAA
+GTGTCACCTAATAATATTATTATTTTTTCTTTATGCTACAAACTAGCTCTGATATTACAATCACTACTATTTGAAGTGTT
+ACTAAATTAAATTAAATGTATTGTATCCTTCTGGACTCAGAAATGGGGGTAAATGCTAAGACAATTTCAGAATATGATCA
+CATATTAAAAGCATATTATATGGGGAAAATTTGATTTTTAAAAAATAGAAATTGGTAGAGAAACAATTTTTGGGTCTTTA
+ATGTACGCACATTTTGCAAGTATGGACGCTGAATTTATTGTTCCAAATGATTGTTTTCAAGCATGTTTGTATAGAAAACA
+GCCTTGGAATAGAAAACCAGTACCTCCTTCTGGAACAAAGGAAAAGTTTATTTACTGTCTAGTATAATGCAGATAATTTC
+TCCCTCTTGGGAAGCATACAGCCAAGTGAACTTCCAATAACACAAGTTGGGGTTTCTTGAGCTTGGGTTTTTCTTCCACA
+GTGTAATGTGCAGGTGCCACTTGGCTCTCTTTGTGTTATCCTGCGAAAGCTGATGGCTATAGGTGCATGTGTTTATTTTG
+GGTTTCTCTATTATGTTTCATTGGTCTTTGTGTCTGTTTTCATATCAGTACCATGTTATTTTGTCTACTGTGGCCTTAGG
+GTATAGTATGAAGTCAAGTAATGTGATGTCTCCAGCTTTGTTCTTCTTGCTTAGATTTCCTTTGGTTGTCTGGGCTCTTT
+AAAAATCCATATGAAATTTAGAATATTTTTTTTTTCTGATTCTGTGAAAAACGACATTGGTTGTTTCATAGGAATAGTGT
+TGAATGTGTAGATTGCTTTTGGCAGTATAGCCATTTTAACAATATTGATCTTTCTAATCCATGAGCATGGAATGGTTTTC
+CATTTGTTTCGAGATCTATGATTTCTTTCTGCAGTGTTTTGTAGTTCTCCTTTTAGATATCCTTTACCTCCTTGGTTAGA
+CATATTCTTCCTTTATTTTATTTTATTTTGGGGGTTGCTGTTGTAAACAGGATTGCACTCTTGATTTACCTTTCAGTTTA
+AACATTTTTGGTGAACAGAAATGCTACTTCTTTTTATATGTTGTTTTTTTTTTATCCTGAAAGTTTGCTGAAGTTTTTTT
+TATCAGTTCTAGGAGCCTTTTGGCACAGTCTTTAGGGATTTCTAGGTGTAGAATCATATCATCAGTGAAGAGAGATAATT
+TGACTTCATTTCCTATTTGGATGCTTTTTATCGCTTTCTCTTGCCTGCTTGCTCTGACTAGGACTTGTTAAACAGGAGTG
+GGGAGAACAGGCATCCCTATTTTATTCCTGTTCATAAGGGGAATGTGTTGGGAGAAAAGCTGAGTGTTGGAAGACAAGCT
+GAGGCAGGGCCATATGTTTCTCATTCACTTGATACACCATTTCCTTTCAACCCCCACATCCTCACCACCTGTTTCTTTGT
+TTGAGCACCAACAAATAGCGTGGGCTCCCAGAGCTTGGGGACTTTGCAGACTCCATACTCGTGATGGTCTCCTGGTCCCA
+CTTTCTCTCTCAAACTGTCTTTTTCTCATTCCTTTGACTCTGCCGGACTTCATCACCCCCATGACCAAGTGTTGGGTCTG
+ATCACCCCAACATTCCTGGCGCCCAACATAGGGTGACAAAGACCCGGTGAAGGAAGGCTAGAGCATATGAAAGCAGAGGA
+CACATCATCAAAAGACACCCGAGGACATACAAAGATGGGGAGTGAAAGTTAGTACTTAGAATTTGTTATTACTCTTTAGT
+ACAGTAAAGCAGTTTTGCCCATGGTTTCCAGAACAAAGGACTATGCAGTTGGATGAATGGGAGAGAATTGGAAGAGATTT
+TTAATAGGCATATAAAGATGGAGCAGAAATTCCAGTTTATATATGGTCAGTGTGGGCACTAATAAAGGCAGCCCTTGAGC
+CATTTCAAACAGATGATGAGGCAGATTCAGATGAGGAAGAGGAGGATGAGTGTAAAAAACTAACTTCAAATTCTGAGTGT
+GAGGAGCAGCTACCGGAGGAGATTAAAGAAAAGAAAGAAAAACTTTAAAAAGTATGTTTTACTAGCCCGTTGGCTCCACC
+TGCTGAATTAAGTGAATGGCCACCTCCTCTCTCTCCCCTAAATGGGTGAGAAAATAAATTAGCTGAAAAACTTACTGCTC
+CTGTAGTTACAACATTAAAACCTGGAGCAGTTGGTGGTGCTAGACAAAATTCTATTCAAAAAGCTAAAGCCAAGGGAGAC
+CTTGAAGCATGGTAATTTCCCGTTACTATAATCCAGCAAGTAGGACAGAATATAGCTAATTAGCCTGCTTTTTCTTCTAA
+GTTACTAAAGAATTTAAGCAAGCCATTAGTCAATATGGACTGAACTGTCTTTTTGTGCAATCTTTATTAAAAAATATGGC
+TCTTGATAATACATTACTACCATATAATTAGGATACTTTGACAAAACCTGTTCTCACTCCCATCTCAGTACTTGCAGTTT
+AAAACTTGGTAGGCTGATGAACTCAAACTCAGGCAAAAGAAAACACACACGTGCAGCCACCTGTGCCTGTTTTCTTTGAT
+CAGTTAATATGAGTTGGTCCTAACTGGGGTTGATTAGAGAATCAAGCAGTAATGGAAGATGTTGCCATTATTCAGCTGTG
+CTTCATGTGCTTACATGCATAGAAAAGGATAAATGTTACAGGGGAAAAGTATCCTTATTTAAGTTCTGTCTGACAAGGAC
+CTAAAGAACCATATATTAATTCTATTGCTCAGCTCCAAGAGGCTGTGTATAAAGCCGTAAATGATCAAAACAGCTCAGGA
+TGTTGTAATACAGCTTCTTGCATACAATAATGCTAATGCAGAGTGTCAAACTGCTATTAGATATCTGAGAGAGAAGGCTC
+ATTTAACTAAATATATTAAGTCTTGCGATGGCATTGGAGGTAACTTACATAAGGCTATTCTTTTAGCTCAGGCTATGGCT
+AGATTAAGAGTAAGAAAAAATATGCTTCATTTCTCAGGCTCTTGCCTTAATTGTGGGCAAATTGGACACAAGAAAGGAAT
+GTAGAAAGGAATTCAAAAGACGAAAACTACTACCATGAATCAACAGAAAAGACCCAGTGTATGTCCCTGGTGTAAGAAAG
+GCAATCACTAGGCAAGTCCGTGTCATTCTAAATTTAGCAAAGATAGACAACCTCTTTGAGGAAATAGGAAGAGGGACCCG
+CCTCAAGCCCCTCAACAAACTGAGGCATACCCAGCACAGCCACTGCCCTTACAAATGTACAGCAATTGTCCCCCGCCTCA
+GCAAGCAGTGCTGCTGTAGACCTCTACAGCACAATTCCCATCTCCTTACTTCCTGGGGAGCCACGAAAGAAGGTCCCCAC
+AGGAGTTAGGGCACCCTTACCCTGAGGAACTGTTGGGAACAAGCCCCCACAAATCCGGCCATAAACTGGCCCCAAAACTG
+GCCATAAACAAAATCTCTGCAGCACTGTGACATGTTCTTGATGGCCATAACACCCACGCTGGAAGGTTGTGGGTTTACCA
+GAATGAGGACAAGGAATACCTGGCCCGTCCAGGGTGGAAAACCACTTAAAGGCATTCTTAAGCCACAAACAATAGCATGA
+GTGATCTGTGCCTTAAGAATAAGGGATACTTTTAGTTAATCTAATATCTATAGAAACAATGCTAATGACTGGCTTGCTGT
+TAATAAATACGTGGGTAAATCTCTGTTCAGGGCTCTCAGCTCTGAAGGCTGTGAGACCCCTGATTTCCCACTTCACACCT
+CTATATTTCAGTGTGTGTGTCTTTAATTCCTCTAGTGCTGCTGGGTTAGGGTCTCCCCAACCGACCTGGTCTCAGCAAGT
+GGTGCCCATTCATGGGGGCTCGAATACAGGTCAAAGGATCGCTGGAGCAACGATTGGAGAATGTGGAACTAGCTGGAGGA
+CATCCGAGTACTCTTAAAGCAATCCCCGTGGTGAGTCAGAAGGGGAGCTCGGAAGCATCAGGGTAACAATGGGACAAGTG
+TGGGCTGTGGTTCGTTCTACCTTGGAACTTTTTCACACTGATAATGAGGAGGAAGGAGAGTATATCAAAGTAACAGTAGA
+GGCTACAGACCAGGTTTATTTGCCACCTAAAGCTAAAGCAGAAAAGGAGGGAGAGGTTCATCCCTACCCTTCTGCACCCC
+CTCATTATTATTTTGAAGAAAACGACCCTCCAGATATTTCTTTTCTGGAGGACACTGGGTGAAAAGTAGTTGCCCTGGTG
+ACTGTTCGAGCACCGCCTTGAGCGACTGCTTAGTTCTATTCAGGCAGGAATTCAGCAAGCTAGACAAAAGTGGGATTTAG
+AGGCTTGGCAGTTCCCTGTTTGAATACACCCCCCAGATCAACAGGGAAATATTATAGCTACATTTGAATCTTTTCCTTTT
+AAATTACTCAAAAAAATTAAACAAGCTATAAATCAGTATGGACCAGGTTCTCCTTTTGAAATGGGACTGTTAAAGAATGT
+TGCTGTTTCCAGTCGGATGATTCCTACTGACTGGGACACTCTTACTCTAGCTTGTCTAACTCCTGCTCAGTTCTTACAAT
+TTAAAACTTAGTGGGCAGATGAAGCTTCCATTCAGGCTGCTCGCAATGCCTGGGCCCAACCTCAAATTAATATAACTGCA
+GACCAACTTTTGGGGGTTGGTGGCTGGGCTGGTTTACATGCACAAGTAGTCAAGCAGGATGATGCCATAGAACAGCTTAG
+AGGAGTGTGCATTAGAGCTTGGAAAAAATCACTTCATATGGAGAACAATACCCTTCATTTAGTGCTATAAAACAGGGACC
+AAGAGAACCATATGTGCATTTTATAGCTTGGTTACAGGAGTCTCTTAAAAAGATGATTGCAGATTTGGCTGCTTAGGATA
+TAGTGTTGCAGTTATTAGTTTTGACAATGCTAATCCCTATTGCCAGGCTGCTCTGCGACCTATCAGAGGGAAAGCACATT
+TAGTTGATTATAACAAGGCCTGTGATGATATCAGAGATAATCTACATAAAGCTACTTTGTTGGCACAGGCGATGGCAGGA
+CTGAGAGTGGATAAAGGAAATACTCTATTTCCTGGAGCTTGTTTTAACTGTGGGAAGCATGGTCATACTAAAAAGAATGT
+AAAAAAAAAATCAGCAAGTCAGGCCACCAGATAGGTGAAAAAAGAAAACTGCTGATCCTGAAATATGTCTAAAATGTAAA
+AAAGGAAAACTTTGGGCTAATCAGTGTCACTCTAAGTTTGATAAAGAAGGGAACCCGATTTTGGGAAACTCCCTGGGGGG
+CCCATTCCAGGCCCTGTTCTAAACCAGGGCATTTCTAGCTCAGGCCATTCCCTCACACCCGTACATTATCTGTCCCCCAC
+CATAGCCCATAGTGTCGCAGTAGACTTATGCTGCACAAAAGCTGTGAGCCTTCTGCCTGGGGAACCCCCGCAAAAGGTCC
+CAACAGGAGTCTGTGGACCGTTGCCAGGAGGGACAATGGGATTACTTTTAGGAAGGTCTAGTTTAAGTTTAAAAGGGGTA
+CAAATACACACTGGAGTCATTGATTCAGATTACAATGAGGAAATTCAAATTGTGATATCTACTTCTGTTCCCTGGAAAGC
+ACAGCCAGGAGTGGGCATAGCACAGCTCCTGAGTGTGAAGTATGTGGGAATGGGAAAAAGTGAAATTAAATGAACTGGAG
+GATTTGGAAGCACAAATAAAAAAGGCAAGGCAGCTTATTGGGTAAATCAAATTACTGATAAACATCCTATCTGTGAAATA
+ACTATCCAGGGAAAGAACTTTAAAGGTTTGGTAGATACAATTTTTTTTTTTTTTTGGTAGGAGTGGACATTTCAATCATT
+TCTCTACAGCACTGGCCGTCCATGTGGCCAATTCAGCCCACTCAATTTAACACAGTGGAAACTGCTAAAGCCCCAGAAGT
+GTATCAAAGTAGCTATATTTTGCATTGTGAAGGGACCGATGGACAACCTGGGACTGTTCAACCAATTGCAACTTCTGTAC
+CTATAAATTTATGGGGGAGAGATTTATTATGACAATGGGGAGCACAAGTTCTAATTCCATAGCAATTATACAGCCCTCAA
+AGTCAACATATGATGCACGAAATGGGGCATGTCCCTGGTATAGGAGTAGAAAAAAATTGCAAGGTTTGAAAGAACTGCTT
+CAAACGGAAAGACAAAGTTCCTGCCAAAGATTAGGATACCATTTTTGATGGTGGCCACTGTTAAGCCTCCAGAACCTATA
+CCTTTAAAATGGTTAACAGATAAGCCAATTTGGATAGAATAATGGCTGCTAAGCAAAGAGAAACTGGATGCTTTAGAGAA
+ATTAGTTACTGAACAATTAGAAAATGGGCACATAGCTCCAACATTTTCCCCTTGGAATTCTCCAGTGTTCATAATTAAGA
+AAAAATCAGGTAAATGGAGAATGTTAACTGACTTAAGAGCCATCAATTCAGTTATACAACCTATGGAAACATAACAGCCA
+GGATTGCCTTCTCCTACTATAATTCCAAAAAATTGGCCTTTAATAGTCACAGATTTAAAAGACTGTTACTTTACTACCCT
+TTTAGCTGAGCAAGACTGTGAACGGTTTGCATTTACAATTCCTGCAGTAAACAACCTGCAGCCTGCTAAGCGTTTTCGTT
+GTTTCACAGATGGGTCTAGTAATGGTAAAGCTTCTTATTCTGGATCAAAAGGTAAAGTTTTCCAGACACCCTATACTTCA
+GCTCAAAAAGCGGAGCTTGTAGCTGTAATTGAGGTATTGACTGCTTTTGATATGCCTGTTAATGTGATTTCTGATTCTTC
+ATACATGGTTCATTCCACACAGTTAATTGAAAATGCTCAGTTACGATTTCATACAGATGAACAACTGATAATAAAAACAA
+AAAAAGGGGGAGAAACAGGGATTACGGGTAGCCCATACACAATTGAATCTATCATTATTAACTTTCAAATTTTTGAGCCT
+GCCCAAAGGCCAGATGTTACCAGCAGCTGAACAGCATCTACAGAAACTAGCTGCAAAGAGAGAAGCAGAACAACTGGTTT
+GGTGGAGAGATCCAATAACAAAAAGTTGGGAAATAGGTAAAATTATAACTTGGCATAGAGATTATGCTTGTGTTTCTCCA
+GGACCGAATCAACAACTGATTTAGATACCATCAAGACACCTGAAATTTTATCATGAGTCAGATGCTGAGGAAGAGATAAA
+AAGCACAATCATCATTGAAATTAGAGCTTCTGGCTGGGCGCGGTGGCTCACGCCTGTAATCCCAGCACTCTGGGAGGCCG
+AGGTGGGCAGATCACAAGGTCAGGAGATCGAGACCATCCTGGCTAACACAGTGAAACCCCGTCTCTACTAAAAATACAAA
+AAATTAGCTGGGTGAGGTGGCGGGCGCCTATAGTCCCAGCTACTCAGGAGGCTGAGGCAGGAGAATGGCATGAACCCGGG
+AGGCAAAACTTCAGGTTTTGCCAAGAATGACACTGTAAATGTAACAAAGCTTCTGTGCTTGTTAGTGAACACCAAATCAG
+CTACTCTCCTGTATTCGGAGATCAGGATGAAATGAAAAGAACAAGCAGGCCGGGCGCGGTGGCTCATGCCTGTAATGTCA
+ACACTTTGGGAGGCTTCGGTGTGCGGATCACCCGAGGTTGGGAGTTTGAGACCAGCCTGACCAACATGGAGAAACCCCGT
+CTCTACTAAAAATGCAAAATGTGCTGGGCATGGCGGCACAAGCCTGTAATCTCAGCTTTGGAGGCTGAGGCAGGATAAGT
+GCTTAAACCTGAGAGGGGGAGGTTGCTGTGAGGCAATATTGCACCATTGCACTCCAGCCTGGGCAACAAGAGTGAAACTC
+CATCTCAAAAAAATAAAAAATAAAAAAAGACCCCCAACCTTGTCTAGACTGTGGGGTTTCAGTTTCACCCCAGGGGGGTC
+CTGGTTGGGTTAGAATCCTGAATCTCGTTTGAGTTCGAACCCTAAAGAATAAAGGGAATAAAGGCTGTAGCTACTGAGCT
+ACTCAATCTGGTCTGGTTCTGGCTTTTGTGTGTCTATCTGTATTTTTGGTCTAAATATTTGGCCCAACAGAGGTTAAAGG
+CTTTGATGTTCTCAGCAAAAGCCTTGTGAGATTTCTAGTTTATCTGTGTGCTCAATTGGAACAAAGAGACTCAATAAACT
+AGAAAAACCTAAAGAAAATGGCACACGTGAAAAATTGAGAGCCAACTCCTGTTTGTTGTTCTGTCCACCTCCCTCTCTCA
+CTCCTCCTTCTGCCTTTGCTGTGGTCCCATGGTGTTTCTGTCTTTCCGGGGACCTGAGATTCAGTGTAGGAGTGAAGTCC
+ATGATTTTAAAGCCTTCATGTCTCTGCTTTTTAACTCTGCCTGCTTTGCTGAGCTCTTATAATGAGAAATAAACTATTCA
+GAACAGGTACAACAGGGCATCAGAAAACCAACTTCAGGCAGCGCTCCGGCAAGTACCTCCCTAGAGGGGAAGGGCTTACT
+AAAGGAGATTTAATCTTGAAAAGGCCAAAATGAGAAGCTCTAACCTTTAGCTTGCTAGGTTTTCTGGGACTCGAGCTGGC
+TATATATTATGGACCATTCTAGCCACACACACACACACACACACACACACACACACACACACTTTTTTGAGACAGAGTCT
+TGCTCTGTTGCCCACGCTGGAGTGCAGTGGTGCGCTCTTGGCTCACTGCAACCTCCACTTCCCAGGTTCGAGCAATTCTC
+CTGTCTCAGCCTCCTGAGTAGCTGCGATTACAGGTGTACGCCAACATACCCGGCTAATTTTTGTGGTTTTAGTAGAGATG
+AGGTTTCACTATGTTGACCAGGCTGGTCTCAAACTCCTGATCTCAAGTGATCCACCCACCTTGGCCTCCCAAACTGCTGG
+GATTACAGGCATGAGCCACTGTGCCCAGCCTATATATATATATATATATATATATTTATATTTATATTTATATATTTATA
+TTTATATTTTTTTTCTTTTTCTTTTTGACACACAGTCTTGCTCTGTTGCCCAGGATGGAGTATGGTGGTACAATCGCGGC
+TCACTGCAACCTCCACCTCCCAGGTTTGAGCGATTATCCTGCCTCAGCCTCCTGAATAGCTAGGACTACAGGTGCACACC
+ACCACACCCAGTTAATTTTTGTATTTTCAGTAGAGATGGGGTTTTGTCACGTTGGCCAGGCTGGTCTCAAACTCCTGGCC
+TCAAGTGATCCACCTGCCTCAGCCTCCCATAGTGTTGGGATTACATGAATGAGCCACTGCACCTGGTCTCTAGTGCACAC
+TTTAAACCTGACGGCCAAATTACATGAAAGAAAATTCAGAACTCAAATAGTTACTATTTTTAAAAACCCTAAAATGAAAA
+AGTCTCAGTTCTTTTGCCTATCTTTTTTTTTTTCCCTGCCTACTTTGAATCTGCTGATTTGTCTACTGGTGTTGAGATAA
+GACTTACTGTCTGTGGTGTTACCAATTCAAGGTTACTTGGCTGAAGAAAAACAAAAGAATGAAACAATTTTTTATTTTTT
+TCTCTTTTTGAGACAAGGTCTTACTCTAAGGTCTTACTCTGTTCCCCAGACTGGAGTGCAGTAGTGGGATCATAGATCAC
+TGTTATCTCAACCTCCCAGGCTCAAGCAATCCTCCTGCCTCATCCTCTCTAGTAGCTGGGACAATAGGCATGCACCACCA
+TGCCTGGCAATTTTTTATCTTATTCTTAGTAGAGATTGGATCTCACTCTGTTGCCCAGGCTGGTCTCAAACTCCTGAGCT
+CAAGTGATCCTCTTGCCTCAGCCTCTCAAAGTGCTGGGATACAGGCATGAACCACTGTGCCCAGACAAAAGAGTTCTTTT
+ATAAATGCAAATAATTTAAAAAGTACTGATAAAATAAAAATAGAAATGTCTTCAGAATTGTCAGCATACATTTTTGACTG
+TGTTTTATATTTACATTTGCTAGATATTTTAAGGTGCTAGGGTTTGGCATGAAGGTTATAAAGCTATAAACACAGGAAAA
+AAAGAATATTTGTTTATGTGATTTTTTAAATACATAAGACCAATTTAATACGGTTTGTTGAACAAAAATAATGGAATTTT
+CTGAGTTATTGGTAAAATACCCTTGTATTTAACTTTGAAATCCTCACTTATGTGAACACCTGATATTCACAGGCTATAAC
+ATGGTTAACAAGAAAATAACCTAGAAATGACTAGCTTTGTCTAATACCTCAGTTCTCACAAATACTCTAGATAAACTGTC
+AAAAATAAGTAAATGTAAATGGATAAATGTCTATACAAGACATTTTAATGTATTTTTGAAATTTTTTTGAGCCAATGTCT
+CTGCCTGTCACCCAGGCTGAAGTGCAGTGGCATGATCACAGCTCATTGCAACCTTGACCTCCTGCACTCAAGGGATCCTC
+CCACCTCAACCTCCCAAGAAGTTGTTAATTACAGGCATGCAACACCATGGTCAGCTAACTTTTATTTTTTTTGTAGAGTC
+AGCATCTCGCTATTTGCTCAGGCTGGTCTCACGATACTCCTGCCTTGGCCTCCTAAAGTGTTGGGATTACAGGTGTGAGC
+CACCATGCCCAGCCTATTTTTGAAATTTTAGTTATGTTAAATTAAATAATAGATACTCATTAAATATCTGGGTTATTTCC
+AATTTAAAACTTATGTTTTAGCCCAGGCACTATGGCTCATGCCTGTAATCCCAACACTTTGGAAGGCCAAGGCGGGTGGC
+TCACCCGAGGTCAGGAGCTCAAGACCAGCCTGATCAACATGGTGAAACCCCATCTCTACTAAAAAATACAAAAAATTAGC
+CAGGTGTGGTGGTGGGTTCCTGTAATCCCAGCTACTCGAGAGGCTGAGGCAGGAGAATTGCTTGAACCTGGGGGACGGAG
+GTTGCAGTGAGCTGAGATCACACCATTGCACTCCAGCCTGGGCAACAAGAGCGAAAATCCACCTTAAAAAATATGTTTTA
+GGAACACATAATTCTAAATTATGAAATCATTCTCATATGTAAGATACTGCTATATGACAATTCAAGATTTCTTTCTTCCT
+AAGTTTTTTATTAAAATAAGGGTTACTAAGTGTTAATATCTTGGTAGATATATGTGATTAAGACTACTAGATACAAGAGA
+AACAATTCTGTATGCAAAATCTATACGGGTTTTTGTTTCAGAGAAAGTAAATTCGCTTAGAGATTTTTAAGGATTATTTT
+AAATTGAAGGAATAAAAAAGATAGATAAAACTAAATGTGTATAAAAAGTTGGGAAAGATGGAAAAAATTATACAAGCTTA
+TTAAAAGTTTATGTAAATCTTACCTCGAGGTCAAAACTGATTGAGATCAGATAGATTGTTTATAAGGTTTATTTAAATTA
+GCTGTAATATTAAAAACATAGTGATAAAAAACAAAAAAATTTGGTTAAAACAACAAGGTTTTCTTAATGCATTTATTTGC
+TCATAATAAGAGGTAATAAATATTGACTTTTAATCCTGATATCTGTTACTATAAAATCTTTTCAGATTTGTATATCAGAA
+GTTCAACGTTTCCTGTACTTTCATGTTACACATGACTCACAGATCACATCATTGTCTTCTGTTTCTTCTTGAGAAGGAAT
+AAAAGGCTTGGGTTTCCTGCTTGGCTGGGATGATAACTCCTTCAGCTTTTTCATCAGGTCTAATTTTGTACTCTTGGCTT
+TTAAATATGTCTTAATTACTTCATGTAACCAGGATACAGCAGGAAACTTCCATGCTATCATTGTGAGCTATGGATCCCCA
+CTGCTCTATGCTCTGGTTTTCCTGTTTACATTCCTCTGTAATATTATGCTCACTCATGACCCTGGACACACTCTTTCTAT
+GTCTAATTAAATTCAAGTCTCCGTGTCATCGGTTTGACTTCCAAGTGATTTAAATTAGCTTCCCATAAGAAGACACAGTT
+ATGCCACAGGAGCTTTTACCCTTTAAATGACTGGCCTGTAATAAAGATTTTAGGTTTTATCAAGATAATCCATGTGTTGC
+CTTTATTGTTTTTTTAATTACTTGGGAAAACTGAGGGTTTTCAGTTTTCACATCCATGTAACCTTCTATGTTGCTTTTGA
+TGTCTTTTGGTTGTCATGTTAATTAAATGAATGTTATTTAAAAATGACATGTGGCTGTGTGCAGCGGCTCTTGTCTGTAA
+TTCCAGCCCTTTGGGAGGCCAAGGTGGGTGGATCACTTGAGCCCAGGAGTTCAAGTCCAGCCTGGGCAAAATGGCAAAAC
+CCCATATCTACTAAAAATACAAAAGCTAGCTAGGTGTGGTAGCAAGTGCTCATAGTCCTGGCTACTTGGGAGACTAAGGT
+GGAGGATCACCTGAGCCTGGGAGGTTGAGGCTGCAGTGAGCCATGATTTCACCACTGCACTCCAGCCTGGGCAACAGGGT
+GAGACCCTGTCTCAAAAAAATAATAAAATAAAAAACAATAAACTACCATTCCGTTTTGGTCAAATGTTTTCAATTTTTTG
+ACATCTTTGCTAAAACTTAGTTGATAACATTGTATGGGAAGCATTGCCAAAAGATAAGTAACACTAAATCTTCTTTTTTT
+TTTTCTCTGAGACAGAATCTTGCTTTGTCACCCAGGCTGGAGTGTTGTAGCATGATCTAGTTTCACTGCAAATTCTGCCC
+CAAGGTTCAAGCAATTCTTCTGCCTCAACCTCCCAAGTAGCTGGGACTACAGACAAGTGCCACCATACCCGGCTAATTTT
+GTATTTTTAGTAGAGACAAGATTTCTCCATGTTGGCCAGGCAGGTGTCGAACTCCTGACCTCTGATGATCTGTCCAACTT
+GGCCTCCCAAAGTGTTGGGATTACAGGCATGAGCCACTGTGCCCAGCTTAAATCTTCTTTTGGTTACATTTATAGGTATG
+TTATTAATATAAATATTTTAAAAGTTATATAAATTATAAAAATCTAATAGGGTTTCAGTCATAATTTTGATGATGTTAAA
+TATTTTCTAAAGTTGTATGTGTATAGATATATTATTAATATAAATATTCTAAAGATTATATAAAATTTCTGGAAGTCTGA
+TGGATCTGATGTGTTGCCATCAGTCATGATTCTGGCTGTTATCTTAAAATGCTACATATAATACAAATAACTAAATTTTC
+TCGAGATTTGAGAACTTCCACTGGATTTTAACCAACGGCTATTCTAAGTTTTTGTCATCCACGGTGATTGATTAAAGTTC
+TTCTCTAAAACCCTTTACAGGCCGGGCACGGTGCTGGAGGCCATGCAGGGCACGGTGCTGGGCATGGTGCAGGGCATGGA
+GGCTCATGCGTGTAAATATCCCAGCACTTTGCAAGGCTGAGGTGGGTGAATTGCTTAAACTAAGAAGTTTGAAAGCAGCC
+TAGGCAATATGGTAAAATCCCATCTCTACAAAAATACAAAAATTAGCCAGGCATGATGGTGCATGCCTATGGTCCCAGCT
+ACTTCGGAGGGTGAGGTAGGAGGATGGCTTGAGTTTGGGAGGGAGAGATTGCAGTGAACCAAGATCATGCCACTGCACTC
+CATCCTGAGTGATAGAGCCAGACCGTGTCTCAAAAAAAAAGCCTTTACAATCAGCTATCATCTAAATTACTTTTAATGGA
+AAGGATTCTGACAAGTTCTCTTAAATATGGTTTCAGATAACTTTGGGGATCAAACCATTGGACTAGGAAAATCTTCCAGG
+ACTCTAAAAAGCTGAATGAGAATTTCTAATTGAAATCAAGCAAAACACAAAAAACTGAATGAGAATTGCTATTTGAAATC
+AAGCAGAACAAGATTTAGTTACATGGGAGTGAACTTATAAAAGAAGGAAAAGATTTTATCCATGGCCCTCCCATTAGAAA
+CATTGTTGATCCTCTTTATGTTTTGTTTTCCAAAGTCAATAATTTTTTTTCTTTTCAGCTATTTTTAATTTACAATACAT
+TGGATAAACTACATTGTGAACAAAAATTTGAGCCATTTATCTGTCTCTCTAGCTGATTTCTCCAGAATTCAGAAGCCATT
+CGTGAGCATTCTTAAATTATGGCAATATAATTATTTGCATAATTTCAATAAGAATCTGTTTTTGGTAACAAGATTCAATT
+GGAGACACTGTTTGTTTTATCAAGGCTTTAACTCGAATGGCAGATACAACCAGACCACTTTAAGGAATTGAGGTTGACTT
+TATAGCACCAATACAAAGCCCCTTAGAATGACTGGCTTGATGTCCTGTATACAAAGGTCCTTTACAAAGTTGCTATTCTT
+GTGGTAAGAAATAAAGAATGTCACTTTCTGATGGGCCAGGAACCTCAAGATATTTGGGGACCTTGAGAAGAGAAGACTAC
+ACCCATTCATTAAAGTATTACAGGAGAGCTGGCAAGATGTCTGAATAGGAACAGCTCCGGCCTGCAGCACCCAGTGAGAT
+CAACACAGAAGTCAGGTGGTTCATGCATTTCCAACTGAGCCTCCACTGGTGATACCCAGGAAAACAGGGTCTGGAGTGGA
+CCTCCAGCAAACTCGAGCAAACCTGCAGCAGAGGGACCTGACTGTTAGAAGGAAAACTAACAAACAGAAAGGAATAGTAT
+CGACATCACCAACATCAAAGACCAAAGGTAAATAAATCCACAAAGATGGGGAGAAACCAGTGCAAAAATGCTGAAAACTC
+CAAAAGTCAGAATGCCTCTTCTCCTCCAGAAGATCACAACTCCCCGCCAGCAAGGGAACAAAACTGGACAGACAGTGAGT
+TTGATGAATTGACAGAAGTAGGCTTCAGAAGGCGGGTAATAACAAACTCCTCCGAGCTAAAGGAGCATGTTCTAACCCAA
+TGCAAGGAAGCTAAGAGCCTGAAAAATAAAGGTTAGATGAATTGTTAACTAGAATAACCAGTTTAGAGAAGAATATAAAT
+GACCTGATGGAGCTGAAAAACCCAGCACAAGAACTTCATGAAGCATACACAAGTATCAATAGCTGAATCAATCAATCAGG
+AGAAAGGATATCAGAGATTGAAGATCAACTCAATGAAATAAAGCCAGAAGACAAGATTAGAGAAAAAAGAGTGAAAAGAA
+ATGAACAAAGCCTCCAAGAAATAGAGGACTATGTTAAAAGTCTGAATCTACGTTTGATTGGTGTACCTGAAAGTGACAGA
+AAGAATGGAACCAAGTTGGAAAAAACTCTTAAGGATATTATTCAGGAGAACTTCCCTAACCTAGCAAGACAGGCCAACAT
+TCAAATTCAGGAAATACAGAGAACACCACTAAGATACTCCTCAAGAAGAGCAACCCCAAGACACAAAATCATCAGATTCA
+CCAAGGTTGAAATGAAGGAAAAAATATTAAGGGCAGCCAGAGAGAAAGGTTGAGTTACCAACAAAGGGAAGCCCATCAGA
+CTAACAGTGGATCTCTTGGCAGATAGCCTACAAGCCAGAAGAGAGTGGGGGCCAATATACGACATTCCTAAAGAAAAGAA
+TTTTCAAACCAGAATTTCATATCCAGCCAAACTAAGCTTCATAATTGAAGGAGAAATAAAATCCTTTACAAGCAAATGCT
+GAGAGATATTGTCACCACCAGGACTGCCTTACAAGAGTTCCTGAAGGAAGCACTAAACACGGAAAGGAACAACTGGTAAC
+AGCCACTGCAAAAACATACCAAATTATAAAGACCATTGACACAATGAAAAAACTGCATCAACTAATGAGCAAAATAACCA
+GCTAGTATCATAATGACAGGATCAAATTCACACATAACAATATTAACCTTAAATGTAAATGAGCTAAATGTCCCCAGTTG
+AAAGACACAGTCTGGCAAATTGGATAAAGAATCAAGACCCATCAATGTGTTGTATTCAGGAGACCCATCTCATACACATA
+GGCTCAAAATAAAGGGATGAAGAAACATTTACCAAGCAAGTGGAAAGCAACAAAAGGCAGGGGTTGCAATCCTAGTTTCT
+GATAAAACAGATCTTAAGCCAACAAAGATAAAAAGAGACAAAGAAGGGCATTACATAATGGTAAAGGGATCAATGCAACA
+AGAAGAGCTAACTATCCTAGACATATATGCACCCAATACAGGAGCACTCAGATTCATAAAGCAAGTTCTTAAAGACCTAC
+AAAGAGACTTAGGCTCCCACACAATAATAGTGGGAGACTTTAACACTCCACTGTCAATATTAGATAGATGAGACAGAAAA
+TTAACAAGAATATCCAGGACTTGAACTCAGCTCTGGACCAAGTGGACCTAAAAGATATCTACAGAACTCTCCACCCCAAA
+TCAACAGAATGTACCTTCTTCTCAGCACCGCATTGTACTTATTCTAAAATTGACCACATAATTGGAAGTAAAACACTCCT
+CAGGAAATGCAAAAGAATGGAAATCATAACAAACAGTCTCTCAGACCACAATGCATTCAAATTAGTATGCAGGATTAAGA
+AACTCACTCAAAACCTCACAACTACATGGAAACTGGGCAACCTGCTCCTGAATGACTACTGGGTAAATAACGAAATGAAG
+GCAGAAATAAAGATGTTCTTCAAAACCAATGAGAACAAAGACACAACGTACCAGAATCTCTGGGACACATTTAAAGCAGT
+GTGTAGAGGGAAATTTATAGCACTAAATGCCCACAAGAGAAAGCAGGAAAGATCTAAAATTGACACCTTAACATCAAAAT
+TAAAAGAACTAGAGAAGAAAGAACAAACAAATTCAAAAGCTAGGAGATGACAAGACATAAATAAGATCAGAGCAGAATTG
+AATGAGATAGAGACATGAAAAACCCTTCAAAACATCAATGAATCCAGGAGCTGGTTTTTTGAAAAGATCAACAAAACAGA
+CCACTAGCCAGACTAATAAAGAAGAAAAGACAGAAGAATCAAATAGATGCAATAAAAAATGATTAAGGGGATATCACCAC
+TGATCCCACAGAAATACAAATTATCATCAGAGAATACTACAAAGACTAAACCAGGAAGAAGTCAAATTCCTGAATAGACC
+AATAACAAGTTCTGAAATTGAGGCATTAATCAATAGCCTACCAACCAAAAAAAGTCCAGGACCAGACGGATTCACAGCCA
+AATTCTACCAGAGGTACAAAGAGGAGCTGGTACCATTCCTTCTGAAACTATTCCAAACAATAGAAAAAGAGAATCCTCCC
+TAACTCATTTTATGAGGCCAACACAACAAAAAAAGGAAATTTTAGGCCAATATGCCTGATGAACATCAATGCAGAAATCC
+TGAATAAAATACTGGAAAACTGAATCCAGCAGCACATCAAAAAGCTTGTCCACCATGACCAAGTTGGCTTCATCCCTGGG
+ATGCAAGGCTAGTTCAACATGCAAATCAATAAATGTAATCCCTCACATAAACGGAATCAATGACAAAAACTACATGATTA
+TCTCAATAGATGCTGAAAAGGCCTTTGACAAAATTCAACAGCCCTTCATGCTAAAAATTCTCAATAAACTGGTATTGATG
+GAACATATCTCAAAATATTAAGTGCTATTTATGGCAAACCCACAGCCAATAACATACTGAATGTGCAAAAACTGGAAGCA
+TTCCCTTTGAAAATCAGCACAAGACAAGGATGCCCTCTCTCACTACTCCTATTCCACATAGTGTTGGAAGTTCTGGCCAG
+GGTAATCAGGCAAGAGAAAGAAATACAGGGTGTTCAATTAGGAAAAGAAGAAGTCAAATTGTCTCTGTTTGCAGATGACA
+TGATTATATATTTAGAAAACCCCATCGTCTCAGCCCAAAATCTCCTTAAGCTGATAAGCAACTTCAGCAAAGTTTCAGGA
+TACAAAATTAATGTGCAAAAATCACAAGCTTTCTTATACACCAATAACAGACAAACAGAGAGCCAAACTATGAGTGAACT
+CCCATTCTCCATTGTTACAACAAGAATGAAATACCTAGGAATACAACTTACAAGGATGTGAAGGACCTCTTCAAGGAGAA
+CTATAAACCACTCCTCAAGGAAACAAGAGAGGACACAAACAAATGGAAAAACATTCCATGCTCATCAATAGGAAGAATCA
+ATATCGTGAAAATGGCCATACTACCCAAAGTAATTTATAGATTCACTGCTATCCCCATTAAACTCTATTGACTTTCTCCA
+CAGAATTGGAAAAATATATTTTAAATTCCATATGGAATCAAAAAAGAGCCCGAATAGCCAAGACAATCCTAAGCAAAAAG
+AACAAAGCTTCAAGCATCATGCTACCTGACTTCAAACTATACTACAAGGCTATGATAAACAAAAGAGCATGGTACTGGTA
+CCAAAACAGATATATAGATCAAAGGAACAGAACCGAACCCTCAGAAATAACACCACATATCTACAACCATCTGATCTTTG
+ACAAACCTGAGAAAAACAAGCACTGAGGAAAGGATTCCCTATTTAATAAATGATGTTGGGAAAACTGGCTAGCCTTAGGC
+AGAAAGCTGAAACTGGATCCCTTCCTTACACCTTATATAAAAATTAACTCAAGATGAATTAAAGACTTAAACCTAATACC
+TAAAGCCGTAAAAACCCTAGAGGAAAACCTAGGCAATACCATTCAGGACATAGGCATGGGCAAAGACTTCATGACTAAAA
+CACCAAAAACAATGGCAACAAAAACCAAAATTGACAAATGGGACCTAATTAAACTAAAGAGTTTCTGGACAGCAAAAGAA
+ACTATGAGAGTGAAAGGGCAACCTACGAAATGGGAGAAAATTTTTGCAATCTATCCATCTGACAAAGACCTAATATCCAG
+AATCTACAAAGAACTTAAACAAATTTACAAGAAAGAACAATCCCATCAAAAAGTGGGCAAAGGATGTGAACAGACACTTC
+TTAAAAGAGGACAATTATGCAACCAAGAAACATATGAAAAAAAGCTCATCATCACTAGTCATTAGAGAAATGCAAATCAA
+AACCACAATGAGATGCCATCTCATGCCAGTTAGAATGGCAATCATTAAAAAGTCAGGAAACAAAAGATACTGGAGAGGAT
+GTGGAGAAATAGGATTGCTTTTACACTGTTGGTGGGAGTGTAAATTAGTTCAACCATTGTGGAAGACAGTGTGGCTATTC
+CTCAAGGATCTAGAACTAGAAATGCCATTTGGCCCAGCAATCCCATTACTGGGTATATACCCAAAGGATTATAAATCATT
+CTACCATAAAGACACATGCACACGTATGTTTATTGCAGGACTGTTCACAATAACAAAGACTTAGAACCAACCCAAATGCC
+CATCAATTATAGACTGGATAAAGAAAATGTGGCACATATACACCATGGAATACTATGCAGCCATAAAAAAGGATGAGTTC
+ATGTCCTTTGTAGGGACATGGATGAAGCTGGAAATCTTCATTCTCAGCAAACTAACACAAGAATAGAAAACAAAACACCA
+CATGTTCTCACTCATAAGTGGGAGTTGAACAACGAGAACACAAGCACACAGGGAGGGGAACATCACACACTGGGGCATGT
+TGGGGGTTGGGGGCGGGGGGAGGGATAGCATTAGGACAAATACCTAATATAGGTGATGGGTTGATGGGTGCAGCAAACCA
+CCACAGCACATGTATACCTATGTAACAAACCTCAGCATTCTGCACGTGTACCCCACAACTTAAAGTATAATAAAAACGTA
+GGTTAAAAAAAAGTATTACAGACACAGTATGATGCAAATCTTTGACTTGGCTAGCCTCAAGGCTTTTAAAAGTCTAAGAT
+TCCTTAGTAGAAAGTTCCAACAAAGCCAATTTTAAGAAGCCTATATGGTCAATAAATATTCTTGCTGCACTTTATGCAAA
+TAATCAGACCAGGTATGATAAGACTAAAACTTATTTTGCACAGAAATTTGTCCTACTATGGTTTGTCTTTGATAAAATGA
+TGGACTAGAGAGAGAAAATTCATGTTTCAAATGAAAACTGTGACATATGCTATTAGATTCCAGCCCTGATCATTCTTTTC
+CGAGTTTTTATTATTTGCCTATAATTTGGGCTGAATCCTGAATTATTTCCTGGCTCCAAGTGTTCCCTAGTGAACCCAGA
+TAAAATATATTTTTAAAAAACTTGTTTTATCCTGTCAGTAATGAGATGTATTTTTATAGGACTACTTAAACTAGCAATTA
+CAATTCGATTATTATGATTATAGAATCTCGGGATTTATCTTCCTTCTTGTCAAGGTCTTTACCTGATGTTTGTCTCATTA
+AAAAAAAAAAGAAATCAGACTGATTGCACTCTACTCAAGACTGAAGACATGTACTTTAACCTGTCTCTGTTACCAGTAAA
+CCAAAGCCTTAAATTTCAGAATCCGTCAGGGACCCTGTGTGGTCCCTGGATCAAGCACACATGGGCTTATGAATGTGTTG
+ACCGCTGGCATATGAGAGGTAATTGTCTATTAGGTTATGTGGCTCTTCCTCTTCCTATTTATAACTCCAATGTTTCTGAA
+CGCTGAAGTAGTTCATCGAAATTATTTTCCAGGATTAGACAAACCGTACCTGCAAACCAAGGAGATGAATTTTGGCCTAT
+GTTTGGCAGAAATCTCTTGCAATGGTGGGGAGTAACCTCTCATGAACGTATAATTAGAAATCTGTCAACCACTCTAGGTA
+ACTTAGCAAATGAACTAGCTGAAGCCATAGCTACCAAATAAAGATCTTCAGACTCTTTATTCAGGATAGTCATGGATGAC
+GGAATAACTTTAGGCTACATAGTGGTGAAACAGGGAGAAACTCATATGGCAGCTAGCTAACACTTCATGTTGTGTTTAGA
+TCCATACATCTTCTGAAGTTGAAACACGTGTAAAAAAATAAGACGATATGGGAATTAATTATGACAAATCCTAGGGAAGA
+GGCTGAAAGAGCTGTAACACAAACAGGGCTGAGACATGCCCCTTGCTCGCCACATTGTGGGCAAAGAGAAGGAAAGAAGA
+GCTATGGCCCTTTGGGGAGCCCAGACCTGGGAGCTCCCTGAGCCAGAGCTGTGATTCCTTCTTTGGGGCCTTTGGTTCCT
+GATATCTCCAAGCTTCTGGGTGCCACTGTGTTCCCAGTGTGCCAGCTGTGGAAGCTTCTTGAGGTGCCCGTGGTCCAGCC
+ACAGCCTTGTGGAGAGCTGGCGCCCGTGTTGGCACCTGGAGCTACCTGCTCCACTTCAGCAGCCAGCAAATCTGACTGCA
+CAGTGACCAGACCCCATGCTCACTCACACACCCCTTGCCACTCCATGCAGTCTCCCTTGGCAGGCATGGGATCCAACGTG
+GTAGCATGAGCAGAGCACAGCCTGCCAGGCTGAGTGTGCGGGGCCCAGCAAAACTCAGGCAAAGGTGCCACCAGTCATAG
+AGGTTTCTGTCCAGAAAAGTAACACCCCAAAGATCCCATAACACCGCTACTCTTCCCAGCCTCTGGAAACTCTCAGTCTA
+CTCTCTATCTTGATGAGTTCAATTGTTTTAATTTTTAGCTCCCACAAATGAGTGAGAACATGCGAAGTCTGTCTTTCTGT
+GCCTGGCTCATTGTACTTAACATAATTGTCCTCTAGTTCCATCCATGTTGTTGCAAATGACAGAATCTTATTCTTTTTCA
+TGGCCGAAGAGTACTCCATTGTGTATATGTACTACATTTTCTTTATCCCTTCATCTGTTGATGCACACTTAGGTTGCTTC
+CAAATCTTGGCTATTATGAATAGTGCTGAAATAAATATGGGAATGCAGATATCTCTTTGATATACTAATTTTCCCTCTCT
+TGGGTATATACCCAGCAGTGGGATTGCTGGATCATATGATAGTTCTATTTTTAATTTTTTGAGGGACCTCCATATTGTTC
+TCCATAGTGAATATATTAATTTACATTCCCACCAACAGAGTAAGAGTGTTCCCTTTTCCCCAAATTCTTGAAAGCATTTG
+TTATTGCCTGTCTTTTGCATAAAAGCCATTTTAATGGGGTAAGATGATATATTTTTGTTGTTTTGATTTCAATTTCTGTC
+ATGATAAATGATATTGAGCATCTTTTCATATACCTATTTGACATTTATATATGTTCTTTTTTGTTTTTGCTCATTTTTTG
+AGACAGGGTCTCACTCTGTCACCCAGGCTGGAGTGCAGTGGTATGATCATGGCTTAATGTAGTGTTGACTGCCAGGGTTC
+AAGCAATCCTCCCACCTCAGCCTCCTGAGTAGCTGGGACCACAGTCATGCATCACCATGCCCAGGTAGTTTTTAAAATTA
+TTTGCTATGTTGTTCAGGTTGGTCTTGAACTCCTGGGCTCAAGTGGCCCACCCGTCTTGGCTCCCCAAAGTTCTGGAATT
+ACATGTGTCAGCCACTGCGCCTGACCTGTGTGCCTTCTTTTGTGAACTGTCTGTTCAGATCTTTTGCCTATTTAAATAAT
+TGGATTGTTAGTTTTTTTCTTGTAGAGTTGTTTGAGCTCCTTATATATTCTGGTTATTAATCCCTTGTCAGTTATATAGT
+TTGCAAATATTTTCTTCCATTCTGTGGATTGTCTTTTCACTTTGTCCATTGTTTTCTTTACTGTGCAGAAACTTTTGAAC
+TTGATGTGATACCACTTGTTCATTTTTGCTTTGGTTGCCTGAGCTTTTGGAGTATTACTCAAGAAATCTGTGCCAAGACC
+AATTTCCTGGAGAGTTTCCCTAATGTTTTCTTTCAGTAGTTTCGTGTCTTTGATTTAAGTCTTTAACCCATTTGGATTTG
+ATTTTTGTATATAGTGCAAGAGAGGGTTCTAGTTTAACTATTCTGCCAATGACTTTGGGAGGCCAAGGTGGGCGGATCAT
+GGGGGCAGGAGATCGAGACCATCCTGGCTAACACGGTGAAACTCCGTCACTACTAAAAGTACAAAAAAAAAATTAGCCAG
+GCGTGGTGGTGGGCACCTGTACTCCCAGCTACTTGGGAGGCTGAGGAGGAGAATGGTGTGAACCCGGGAGGTGGAGCTTG
+CAGTGAGCCCAGATCGCGCCACTGCACTCCAGCCTGAGCAACAGAGCTAGACTCCATCTCAAAAAAAAAAAAAAATCTGC
+CAATGAATATCTAGTTTTCCCAGCACAATTTGTTGAAGAGACTGTCCTCTCCCCCATGTATATTCTTGGCACCTTCATTG
+AAAATGAGTTAATTGTAAATGTATGGATTTATTTCTGGGTTCTCTATTCTGTTCCATTGGTCTATGTGTCTGTTTTATGC
+CAGTACCATGGTGTTTTGTTTATAATTGCTCTGTACTATAATTTAAAGTCAGGTGATGTGATTCTTCCAGTTTTGTTCTT
+TTTGCTCAGGATGGCTTTTGGTATTCTGGGTCTTTTATGGTTTCATGTAAATTTTAGGATTTTTTTTCTATTTCTGTGAA
+GAAAGTTATTAGTATTTCAATAGGGATTGCATTGAATCTGTAGATTGCTTTGTGAAGTATGGGTATTTTAACAATATTTA
+CTCTTCCAATAAATGAACATGGACTATCTTTCCATTTTTTTTTTGGTGTCCTCTTTAATTTTTTTGCATCTATGTTTTAT
+AGTTTTCATTGTAGAGATCTTTCACTTCTTCTGTTATGTTTATTCCCAGTTATTTTATTTTATTTGTAGCTATTGTAAAT
+GGGATTACATTCTTGATTTTCTTCTTTAGATTGTTCATTTTTGTCATTTAGAAATGCTACTGACTTTTGTAGTTTGATTT
+TGTATGCTGTGACTCTGAATTTGTTGATCAGTTCTAATAGTTTTTTGGTGGAGTCCTTAGGTTTTTCCAAATATAAGAGC
+TAATCATCTGCAAACAAGAAACAGTAATAATTTTACTTCTTTCCAATTTGGATCCCTTTTATTGTTTTTCTCTTGTCTGA
+ATTGCTCTAGCTAGGACTTCCAGTACTATGTTGAGTAACAGTGTTGGAAGTGGACATTCTTGTCTTGTTCCAGATCTTAG
+AAGAAAGGCTTTCAGCTTTTCCCTGTCCAGGATGATACTGGCTGTGGGTCTGTTGCATATGGTTTTCATTGTGTTGTGGT
+ATGTTCCTTCTATATCTAGTTTTTTTTGAGGGTTTCTTTTTATCACAGGGATGTTGAATTTTATTAAATGCTTTTCAGCA
+TCAATTGAAATTATCATATGGTTTTTGTCCTTCATTCTGTTGATATGATGTGTCACATTGATTGATTTGCATACATTGAA
+CCATGTTGGCATCCTTGGGATAAATCCCACTTAGACATGATGAATGGTCTTTTTCATAGGATGAGTTTGGAAATACTACA
+GCCTTCTCTGTTTTTTTTGAATAGTTTGAGTAGGATTGATAGTAATTCTGCCTTCAATGTTTGGTAAAATTAATCAGTGA
+AGCCAGTGAAGCCATTGAATCCAGGCTTTTCTTTGCTAGGACATGTTTTATTAAGGCTTCAATTTCATTTATCCATTTCT
+TCTAGGTTTTTTTTTTTTTTGAGATGGAATCTTGCTCTCTCACCCAGACTGCAGCGTGGTACAATCTCAGCTCACTGCAA
+CCTCTGCCTCCCAGGTTCAAGTGATTTTCCTGCCTCAGCCTCTAGAGTAGCTGGAAGTACAGGTGCATGCCACCATGCCT
+GGCTAATTTTTGTATTTTTAGTAGAGATGGGGTTTCACCATGTTGACCAGGCTGGTCTTGAACTACTGACCTCAGGTGAT
+CACCTGCCTTGGCTTCCCAAAGTGTTGGGATTACAGGCATGAGCCACGGTGCCCAGCCATTTCTTCCAGGTTTTTCAATT
+TATTGGAATATAGTTGGTCATAATAGTTTCTAATGATTCTTTGAATTTCCACAGTATCAGTTATAGTGTCTCCTTTTTAA
+TCTCTGGTTTTATGTATTTGAATCTTCTCTCTTTTTTCTTAGTCTGGTTAAGTGTTTGTTGATTTTGTTGGTCTTTTAAA
+AATATTAACTTTTCATTTCATTGATATTTTATATTTTTAAATTTCAATTTCATTTATTTCTGCTCCGATCTTTGCTATGT
+TTCCTTCTACTAATTTTGGTTTTGGCTTGCTCTTGCTTTTCTAATTATTTAAGATGCATTATTAGGTTGTTTATTTGAAG
+CTTTTCTACTTTTTTTGATGTAGGTGCTTTTATCTATAAACTTACCTCTTAGTACTGTAGTACTGTTTTTACTGTATCCC
+ATAGGTTTTTTTTTTTTTTTTTTTTTTTTTTTTGTTTTTTTTTTTTTTTTGAGATGGAGTCTCGTTCTGTTGCCCAGGCT
+GGAGTGCAGTGACGCGATCTCGGCTGACTGCAAGCTCCGCCTCCCAGGTTCACACCATTCTCCCGCCTCAGCCTCCCGAG
+TAGCTGGGACTACAGGCGCCTGCCTTCACGCCCGGCTAATTTTTGTTTTTTGTATTTTTAGTAGAGACGGGGTTTCACCG
+AGTGCGCCCGGCCTGTACCCTAGGTTTTGGTTTGACTTTAAACTTTTTCTTTTCTCGAAAACTCAGTGTCATGGTACCGG
+CTTCTTGTGCTTTGGGCAGTGAGACCCTTTTACTTGATAACAGTGGTAGCTGGGACAAGTTGGCAATGTAAATAAATAAA
+CAACATCTAGATTGGAAAGGAAGAAGTACAGTTATCTTTATGTACAGATGACATGATCTTGCATTTAGAAAATCGTAAGA
+AATTTACTAAAAAGTATTAGGACTCATGAACAAACTTAAGAATGTAACACTATATAAGATTGGTATACAAAAATAACTGT
+ATTTCTTTACCAATAAATCAAGAATCCAAAAATGGAATTAAAAAAATAAATCTTGTTACAATAGAATTAAAGCTGGGGAA
+GCTTAAACTTGAACACTAAAAACTACAATACATGGTTAGCGTTGGAAACACCCAGATACCATCCCTGAGCCTTCTCTCCT
+TGGCTCTGAGGGCTTTACCTTCACGGGGTGAGGAAAGGGGTTGCATTCTTGGCTTTTACATTATATTAGGTGGGTTCGGG
+TTGAGGTATCTGCAATTCAAATGAGTATTACAATCTCTACTTTTATGGATAAGAGACTGAGGCCCACCAAGAGAGGGAAT
+GACAGTCCATATCCTGGAAGGCGAATTGTCAGGCACTGATTTCCGCTATTTAACCCCTGCCAATCATCATGTATTTAAAG
+GATCCCCAGATACCATACCAATAGGTGTTCAAGAGAGAGGCCTGTAATCTAGGCGTCTGAGAAAACAAGGCTAGAGATTC
+CAATATTGGAGACAACAGGGCTCTGGGAAGATTAAGGTTGAGTTTTCTGGATCTGCAGAATAGAGTCACTGAGGACCAAT
+TGCAAGATCAGAGGAGATGAAAGAACAAGTCAGGGCATGCTTAGGAAAAGAGAATACCAGGGATAGGTTTTAGGCAAGAG
+TCACACTGAGGAAGGGCAGGTTCTTGGCGTCGTTTAGGAAGAAATCCAAAAGCAAGCCTGTGGTGGAAGAAAGCAGCTCT
+ACGGAGGCATTGGCGGTGTTACAGCACTGCGTCCACTCCGGCAGGGCAGGGAGCCCTCCGTGGGTTGTGCTCCCAGAGCA
+GCAGCCTAGGGGTGGCTTGTAGTCATTTTTATAATTCACTTTTAATGGCATGCTAATTAAGGGGCGGGTTATTCAGAAAT
+AGCTAGAAATGGGCAGTAACTTCCATCTGTTTCCATGGCAAGGGGTGGGGACTTCTCGTGATGACATGGCATTGGCAAAC
+TGTCATGGCACTGGTGGGAGCGTCTTCTGGTGATCTGAGGCGTGAGGTGCTTTCGCTGCCTCTCCCAGGTTCCTGCGTGC
+CTCTTACCTGAAAGCCCATCACACCCCCATCTGCCCACCTACAAACGTCACTGCCCTTTCACCCCATCCCCGTTTCACAC
+GCACTCCCACATCAACCCTGAGCATTCAAGCCTGCGTTTCCCTGTTAGGAACCTCGGTGGTAGCCGGAGCTCTGAGAAAC
+CCCTAGGCAGAACTCCTTGCCTAGTTTGTGGCAGACATCAGGGAAGGAAAGGCAAATTTCAGGTCTTTCTCACAATAAAT
+AAATAAAGATAGGTAGATTTGATTGATGGATGGATGGATGAAACCTGGGAGTCTACGGGCAAATATTTATCAGACACTGG
+AAGTGTAAGTTGTCACAAAGATTATGGAGTGCACCTGTCTTATGACCCTGTTATTTTATCCTAGTATGTGCACTAGAGCA
+TATTTTCTAACAGTGTAAATTGAAGGCTCACAAATTAGTTTAGTGAGAGAAAAGATAACGGATTGGAAGAGAATTACCAT
+ATTCATTAGTTGTGTTTTAAAATTTTTAAAGTAAAATAGAGACATTATTTTTTTCATGCTTTCGAATGCATCTATAAAAA
+ATAGACTTGAGGGCTGGGCGCAGTGGCTCACGCTTGTAATCCCAGCACCTTGGGAGGCCGAGGAGGGCGGATAACGAGGT
+CAGGAGTTGGAGACCAGCCTGACTAACATGGTGAAAACCCGTCTCTACTAAAAATACAAAAATTAGGCGAGTGTGGTGGC
+ACGCGCCTGTAATCCCAGGTACTTAGGAGGCTGAGGCAGGAGAATCGCTTGAACCCGGGAAGCGGAGGTTGCAGTGAGCC
+GAGATCGCACCATTGCACTCCAGCCTGGGCGACAGAGTGAGACTCCTTCTCAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAACTTATTCATAGCATAGACCAATTGGCCTCTATTGAAATTTCTCCATTATTTTCACAATGTCCCAGGCTG
+TGAAACCAGGATTTAATAAAGAACCAGAATGCCACATCTGTGTCACCTGGGTAGGGACCAGTCCTGATACATTAACTCCG
+GGTCTCTGGGTAACTGGACTCAACTGCTGGGCAAAACAGAATGTCCGGCTTGGGTTCCTAACGGGGTACCGCAAAGCCTC
+ATGGGAATTGTAGTGTCACCTTCCAATGATGTTACCATCAAGGACCTTGGGAACCAGCTTTTCTCTCTGCGCTTGCGCCG
+CCCGGCCCACTCCGCCATTTTCCTCCGGAAGTGCGGCACCCAGAGGCCGTCCTGTAGCCGGGCCGGCTTGGGGCTTGGTT
+CTATGTCCCTGCGGGTCGGTGCAAGGGCGAAGAGGAACCCGTGGGCCTCAGGGGATCCCGGGGGGCCGGACCAGTGTTCC
+CTAGTTGTGGGAGCAGACGCGTGGGCACATTGCGGGCGGGCAGGGCCTGAAGTGCAGGTGCGGGCAGTGGACCCTGGCGG
+GGGCTGGGAGGACAGGCGTGGGGTCCCGGCAGTGAAACGGGTTCTAGAGGCGCAGGAGCAGGTAGGAGAGGCCGGTGGCC
+CTGGGCCCGGAGTCTGCAGGCCGCGCTCCTGTCCTGCCGCTGAGGGACCCGGTTACCAACCTGCATGACGCTCAGTTTGC
+CCATCTGTCCCAGTGGTAACACACAGTTCTCGGGAGACGTTCCCCATTCCCAGAGGAGTAGTGTGAAACGCGTGCGCCTC
+TAGTCTTAAACTTGGCGTTTGTATTAGTTGGGTTTCCTGGTGTCTCTTTAGCAAGTGAAGTTTCTGGTTCCCTCCTTCAC
+TGTGTGACCTGCCTAGTCCTCCTGGGTCGCATTTACAGAAGTTTATACGAGACCTAGTTTCCAGGGAAGAACTCACTGAT
+TCCGCGAGGGAGATGGCTTAATGGATGATGGTCGTCAGCCTTAAGGATACTTCAGTCTTAACTGTGTGTTACAAAGTTTG
+AAAGGGAGGGTTCCCTATGAATAAGAAGCACACTTGAAAGAACAGCCATCTGGTCTAACCTCTCACTGGTGCTTCAGAGG
+AGGAAAAAAGGTCACAGGTGAAGATCCCAGTTTTCCTTGCTCAGGAAATATTAATTCTACTCCCTAGAATGCACAAGATT
+TGCAAAGACTAGGTGATAGTAGAAGGTTTGGACGAACTTTCAGAAGGTTGAGGTGAATTCAGCTAAGAAGAACAGGCAAG
+GACTTAGGAAATATTCCTTATTTGAAGGGGCCTGAAAGTGTGGTCTGGGGTACAGCAGTGACCTGTCATACTTGAGAGGA
+TTAAAATACTCTCCAAACACAGTCCCATTCCTTCAACCTTAGCTCGTTTTTTCCAGCGTCTGAGATACATTAAACCTAGT
+CCATCCCCAAATTTAACATTAGATTGCGAAGTTCTATTGATTGTATTTGATTTGTAATTTAACATTTTCTCCCCCTACGT
+AATTTTGTTAAAAACACAGAAGTGAATTCTGTTCACTTAGGTGTAACAGTTAATACTTGCTGTTTAAGGAACTAATTAAA
+CCTTACTGGCTTATAAAAAACAACCACCATTTTATTTGTTCGAAGTTCTGTGGATCTGCACTTTGGTGTGGTGGGTTCAG
+CTGGGTAGTTAATATATTTGTGTTGCCTGGATCACAAAAAGGCCTCAGTCACCTGGTGCCTTGACTGAGCCTGGTTGGTT
+TAAGATAGTTTACTTCACAATCTGGTGGTTTGTGGTGACTCTTGGCTAGGCCCTGTGTCTCCAACAGGGTAGCTCCAGAC
+TTCTTCACAATTTCCCCCAAAAAGGAAAGAACCAATGGATATTTGCATCACATTTTCCATTTTCCATTCACTGGACAAGT
+CAGATGGAAAAGCCCAATTTATTGTCAGAGCATAATATGAGGGCTTGGATAGAAGGAAAGGTGTTATTGGGAAACATGAG
+TAGAATGGTGTACTGCAGGAAATACATATTATGTACATTTTAAAAAACGTAATTGTAGGCCAAAATTGCTGGTTTGCAAG
+ATGCACTTTCCATGATGTTCAGGTATAGAAAAGCAAGATGTACTGTCATAGGAACACTCATATGAAGTTATTTGTGGAAT
+CTACATATTAATAGGAAAATAGTTAATACAGCCCAGTATATTTCTATAACATTTATTTTAGTGAACTTATAATGTTTCTT
+TGTATTAAATTATTAGATTATATCTTTAGATAATATTGTTACTAAATTAGTAGGTAATATATATTTTTATTCAAAAATAA
+ATTGTGCATCTAATGTCTACCAATTAATGTACTTGTAGATGTATCTTATCTTAACTTGAGTCTTTGCTGCCCCTAATGAG
+GTGTGAAGGACTCTTCTCCCCTGGGGAAGTTTTTCTTTTTCAGGAGGGAGGAGGGCTTTCCCAGGTAATGTGTCTAGAGT
+GTTGGGCAGAAGAATCTGGGACCACACCACACCAGTTCTCTCCTTAATCCACGTCATTTGCCTTCTATCCCAGCTATGTT
+TCCAGTGTCCTCTGGGTGTTTCCAAGAGCAACAAGAAATGAATAAATCTCTGGTGAGTTGTTTATTTGTTCTTCACTTTG
+TTTTACACTGTATTTTCTGAGTTTATGGGTGTCTGTGAATTAAAAAGGAAAAGTAGAAATAAGTAAAACTCAGGTTGAAG
+GAAATATACATAAATAAGATAAAGCTGACCTGTAGATATAGGCAGGTTATAAGAGCTTAGAGTTGTCTAAGTTGAGTGCA
+AATTTTCCTCTGATCTTTCTGATGCCGAGACAAAAAAGGCAGTCATGTTTGTTATGTGATTGGAATGGAACCCGAGAAGA
+GAGCATGCTGTGTTCTTGTGGGACAGGAAAGCTTGCGTGCACCAAGTCTGAACCACCACCTTCATTGGTGACATAGATTA
+TGTGCTGGAACATATTTCACACCGGCCTGGCAGTAAACACTTGTAGTGTTGTGCAGTGGAAACGGTCATCTTCCGCTAAA
+GCACGGCGTGTTGTGCAGCGGAAATGGTCATCTGCTGCTAAAGCACAGCTTCCATCGTAATGTATGCTCCTTGCTCAAAG
+AGTGTGGTCCCAAACAGCCTTTGGGAGGTCCTCCTTGATTCATGGATGAAACCTGGAACATCTTGAGGACTGAGTTAACC
+ATAGGTCCTTAAATAACTCTCCACACGTTTTTCTTAGTTTATCTCTACATGCAGGGTGTGCAGCAGCCTGTTCAAAGTCA
+TATTTTCTGGGAAATATTTCCAGTGTTTATTTGCACTTTAGCCCACTCTGTGTAGCCTTATTTCTTCTAAACTCACCATT
+AACCTGAATAATAGTCAAATTTAGGGGGACTGTATTTGCCTTACTTGAGTCTTCTACCATAGTTGAAACTGTCGTACCCG
+AGTGAGTTAGAGAGAAATGCCACGCATTGAGACGAATTCAGGAGTCCTTTACTAGCCAGCGACTGAGACATGGCTAACGC
+ACGAAATTCTCTCGGCCCTGAAGAAGGGACTAGATTTTCTTTTATACTTTGGTTTAGAGAGGGGAGGGGGGATTCTAGCT
+GCAACAACTTTACAGAAGAAAAAAACAGACAAAAAACTTAGAAAAACAGATGGTTACAGGAAAACAAACTGTTCCTGGTG
+TAGGGTCTTTAAATTCACCACAAAGTGATAGGTGAGGGGGCTCTGGGCATTATCTGCCGGACAAATGTGGGGGCTTTATG
+ATACTATCTCTGAGTAAATTGCTGGGAACTGGGGACATCTCTTGTCTCAGCACTTTATCAGTTATTTGCACGCTTTGATA
+TGTTGAAAATCAGCTTGCACAAGTTAAAGTCCTTGAGGAAAGGGGGTGGGTAAGGAGTCCTTGATGTCTTGTTAATGAAG
+GAGCCAAATGGAGTTTGTCTGGTTTTCTCAGCTAAGGGAGAGTCTATTCATATTAAAAACAAGGTTAGCTGTCTAAGGAA
+GAGTCTATTCATGTTAATACAACGTTGGGTATTACAAAACGTCTGTTCATGATCTGGAAATTCTTCTGTGTTAGTTCTGT
+TAAAAGAAAAACTTTAAAGGAGTTTAATTTAGCAATAAACGATTCATGAATCGGACAGTCCCCAGAATCACAGCAGATTC
+ACAGAGACTCCAGTGCAGTCATGTGGTGGAAGAAGATTTATAGACAAAAGGGAAGTGGCATACCGAAATCGGAAGTGAGG
+TACAGAAACAACTCAGCGTTTGCCTTGTTTGAACACATTTTGAACATTTGGCAGTGCCTGAGTGGTTGAAGTTTGGCCAT
+TGGGATTGGCCAAGATGTAGCTGTTGTTCCAGGTGCATACTCTTAAGTTAGTTTTTCATTCTTGTATACCTATTAAGGTA
+GGTTGCAGTTCACCCACAAGGACTCATATATAGAATTATGGAGTCCTTCTCAGGCCATACTTAGTTCACTTTAACAATGC
+CTTCCCTTTGGTTATTTTCTCAATTTTGAGAGATTGGCCGAAACTTCAGTCACTGGTGTCACTATTACCATTGCAAATGT
+ACTTACTTGGTTTAGAAACCCACTGGGAAATAGACCAGTGAGATTTGAAAAGGTGGAACAAGGACTTGAGTAGAAGGTAT
+CTTCTTATGCTGGAACATCCTGTTTACAGGAGAAAAACAAAACCTGGTTTGTTCTAGGATTTATGTGTTTCCCTAAAGTC
+TTAGTTTGATTATGTTACATTTAGCATGAGTGACTCCATTTTGGTTTGGTTTGGTTTGGTCTGTTGGGACCTATTGCATG
+AGCTTAGTTCAAAACAATGGCCTCCCATAATTTTGCTTTAAAAATTCCTCCTTTTTGCTGGGTGTGGTGGCTCACACCTG
+TAATCCCAGCACTTTGGGAGCCTGAGGTGGGCAGATCACGAGGTCAGGAGATTGAGACCATCCTTGCTAATATGGTGAAA
+CCCCATCTCTACTAAAAATACAAAAAATTAGCCAAGTGTGGTGGCGGGTGCCTGTATTCCCAGCTACTCAGGAGGCTGAG
+GCAGGAGAATGGCCTGAACCCGGGAGGCAGAGCTTGCAGTGAGCCAAGATCATGCCACTGCACTCCACTCTGGGGGACAG
+ACCAAAACTCTGTCTTAGAAAAAAAAAATCCTCCTTTTCAGTCAAGTTCTCACTTAGTTGAGAGTGTGACCAAAATGTAG
+GGCCTTAGCATCACTCTTAGTTACCATTGTTTTGGGTTCCAGTTTTAGCATGTCATTCCCATTGTTTTGGGTTTCTGGTT
+TAGCACATCACTCCCATTGTTTTGGGTTCCGGTTTTAGCACGTCATTCCCATTGTTTTGGGTTTCTGGTTTAGCACGTCA
+CTCTCATTGTTTTGGGTTCCTGTTTTAGCACGTCACTCCCATTGTTTTGGGTTCTGGTTTAGCACGTCACTCACATTGTT
+TTGGGTTCTGGTTTTAGCACCTCACTCCCATTGTTTTGGGTTTCCAGGTTTTAGCACGTCACTCCCATTGTTTTGGGTTT
+CTGGTTTAGCACGTCACTCCCATTGTTTTGGGTGTCTGGTTTTATCACGTCACTCCCATCGTTTTGGGTTTCTGGTGTAG
+CAGGACACTCCCATTGTTTTGGGTTTCTGGTTTTAGCACTTCACTCCCATTGTTTTGGGTTTCTGGTTTAGCAGGACCCT
+CCCATTGTTTTGGGTTTCTGGTTTTAGCATGTCACTCCCATTGTTTTGGGTTCCGGTTTTAGCACGTCACTCCCATTGTT
+TTGGGTTCCGGTTTAGCACGTCATTCCCATTGTTTTGGGTTCAGGTTTTAGCACATCACTCCCATTGTTTTGGGTATCTG
+GTTTTAGCATGTCACTCCCATTGTTTTGTGTATCTGGTTTTAGCACCTCACTCCCATTGTTTTGGGTTTCCAGGTTTTAG
+CACGTCACTCCCATTGTTTTGGGTTTCTGGTTTATCATGTCACTCTCATTGTTTTGGGTTTCTGGTTTAGCAGGACGCTC
+CCATTGTTTTGGGTTTCTGGTTTTACACGTCACTCCCATTGTTTTGGGTTTCTGGTTTAGCAGGACACTCCCATTGTTTT
+GGGTTTCTGCTTTAGCGGGTCACTACCATTGTTTTGGGTTCCGGTTTAAGCACATCACTCCCATTGTTTTGGGTTCCGGT
+TTTAGCACATCACTCCCATTGTTTTGGATTTCTGGTTTAGCAGGTCACTCCCATTGTTTTTGGTTCTGGTTTAAGCACAT
+CACTCCCGTTGTTTAGGGTTCTGGTTTTAGCACCTCACTCCCATTGTTTGGGTTTCTGGTTTTGCAGGTCACTCCCATTG
+TTTTGGGTTCCAGTTTAAGCACATCACTCCCATTGTTTTCGTTTCCGGTTTTAGCACGTCACTCCCATTGTTTTGGGTTC
+CAGTTTTAGCAAGTCACTCCCATTGTTTTGGGTTTCTGGTTTAGCATGTCACTCCCATTGTTTTGGGTTTCCAGTTTAGC
+ATGTCACTCATAGGTTATGGTGGCCTTATGGTTGCACATTTTTTTTAATCTCTTGTCATTCCAGTTGAAGAGATACCATT
+TGACATTTTAGAGATGGCTGCATGCAAACTCTTGAAACATTTGAGTAAGTACAGTACACCAGGGAGACTCTTATGACTAT
+TGGGATAACACCAAGATGTGGTATATGCTCCTTACTCAGGGTCCCCATAAATCAAACCACCAAAAATCAAATAGATTAAA
+GAATGAATTAGATAAAGGGTTTACTTGCTTAACTAAGTGGTTTTTTTTTGTTAATTCCCTACAACCAAATCTTTATAATA
+CCCCATGTTTTCTCCACATGCTGTAAGTGTTAGCAGCTGCACAGATACTTAAGATAAGAGTCTCATGATAGTAGAGAAGT
+CTTGATCTGTGATCTTGGGAAAAGCTGTTCACATTAAGGATGCCATCTTCTTCTGGGGGGAACTGTCCGTGTTAGCTTTA
+CCTTAAGGGTTCCAATAGGCATATGGTTCTGAGTGTGGAGGGACCCTTCTGAGTTGTGAGACTATGAACCCAAAGTTTAA
+GGTTTTAAAGTTTTGTTGTCATGTGGATGGCGAGGGCAGTCCTTCTCTGATGTTCTCAGAAGATCCAGTCATCAGATTCT
+AGATTTTGAAGGGTTTGACTGTCCTCAGTGAACCATAAAAGGCTTTCTTTACCTGGTGAAAATACACTTCAGGGTAATAA
+TCTACTGTTTTAACATCAACTCTCTCGCATGGAAGAGCTTTTATACAATCAGAAAACATGCACTGAAAATGACAACTGAA
+TGAAATCCCTTTATAAAATGTTTAAATGGCCCATCAGATAACCAAATGTACCTAAAGTTTTGATTGTTTTCCTAGGAATA
+TAGGTTTCACAAACCAAACATTGGTTATAAACTATTTTAGCAGTTTAGAAATCACCACACCAATATATTTAATTTGGATC
+GTTTTCTCTTTCTGTGATGAGTTATGGAATGCAGAACTTTTAATAACAAAAGTTTTTAGGACTTAAGAAGGATAAGGTGG
+CCATCCTGGTTCTTTATAGGTCTGTGCTTAATTAACATCAGACTTACATCCTCTTGAATACCAGCTGTTTCTCCAAATTA
+GGTGCATGGCACTGGTAACTGATGAGTAGTTATAGGTAATTTGATTTAGACCATGGAGTTTATTTAAATTATATATCTAA
+ACAATTTCAATATTGGTGATTTAGCATGAAAATGTGGCATAATATTTCCTTGGTATACAATTTTTGTTTTACTTGGGTTA
+GCAGTTTTACAAACCAGTTGGTCTTTTTATTAAATTTTTGATATTTTTTTTTTTGAGACAGAGTCTCACTCTGTTACCTA
+GGTTGGAGTGCAGTGGCACAATCTTGGCTCACTGCAACCTCTGCCTCCTGGGCTCAAGCAATTCTCTTGCCACAGCCTCC
+CGAGTAGCTGGGATTACAGGCACATACCACCACACCCAGCTAATTTTTGTATTTTTAGTAGAGGTGGGGTTTCACCTTTG
+GCCAGGCTGGTCTCAAACTCCTGACCTCAAGTGATCCACCGGCCTTGGCCTCCTAAAGTGCTGGGATTGCCGACATGAGC
+CGCTGCACCCAGCCTAACTGTTGAGAATTCTTAGCCAGTCCAATTCTTGGGGTATCGGGGAACTTATGGGGAATTTTTAC
+CCATGATATTAAAGTTATTAGAAATCTGTGTTCACGAGTGTTTTTCAGGGTCATTTTCATTCTTTCATGAATCTTCTAAG
+AGACACCATACTCTAGAATTTTGCATGCTTGTGAAGTTTTTAGAAACTGCATCACTATTAAGCAATTAACTGTGAAATGA
+CTTTAGTTATAGTTAAAGACAATTGACAAGGAAATTTGGTTATTTCTGTGGTCTACAATAACTTAATAACCATAATTAGG
+GTGGATGTGGTGGCTCATGCCTGTAATCCCAGCACTTTGGGAGGCCGAGGTGGAAGGATCATGAGGTCAGGAGATCGTGA
+CCATCCTGGCTAACACGGTGAAATCCATCTTTACTAAAAATACAAAAATTAGCCTGGCATGGTGGTGGGTGCCTGTAGTC
+CCAGCTACTCGGGAGGCTGAGGCAGGAGAATGGCATGAACCCGGGAGGCGGAAGTTGCAGTGAGCCGAGATTGCGCCACT
+GTACTCCAGCCTGGGTGACAGAGCAAGACTCCATCTCAGAAAAAAAAAAAATACAAGAATTTTAGAAATCCTACACAATT
+TTAGAATGGATTGATGACACACACTAAATATAACCTGAGGAAGGTTCCACATTATTTTTTATTTTGACAGTGCTACCCAT
+GTGACTTAACATGTTAAATAGTCCTGTTTACCTCTCTTTTGGGTGCTTCAGGGGTCTCTGTAGTATCCCAAAATTAGAGG
+TCAGAAAAGACAATTTTGAAGTTGAAATTTGATTTTGGGAAGCCTATTAAATATATTAAAGGTTTAAACACTTGATGTTA
+TGAAATAGAATTCCACGTCACCGTAAGTCATTCATTTACCAAAAATCATGACAAAAAATTTTTATTATTTTTTTATTTTT
+ATTTTTTATTATACTTTTAGTTTTAGGGTACATGTGCACATTGTGCAGGTTAGTTACATATGTATACATCTGCCATGCTG
+GTGTGCTGCACCCACTAACTCGTCATCTAGCATTAGGTATATCTCCCAATGCTATCCCTCCCCCCTCCCCCCACACCACA
+ACAGTCCCCAGAGTGTGATATTCCCCTTCCTGTGTCCATGTGATCTCATTGTTCAATTCCCACCTATGAGTGAGAATATG
+CAGTGTTTGGCTTTACGTTCTTGCAATAGTTTACTGAGAATGATGATTTCCAATTTCATCCATGTCCCTACAAAGGACAT
+GAACTCATCATTTTTTATGGCTGCATAGTATTCCATGGTGTATATGTGCCACATTTTCTTAATCCAGTCTATCATTGTTG
+GACATTTGGGTTGGTTCCAAGTCTTTGCTATTGTGAATAATGCCGCAATAAACCTACGTGTGCATGTGTCTTTATAGCAG
+CATGATTTATAGTCCATTGGGTATATACCCAGTAATGGGATGGCTGGGTCAAATGGCATTTCCAGTTCTAGATCCCTGAG
+GAATCGCCACACTGACTTCCACAATGATTGAACTAGTTTACAGTCCCACCAACAGTGTAAAAGTGTTCCTATTTCTCCAC
+GTCCTCTCCAGCACCTGTTGTTTTCTGACTTTTTAATGATTGCCATTCTAACTGGTGTGAGATGGTATCTCATTGTGGTT
+TTCATTTGCATTTCTCTGATGGCCAGTGATGATGAGCATTTTTTCATGTGTTTTTTGGCTGCATAAATGTCTTCTTTTGA
+GAAGTGTCTGTTCATGTCCTTCGCCCACTTTTTGATGGGATTGTTTGTTTTTTTCTTGTAAATTTGTTTGAGTTCATTGT
+AGATTCTGGATATTAGCCCTTTGTCAGATGAGTAGGTTGCGAAAATTTTCTGCCGTTTTGTAGGTTGCCTGTTCACTCTG
+ATGGTGGTTTCTTTTGCTGTGCAGAAGTTCTTTAGTTTAATAAGATCCCATTTGTCAATTTTGGCATTTGTTGCCATTGC
+TTTTGGTGTTTTAGACATGAAGTCCTTGCCCATGCCTATGTCCTGAATGGTAATGCCTAGGTTTTCTTCTAGGGTTTTTA
+TGGTTTTAGGTCTAACGTTTAAGTCTTTAATCCATCTTGAATTGATTTTTGTATAAGGTGTAAGGAAGGGATCCAGTTTC
+AGCTTTTTACATATGGCTAGCCAGTTTTCCCAGCACCATTTATTAAATAGGGAATCCTTTCCCCATTTCTTGTTTTTCTC
+AGGTTTGTCAAAGATCAGATAGTTGTAGATATGTGGCATTATTTCTGAGGGCTCTGTTCTGTTCCATTGATCTATATCTC
+TGTTTTGGTACCAGTACCATGCTGTTTTGGTTACTGTAGCCTTGTAGTATAATTTGAAATCAGGTAGTGTGATGCCTCCA
+GCTTTGTTCTTTTGGCTTAGGATTGACTTGGCGATGTGGGCTCTTTTTTGGTTCCATATGAACTTTAAAGTAGTTTTTTC
+CAATTCTGTGAAGAAAGTCATTGGTAGCTTGATGGGGATGGCATTGAATCTGTAAATTACCTTGGGCAGTATGGCCATTT
+TCAAGATATTGATTCTTCCTACCCATGGGCATGGAATGTTCTTCCATTTGTTTTTATCCTTTTTTATTTCCTTGAGCAGT
+GGTTTGTAGTTCTCCTTGAAGAGGTCCTTCACATCCCTTGTAAGTTGGATTCCTAGGTATTTTATTCTCTTTGAAGCAAT
+TGTGAATGGGAGTTCACTCATGATTTGGCTCTCTGTTTGTCTGTTGTTGGTGTATAAGAATGCTTGTGATTTTTGTACAT
+TGATTTTGTATCCTGAGACTTTGCTGAAGTTGCTTATCAGCTTAAGGAGATTTTGGGCTGAGACAATGAGGTTTTCTAGA
+TATACAATCATGTCGTCTGCAAACAGGGACAATTTGACTTCCTCTTTTCCTAATTGAATACCCTTTATTTCCTTCTCCTG
+CCTAATTGCCCTGGCCAGAACTTCCAACACTATGTTGAATAGGAGTGGTGAGAGAGGCCATCCCTGTCTTGTGCCAGTTT
+TCAAAGGGAATGCTTCCAGTTTTTGTCCATTCAGAATGATATTGGCTGTGGGTTTGTCATAGATAGCTCTTATTATTTTG
+AAATGCGTCCCATCAATACCTAATTTATTGAGAGTTTTTAGCATGAAGCATTGTTGAATTTTGTCAAAGGCCTTTTCTGA
+ATCTATTGAGATAATCATGTGGTTTTTGTTTTTGGCTCTGTTTATATGCTGGATTACATTTATTGATTTGTGTATATTGA
+ACCAGCCTTACATCCCAGGGATGAAGCCCACTTGATCAAGTTGCATAAGCTTTTTGATGTGTTAATGGATTCGGTTTGCC
+AGTATTTTATTGAGGATTTTTGCATCAATGTTCATCAAGGATATTGGTCTAAAATTCTCTTTTTTGGTTGTGTCTCTGCC
+TGGCTTTGGTGTCAGAATGATGCTGGCCTCATTAAATGAGTTAGGGAGGATTCCCTCTTTTTCTGTTGATTGGAATAGTT
+TCAGAAGGAATGGTACCAGTTCCTCCTTGTACCTCTGGTAGAATTCGGCTGTGAATCCATCTGGTCCTGGACTCTTTTTG
+GTTGGTAAGCTATTGATTATTGCCACAATTTTGGCTCCTGTTATTGGTCTATTCAGAGATTCAACTTCCTGGTTTAGTCT
+TCGGAGAGTGTATGTGTCGAGGAATTTATCCATTTCTTCTAGATTTTCTAGTTTATTTGCGTAGAGGTGTTTGTATTATT
+CTCTGATGGTAGTTAGTATTTTGCTGGGATTGGTGGTGATATCCGCATTATCATTTTTTATTGCGTCTATTTGATTCTTC
+TCTCTTTTTTTCTTTATTAGTCTTGCTAGAGGTCTATCAATTTTGCTGATCCTTTCAAAAAACCAGCTCCTGGATTCATT
+AATTTTTTGAAGGGATTTTGTGTCTCTATTTCCTTCAGTTCTGCTCTGATTTTAGTTATTTCTTGCCTTCTGCTAGCTTT
+TGAATGTGTTTGCTCTTGCTTTTCTAGTTCTTTCAATTGTGATGTTAGGGTGTCAATTTTGGATCTTTCCTGTTTTCCCT
+TGTGGGCATTTAGTGCTATAAATTTCCCTCTACACAGTGCTTTGAATGCGTCCCAGAGATTCTGGTATGTTGTGTCTTTG
+TTCTCATTGGTTTCAAAGAATATCTTTATTTCTGCCTTCATTTCCTTATGTACCCAGTAGTCATTCAGGAGCAGGTTGTT
+CAGTTTCCATGTAGTTGAGCAGTTTTGAGTGAGATTCTTAATCTTGAGTTCTAGTTTGATTGCACTGTCATCTGAGAGAT
+AGTTTCTTATAATTTCTGTTCTTTTACATTTGCTGAGGAGAACTTTACTTCCAAGTATGTGGTCAATTTTGGAATAGGTG
+TGGTGTGGTGCTGAAAACATGTATATTCTGTTGATTTGGGGTGGAAAGTTCTGGAGATGTCTGTTAGGTCCGCTTGGTGT
+AGAGCTGAGTTCAATTCCTGGGTATGCTTTTTGACTTCCTGTCTTGTTGATCTGTCTAATGTTGACAGTGGGGTGTTAAA
+GTCTCCCATTATTAATGTGTGGGAGTCTAAGTCTCTTTGTAGGTCACTCAGGACTTGCTTTATGAATCTGCATGCTCCTG
+TATTGGGTGCATATATATTTAGGATAGTTAGCTCTTCTTGTTGAATTGATCCCTTTACCATTATGTAATGGCCTTCTTTG
+TCTCTTTTGATCTTTCTTGGTTTAAAGTCTGTTTTATCAGAGACTAGGATTACAACCCCTGCCTTTTTTTGTTTTCCATT
+TGCTTGGTAGATCTTCCTCCATCCTTTTATTTTGAACCTATGTGTGTCTCTGCACGTGAGATGAGTTTCCTGAATACAGC
+ACACTGATGGTTCTTGACTCTTTATCCAATTTGCCAGTCTGTGTCTTTTAATTGGAGCATTTAGTCCATTTACATTTAAA
+GTTAATATTGTTATGTGTGAATTTGATCCTGTCATTATGATGTTAGCTGGTTATTTTGCTCGTTAGTTAATGCAGTTTCT
+TCCTAGTCTCGACGGTGTTTACATTTTGGCATGATTTTGCAGCGGCTGGTACCAGTTGATCCTTTCCATGTTTAGTGCTT
+CCTTCAGGAGCTCTTGTAAGGCAGGCCTGGTCGTGACAAAATCTCTCAGCATTTGCTTGTCTGTAAAGTATTTTATTTCT
+CCTTTGCTTATGAAGCTTAGCTTGGCTGGATATGAAATTCTGGGTTGAAAATTCTTTTCTTTAAGAATGTTGAATATTGG
+CCCCCACTCTCTTCTGGCTTGTAGGGTTTCTGCTGAGAGATCCGCTGTTAGTCTGATGGGCTTCCCTTTGAGGGTAGCCC
+GACCTTTCTCTCTGGCTGCCCTTAACATTTTTTCCTTCATTTCAACTTTGGTGAATCTGACAATTATGTGTCTTGGAGTT
+GCTCTTCTCGAGGAGTATCTTTGTGGCGTTCTCTGTATTTTCTGAATCTGAATGTTGGCCTGCCTTGCTAGATTGGGGAA
+GTTCTCCTGGATAATGTCCTGCAGTGTTTTCCAACTTGGTTCCATTCTCCCTATCACTTTCAGGTACACCAATCAGATGT
+AGATTTGGTCTTTTCACATAGTCCCATATTTCTTGGAGGCTTTGCTCATTTCTTTTTATTCTTTTTTCTCTAAACTTCCC
+TTCTCATTTCATTTCATTCATTTCATCTTCCATCGCTGATACCCTTTCTTCCAGTTGATCGCATCGGCTCCTGAGGCTTC
+TGCATTCTTCACGTAGTTCTCGAGCCTTCGTTTTCAGCTCCATCAGCTCCTTTAAGAAATTCTCTCTATTGGTTATTCTA
+GTTATACATTCTTCTAAATTTTTTTCAAAGTTTTCAACTTTGCCTTTGGTTTGAATGTCCTGCCATAGCTCAGAGTAATT
+TGATCATCTGAAGCCTTCTCTCAGCTCGTCAAAATCATTCTCCATCCAGCTTTGTTCCGTTGCTGGTGAGGAACTGCATT
+CCTTTGGAGGAGGAGAGGCGCTCTGCTTTTTAGAGTTTCCAGTTTTTCTGTTCTGTTTTTTCCCCATCTTTGTGGTTTTA
+TCTACTTTTGGTCTTTGATGATGGTGATGTACACATGGGTTCTTGGTGTGGATGTCCTTTCTGTTTTTTAGTTTTCCTTC
+TAACAGACAGTACCCTCAGTTTCAGGTCTGTTGGAGTTTTCTAGAGGCCCATTCCCGACCCTGTTTGCCTGGGTATCAGC
+AGTGGTGTCTGCAAAACCGTGGATTTTCATGATCCGTGAATGCTGCTGTCTGATCGTTCCTCTGGAAGTTTTGTCTCAGA
+GGAGTACCTGGTCGTGTGAGGTGTCAGTCTGCCCCTGCTAGGGGGCGCCTCCCAGTTAGGCTGCTCGGGGGTCAGGGGTC
+AGGGACCCACTTGAGGAGGCAGTCTGCCCATTCTCAGATCTCCAGCTGTGTGCTGGGAGAACCACTGCTCTCCTCAAAGC
+TGTCAGACAGGGACATTTAAGTCTGCAGAGGTTACTGCTGTCTTTTTGTTTGTCTGTGTCCTGCCCCCAGAGGTGGAGCC
+TGCAGAGGCAGGCAGGCCTCCTTGAGCTGTGGTGGGCTCCACCCAGTTCGAGCTTCCAGGCTGCTTTGTTTACCTAAGAG
+AGCCTGGGCAATGGCCGGTGCCCCTCCCCCAGCCTCGCTGCTGCCTTGCAGTTTGATCTCAGACTGCTGTGTTAGCAATC
+ATCGAGACTCCATGGGCATAGGACCTTCTGAGCCAGGTGCGGGATATAATCTCGTGGTGTGCCATTTCCTAAGCCCATCA
+GAAAAGCTCAGTATTAGGGTGGCAGTGGCCCGATTTTCCAGGTGCCATCTGTCACCCCTTTCCTTGACCAGGAAAGGGAA
+CTAACTTCCTGACCCCTTGCACTTCCCGAGTGAGGCAATGCCTTGCCCTGCTTTGGCTAGTGCACAGTGCACTTCACCCA
+CTGTCCTGCACCCACTGTCTGGCACTCCCTAGTGAGATGAACCCAGTACCTCAAATGGAAATGCAGAAATCACCCATCTT
+CTGCATCGCTCATGCTGGGAGCTGTAGACCGGAGCTGTTCCTATTCGGCCATCTTGGCTCCTCCTCCCATTATTTTTTAA
+TATTTTCTGAAAATCTTCTTTAAAGAGAGAAAGCCAAATGTCACCCACTTTTTCATAAAACCTTACAGGCAAATCTATTA
+TTCTTTTCTTTTTTGAGATGGATTTTCCCTCTTGTTGCCCAAGCTGGAGTGCAATGGTGCGATCTCGGTTTACTGCAACC
+CCCTGCCTCCCAGGTTCAAGTGATTCTCCGGCCCCAGCCTCCTGAGTAGCTGGGATTAGAGGCATGCCCCACCATGCCCA
+GCTAATTTTGTGTTTTTAGTAGAGACGGGGTTTTTCCTTGTTGGTCAGGCTGGCCTTGAACTCCTGACCTCATGTGATCC
+ACCTGCCTCGGCCTCCCCAAGTGTTGAGATTACAGCTGTGAGCCACTGCCCCGGGCCATTTTTTTTTAAAGATAGCGTCT
+TGCTCTGTCACCCTCCTCACCACATTATAGCTCTGGGGGCCAAGCTGCATCACAATGGAAATCATGGAGCCACAGGAAGA
+ATCCACTTAGCTTTGCAAGATGCTGCCCAAGGGGTTGCTTGGAGTAACCAAATTAACATTTTTCATTCTGCTCAGAGCAA
+AATATATGTGACAAAACATAGCCACGAGCCACTTTGCTTAGCACCCAGTGTCAAACTGGTAAGACTCAAACTTCCTCCCA
+GATAGGCCATGCCATCTCTAAATCTTTTTAGAAGCTTCTGCATGTTAATAGGCATCCCTAGATGAGACTAATTTGGGAGC
+CATCATTTTTAAATGCACTTCAGGGCATTATTCATTTGGAATGTTACACTATAAGTTATCTTTAGTAAGATTTTGCCATT
+TCTGTAAGACTTTGCTGCTTCCCAGGCCTAATGAATTAGCCAGAAAGAACTTAGTTTTCCAGAAATTAAGGATCCTATTT
+TTACCTAATATATTGGCTTTACTCTCAGGTTCCCTTGATTGACTTAGCCAATGATTTTTTTTCCTACCTAAGCGTGTGAG
+GAAAATGAAACAAAGGGGCAGAACACAAAAATCCCCGTGAATTTCCAAAAGCCAAATTTTACAACCCTCCAATATTATCA
+TTTACTACCACTTTCCTTCTGACCCATTCAGATGTAGGAGGCCTCTAACTGGAACTGGATTCAAGCCAGTTAATTACTGG
+ATCAAATCTGATCCTGGACCCGGTCCCGTTTCTGTCATAACTTCTAAAACATCCAGCCAGTCATGGCTGGATAGCAGTTT
+GGAACAGAAATTTGCTCAGAGAAACTCAGCTCAAAACACAAATTCATGGAGCTCTGAAATCCGAGAGAGAATTTACCACG
+ATCCCCAGATGCTCTGAGAGGTCAAAGGGCACAAGTGTTACAGAATCCTGAGGCGTCACTTTTCTGCCTGAAACCTCTGG
+CTGGTGGCGCCTTTACCTGTGTTTTGCTCGGGCCCACTGGGTTCGTTCTGTCCACTTGGCTCATGCTAGTGGTCTGGATC
+CCACACCTGCCAAGGGTGAGCTGGGTACAGAGCAGTGAAGGGTGTGTGAGCAAGCGAGCATGGGATCTGGCCACTGCACA
+CAGCCAAGCATGCCAGCTGCAGTGGGGTGGGCAGCTCCAGGCACCGCCACAGGTGCCAGCTCCCTGTGAGGCTGCAGCTG
+GACCAGGCTGACTGCAAACAGCTTCCACTGTGGGTATCAGGGAATGCAGCGGTGCCTGGAAGCTTCGAGATGCAGGAACT
+GCAGAGCCCCAAATAAGGTGTCACAGCCCTGGCTTGGGGAGCTCCTAGGTCTGGGCTCCCTGAAGGGCCACAGCTCTTCT
+CTCCTTCTCTCTTCTCTTCTTCTTGCCTGCAATTTGGCAAGCAAGGGGTGCGTTTCAGCCCTGTTTATGTTACACCTCTT
+TCAGCCCTGCTAGTTGGCAGGTCCCGAGTTCTTGTCCTGAGTCCAGGAAGAATGAGGTATGTGGGCAAGTAGAAGGTGAG
+CAAGGTGAAGAGGTGCTTTATTGAGCAACAGTACAGCTCAGAGGAGACGTGCAGTGGGTAGCTCCTTTCGGCAGGCAGGT
+CATCCCAATGTCTGTTCAGCTCTCAGCAGCTGAGAGAGATGCATGGTGGTTAGCTATGCCCACAGTGCCCAGGCTTTTCG
+AGCTGAGGAGTGCCTTCAAGCCAGTGCTGAGCCACTCTTAGCCCCACCTCAACGTCCCTCCTGTGCTCATCAGTTCCCAA
+AGTGTGGAGGGGGCCGAGGTGGCAGGGGGCTGGCATGTCAGCACTGCCCTGAGCTTGCACAAACTGGGCTGGGTTGCGAC
+TGTGCCTGGGTTCAACCTCAATTTGGATCCGAAGTTGGAGTGGGCTCTGGGAGCGGAGACATGCCAGGTGGTGGGAGCAG
+GTATGACTGGGCCTACGGGGGCAGGGGGGCTTGCTGGGCCTCTGAGAGTGCAAAGATGCCCGGGTTTGCTGTCATGGGTG
+GATGGCTGCAGCTGTGCCTGGGAGGGTGGGGCTCCTGCCTGCCAATTTAGAAGGGGTGGGTCTCTCACCTGTTCCTGGCT
+CCCACTAACTTTGAGGAGTTCACAGCCCCAGCCACTCCTCCCCACTGCAGCCAGTGTCTCCGTAGCAACTGCTCCACATG
+GGCCACTGCTGCCATCATAGAGCGGTCCTTGCAGGTGCCTTTCTTGTACCTCAGCACTCCTGGGGGTCATTAGAAGCCCT
+AGCAACACTGCTCACCACACTATAGCTCCAGAGGCCCTAGCAGTCCTGCTCCCACAGATCCCACTTCTGACACCATCTAT
+TAAAAGAAAATCTTCAGCTGAATTAAATTTAAAGGAACTTAATTGAGCAATGAATGATTCACGAATCAGGCAGCCCCCAG
+AATCACAGCAGATTCAGTGAGACTCCAGCACAGCTACATGGTGGAAGATTTATAGACAATAAAGGGAACGTGATGTACAG
+AAATCTGAAGTGAGGAGTGAGGTCCAGAAGCAACTGGGTCCGTTACAGTTCTCAGCAGTGAGGTCCAGAAACAACTGGAC
+TGGTTACAGTGCTCAGCATTTGCCTTATTTGAACACAGCTGAACACTCAGTAGTGTGTGAGTGGCAGAAGTTTGGCTGTT
+GGGATTGGCCAGGACTCAGCTATAGTTACAGGTGCATACTCCAAAGTTAGGTTATCAGTCTTTCTACCTATTAAGTTAGG
+TTGCAGTTTGTCCACAGGGACTCAAATCTAGAAGTACAGAGTCCTTCCCAGGCCATATTTAGTTCACTGTAACAGTTCCT
+ATTATGACCTCACTGACAGTTCTTTTTCTCTGAATTTTCCTTTCTTCTCAACAGCTTGTCCAAATGTTCCATTGGTCCCT
+GTTCATCCCGCCCTGCAGCTCTCCTTGACTGATTCTGCCCTTTGTGGTTTGCAGTCCTGTTTCTCTACAGCTTGGACCCC
+TTCAATCTTTCCATCATAGGTTTAACTCTCTGTTGAATTCTTATTTGTAGCTACGCAAATGTTACCTTAAGCTAAAAAAA
+TTCAAAGTGAAAGCCACATCCTCCTCTCTTCCCTTATGTGTATGGTATTACTACCATGCAGCCAGTGACCCAAAATGGGA
+TTTTTTCTGGGCTTTTCTTGCTTAGATTCAGGCTCATCTGGTGTCAAGCCTTGTTACTTTTGTTTCCTTGTTCTTTTATT
+TTTAATTTTTTTTCTTTTGAGACAGAGTTTCACTCTTGTTGCCCAGGCTAGAGTGCAGTGGTGTGATCCCGGCTCACTGC
+AGCCTCCACCTCCCGGGTTCAAGCAATTCTCCTGCCTCAGCTCCTGAGTAGCTGGTATTACAGGCATTTGCCACCACGCC
+TGGCTAATTTTGTATTTTTAGTAGAGATGGGGTTTCTCTGTTTTGATCAGGGTGGTCTCGAGCTCCCGACCTCAGGTGAT
+CTGCCCACCTCGGCCTCCCAAAGTGCTGGGATTACAGGCCTGAGCCACCGTGCCTGGCCTGCTTGTTGTTTTCATCTCAT
+CCTGATTTCTGAATACAGGAGAGGAGCTGAGTTGGTGTTCACTAACAAGCACAGAAGCTTTGTTACATTTACAGTGTCAT
+TCTTGGCAAAACCTGAATGGTATGTTTGTGGGGTGATGAGGTTCAGTCCCCTGTGACCTGTGCATCTGGCCAACACTGTG
+GTGACATCCTTAGGAATCCATGGGGAGAGACAAAGCATTCAGGAGTTAGTGGGTCACGTTTGACAAGGGCCAATAAAGAA
+ATATGCAAAGACAAAAAACAAGAAGAACATTATCATATTTTATACCTTTTGTTTATATAAATTTATGTCAATGATTCTAG
+CTTATGTTAATATACAATGTATACAATATGCTAACATATACAATATATGTTTATAGTTTAAACATTTCTGTCATGTTTTC
+AGATTCTTTAAAGATTACATTACACTTCCTATTTCAGATAGCTGTTTAAAATGAGTAAGGAAAAACGGATGTGTGCATCA
+GTTCTAACTGTTTATGGACTAAAACTAGTTGATTTCTTGGTTAAGAACAAAAAGTGACAACCTAATTAACTGAAAATTTT
+AAGTAGGCAATTATGGTTTTAGCTTTAATGTAAAATATTAACTATGCTCCATTCTTGCATTTTTAACCTAATACTCAATA
+TAAATCGCCACATGCCATGTTTCAGATCAAGGTTCTACTTGTGATCTCTCATGAGTTTTTCAAGGTTTTAATTATCTGAG
+ATGTAACAATGTACCCATAACCTTACTGGCTTAAACCAGGAATTTATTCTTTTTACATGTCACAATTTTCTGGGTCAAGA
+CACTGGACAGAGCGGTGTGGGTTGGTTGCTTCATGATGTCCCTGGTCTCATCTGGAAGGACTCTAGTGGCTGGAGATGTG
+AAGCAGGCACCCAGAAGGACTCTAGTGGCTGGGGACATGGAGCAGGCACCCAGCCCTCTCTTTGTGGCCAGCACGGACTT
+CCTTCCAGTCTGGTAGCATCAGGTAGTCAGGTTTGTCTGGCTTCTCCCAGGGTGTGTGTCCAAGAGGCCCAGGCAGAAGC
+TGTAAGGTCTCTCATGATCATCCCTCAGAAGTCCCAGAGCATCTCTCCTGCCACACTGTCCAGTTGTACTCATCACTGAG
+ACCAGCCATGATTCAAGGGGGAAAGGTGATTAGATTCCACCTCTTGATGAGAAGCATAGTAGGAACCTGCAGCAGTCTTT
+AATAAACCACAGCTTGTCCTCTGGCCACAAACTATTAACGTTTCTCCCACATGCAAATTATGCTTTGCCCCTCTCAAGAG
+CCCCAGAATGGTTTTCCTTATGGCACTGGCTAGTAGCCCAACTGAATCCTGAATCAGGTTGTGGTGGCTTGTCATCTGCA
+CCCACACACACTCAGCTGCAGTGAGGACTGAATCAGGTTGTGGTGGCCTGTCATCTGACCCCCCACACACAGCCACAGTG
+GGGACTGAATCAGGTTGTTGTGAGCTATCATCTGAGCCCACAAACTTAGCCGCAGTGAGGGGACTGCTGTGAAAACAGTC
+GACATTTCCCTTTAGAAGCTTTGGTGGGAGGCAAGAGGGAAGTGCTGCCCTGCAGGCCCCGTCTAACAGTTGGTCATTCC
+CATGGGGCGCCTGTTACAGTTCTGTGATTAGTGCCCAGTCCTGGTCCCTGAAAACGGCGCCCAGTCCTGGTCCCTGAGAA
+TGGTGTTTGTGTCCTTTTACTCCTCCCTCTGGGCTTTTGTCATTCTCCATGTTCTTTTTCCTTCAGTGCCTGGGTTGCCG
+TTGACCAGCTTTCCCTGCCTTTTTCTTATGGTCAATAGGGTATTCAATGGCTTCTTTTTCATTTTTTTTCCTTTTCTTTT
+CTTTTCTTTTTTTTTACTTTGGCCTTTTGAGACAAGAAATTATTTCTTTATATTTTCTCTAAATTCTGTTTGAAAACTGA
+ACCTTCTTCTTTAGATCATGTCCCTCTCCTGTCATATTTATTCAGTGACAGTTAGGGGAGGCTGGTAGCACTTTCCATGT
+TCTTCCCAGATGTCTCCTTAGGCAGATCCCTGAGATGGTGCAGTGCCCTTTCAGTTTCCATGTTGTGGCCATAGTTTTCC
+CACAGTCCCTCAGCAAGTAACTCTCAGACCTTTTCTCCAGTTTCCAATGACATTTTCTCACCGTCCTTCAGGCCCTGACC
+AAGAGTCTTGATGCCCTTCCAGGTTGCATGAATGGTCTCCTTGAGGCCCAGTTACAGGTCAGCCTCACAGTCGTGTCACA
+TATTGTAGCTTCTGATTACCACAGCAGCTCATTTCCAGCTGCCATATTCTGTTCCAGTTATCTATTCTGAAGTAAAACAA
+CTCATTATTACTTGTTTTTTGGCTTAGAGAGTCTTGGTGGCCAGCTCATCTCACACACAGTTGCAGCCAAGCTGGATTGT
+GTGAAAGCACAGTGGGGTGGTGTGCAGGGTGGCTCACTAGTGGTTGGGAGTGGATGTTGCTGGAGGCTCACTAGTTGTTG
+GGAGTCGGTGTTGCTGGAGGCTCAGTGGGGGATGTCAATGCGTGTAGCTAGTCATGGACTGGCCTTGTGGTTTCCATCAT
+GAGGTCTCAGGGGAGTGGGATTTCCTGCCTGGTGACTGGCTTTCTCCTGGGTAAGTGTTCTGTTTTCTCAGCCTGGCTTC
+TGAAGTCCCCAAATACCAGCTTTGTCACCTTCTGTTGGCCAAATAAGTCAGTAGTCTGGTCAAGGTTTAAGGGGAATTGG
+TTCTCACAGAGAGAGGAGCAGGAAAGAATTTGTCACCTTTAGTCTACCAGAAATGAGATTTTTATAACAAGTTTATTCCA
+AATACATTCCAGTTCCCCTTGTGAATACTTTTTTGACTCACAGGGTATTTCAAAGTTTATTACTTGGTTTTCAGACATTT
+GAGGCTTTTCTGGATATCAATTTGTTGTTGGTTTCTAATTTAATTTCAAGTGTTCAGACAACATCCTTTGTATACTATTT
+CAGGCTTGAACCTTTTCTCAATCCATCGACATACAGTCTATCTTGGCACTGCCAAGTACCATTTGGGTCAGGATTTTGTC
+ATTTAGATCCGTATTTTTCCTATATTTTTATCTGGTTGTTCCGTCAGTTACTGAGAGAGCAGTATTAATTCACCAGCTAT
+AATTTTGGATTGTCAATTTCCTGCTTTTGTTCTGTTGTTTTTGATTCACATACTTTGAGGTTCTGTGTGTGTGTGTGTAG
+TTTGTGTGCACTTTGAGGCACAATTTATAATTGTAACATCATCCTCTCTGATTCTTTTATTTTTATTAAATTACCCTGTT
+TATTTCTGGTGATATATTTTGTTCTGAAGCCTCTTTCATCTAGTGTTAACATCTCTGTTGAAGCTTTTTATGATTAGTGT
+CTGGATAGCATATTTTTATGTTTAGCATCTGCATAGCATATTTTTTCTCATACTTTGTGTCTTTGTGTTTAAATTGTGTC
+TCTGTGGATGCCATATTGTTGGGTCTTGCCTTCCTCTCAGGTCTGGCAGTCTCTGTCTTAAGTAGAGTATTTGTCCACTT
+ACATTGTAACTAATCATTGCTAAGGTTGGATTTAGGTCTGCCATTTTTCTACTTATTTTCTATTTGTTTGTTTATTTTTT
+TTAAGACAGGGTCTTGTTCTGTCACCCAGTCTGTAGTGCAATGGTGCAATCTTGGCTCACTGCAACCTCTGCCTCCCAGG
+CCCAACCAATCCTCACTTGAGCCCCCTGAGTAGCTGGGACTACAAGTGCATGGCACCACACCTGGCTAATTTTTATATTT
+TTGTAGAGATAGGGTTTTGCCATGTTGCACAGGCTGGTCTTGAACTACTGAGCTCAAGCAATCTACCCACCTTGGCCTCC
+CAAAGTGTTCAGATTACAGGCATGAGCCACCATGCCTGGCCTTCGTCTGTCTTTTGATCTTCTATATATTCTTTCCTAAC
+TTCTTTTGGGTTAAATATTTCTAAATATTCCAGTTTGATTAATCTTTTGGCTTTTGGAAATAATTTTTTATAGGCTGGGC
+ATGTTGGCTTATGCTCATAATCTCAGCTCTGTGTGAGTCCAAGGGAGGTGGATTGCTTGAACCCAGGAGTTTGAGACCAG
+CCTGGGCAACATGGCAAAACCCTCTCTACAAAAAACCAAACCAAAATTTAGCCTGACATCTTGGTGTGCACCTGTAGTCC
+TAACTATTTGGGAGGCTGAGGTGGGAGGGTTGCTTGAGCCTGGGAGGTTGAGGCTGCAATGAGCTGTGATCATGCCATTC
+CACTCCTGCCAGGGCAACAGAGTAAGACCGTGTGTCAAAAAAGATCATTTTTTATAAATAATTTATAATTTCGAATTTTG
+GTAACAAACACATACCTTAAAATTTACCATCATAACCAGTTGTAAGTATACAGTTTTGTAGAGTTAAGAATATTTACATT
+GTTGTGTAGCAGATTTCTAGATTTTTTTTTATCTTAGAAAACTCTATACCCATTCAACAACTATTAATTTCCCCTTCCTT
+CCACCTCCTGGCAAGTACTATTCTACTTTGTGTTTCTAAAAATTTGGCTTATATACCTAGGGTTATATAATATTTGTTTT
+TTAAGTAGGTTCCATGTTATGTGCAGATGTGTCAGGATTTTCTTCCTTTCTATGGCTGAATAATATTTCTTCATATATAT
+ATATTTTCTCTATATATATATATATGTATATTCTTTTGTTTATCCATCTATTCCTGGATGGACGTTTTGGTTTCTTCCAC
+CTCGTGGCTATGTAATGCTCCTGTGAACACAGGTGTACACATATCTGTTTGAGGTCCTGCTACTAGTTATTCTGTCTCTG
+TAGAAGTTGGATGGCTGGATCATATGGTCATTTTATTTTATTTTTTTGAGGAGCCAGTTAATATTTCCACCAACAGTGTT
+CAAGTGTTTCAGTTTCACCTGCACTTGTTACTTTCTGTTGGGTTTGAAGTGATGTCCCATTGTGGTTTCTATTTGCATTT
+CTCTAATGATTAGTGATGTTACACATCTTCTCATATATCTCATGTATCTGTTGGCTATTTGTATATCATCTTTGCATCTT
+TGGATGAATGTTCTTTGTCCATTTTTTAATCACTTTATTTTGTTGTGTTGTAGCTGGGTTTTTTGGTCATGAGCATTCAT
+TTATCTCACAGTTCATTCTTGTTACTTGGGCCAGGGTCATGATCATTCATTATCTCTCAGTTCATCCTCATTACGTTGGG
+CAAACAGTCATGCTGCAGGGTATAGATTATGTTATTCTGTTACTTTCAGGTAGAATTCGGGTCTAGGTTCTAATTGTTTC
+TAAGTTTAGATTCTGAATGAGAATCAGCAGAGGTAGACCACTGCTGCTGAGGCCTGGGGATTGCTGGGAAAAAGGCAGGA
+AACAGATATTGACCTGACCATGGAGGGTTTATGTTTCACGGCTCCCATCTGGGTACCCAAGGAACCTACATGTAGCTCGT
+GTGTGGAGAGCCTACATTGCCCACTCAAAGCAATTGAGGATGGAACAGTCTTGGGGCTGGAGCTCATTATTTGGAATGAT
+AACCACATCTGCACAGAGAGGACCTGATAAGATGTTGTCCTTCCATGTATATCTGGGAATCCTGTGTAGGGTCTCTCTGT
+AAGGACAGGGGCAGTGTTGGCTCCTTGGCCTCTAGTTAGCTTCACAAGTAGTCTAGTAAAGGCTTTGCCAACTTGTCACC
+ATCTGTGGATATTCTGGTCAGCTCTTGTTTTCACCCTACTGACTTCTTCAGACACTAGGCTTTTGCTTTAGACCATTCAT
+GGTTTTCTTCCTCTTCAAATCAGTAATCAATAAATCGCCTTCAAGTCAATAAATTTCCACTCCTTTAGGAAACCCTGATC
+TTCTGGTCACACCAAGGTTTAATTAACTGGTTTGATTGTTTTTCTGTTTTCTTGGATTTTTTTTCCTTCTTCCTGGGGGT
+TTCTAGTAATTCTAGTTTGATGTCTCACTTTCTCCATTTTTTATTTCTTAGTTTTCTTCTGTGATTATTTTCACTGCAGC
+TGCAGGGCCTAATCCTGGGTTGGCAGAGAACTAGCACTTACTCTGCCCTAATTGGAATCCAGGAGAGATAGCAGGTTCCC
+TAGTGTGAAAATGTGTTTGCTCCTCTCTGCTTCTGGTAGTCTCTCTGTAGGAGTTCTTTACGCATTCTGAATGTTCACTT
+CTTATGAGATACATGATGTGCAACTATAGGTTGAATGTCTCTGATCCAAAAATCTGAAATCCCAAATGCTCCAAAGTCTG
+AAACTTTTTGAGTGCCAACATGACACTCAAAGGAAATGCTTATTGGAGCATCTCAGACTCAGGTGTTTGAATTTGAGATC
+CTCAACCAGTAAGAATAGTGCAAATATTACAAAATCTGAAACACATCCCAAGCATTTCAAATAAGGGACACTCAACTGGT
+ATTTTTTTTAATTTTACAGTTTGCCTTTTACCCTGTTGGTTGTGACCTTTGAGGTACAGAAGTTTTTAGGTTTGATATAT
+TTTTGCTTTTACTGCCTGAGCTTTTAATGTCATATCCTAAAAATTATTGACAAATTCATCGTCATAAAGCATTTTCCAAA
+TTTGTTTTCCCTAGGAGTTTGATAGTTCTAGTTTTACATTTAGGTTTATAATTCACTTTGAATTGATTTTAACGTGGTGT
+AAGGTAAGAGTCCAACTTCATTGTTTTGCATGTAGTTATACAATTTTCCCATCATCACTTGTTGAAGAAACTGTGCTTTT
+CCATTGAGTGGTCTTGGCATCCTTCTGGAAGATCATCGGACCATATATGCCAGGGTTGGTTTCTGAGGTCTCTGTTGTGT
+TGGTCCATAAGTGTGTCAAGAGTGTCTTTATGCCATGACCACATTTTTTCTTTTTGGCTTATTGCAGTTTTGTAATTGCT
+TTGAGACGTTTAATTTTGTTCTGTTTCAAGATTGATTTGCCTATTCATGGGCCCTGGAGATTCCATATGAGTTTTAGGAT
+AGGTTTTTCTGTTTATCAAAAATGTCATTGGAATCTTTATAAGGATTGTATTGAATCTAGGTCACTTCGAGTAGTGTTGA
+CATCATTCCAAGATGAAATCATCTAATTTGCAAACCCAGCTTTTCTTTTCATTTATTTGTGTTTAATTTCTTTTAACAGT
+GTTTTGTAGTTTTCTGTGTTCAAATCTTTTGCCCTCTTGGTTAAGCTTATTTCTAATTTTTATAATGCTGTTGTAAATAT
+AATTTTTTTTTTTTGAGATGGAGTCTTGCTCTGTCTCCCAGGCTGGAGTGCAGTGGCACTATCTCAGGTCACTGCAACCT
+GCACCTTCCTTATTCAAGCGATTCTCCAACCTCAGCCTCTCAAGTACCTGGGATCACAGGTGCGCGCCACCATGCCCAGC
+TAACTTTTTGGTATTTTTAGTAGAGACAGTGTTTCTCCATGTTGACCAGGCTATTCTTGAACTTGTGACCTCAGGTGATC
+TGCCCACCTCGGCCTCCCAAACTGCTGGGATTGCAGGCATGAACCACTGCACCCAGCCAAATGTCATTCTTTTTAAAAAT
+TTCTTTTCTTTTGTTTTCTCTTTCTTTTCTTTTCTTTCTCTCTCTTTCTTTCCTTTCTTTCTTTTTTTTTGAGACGGTGT
+CTCACTCTGTTTCCTAAGCTGGAGCACAGTGGCACAGTCTCAGCTGACTGCAACCTCCACCTTCCAAGTTCAAGCAATTC
+TCCTGCCTCAGCCTCCCAAGTAGCTGGGACTACAGGTGTCTGCCACTATGCCCAGCTAATTTTTGTATTTTTAATAGCGA
+TAGAGTTTTACTATTTATATTAGAGATGGGGTTGACCCAGCTGGTCACGAACTCCTGACCTCAGGTGGTCCACCCGCCTT
+GGCCTTCCAAAGTGCTGGGATTACAACTGTGAGCCACTGCACCTGGCCTCTTTTTAAAATTTTATTTGCAGATTGTTCAT
+TGTTAGTTTATAGAAATGGAACTGACTTGTGTGTGTTACTGTATCCTGAAACTTTGTTGAATTTCATTATTCTACCAGTA
+TTTTGTGGAATTTCAGGATTTTTACACATTACATCATGTTGTCTGTGAACAAAATTTTGTACTTTTTCCTTTCCAATTTG
+CATGCTTTTTATTACTTTCTCTTGCCTAATTATTCTGAGTAGAAATTCCAGTACTGTGGTGAATAGAAGTGGCAGGAAGA
+GATGTTGCTATCTTATTCCTGATCCTAGAGGAAAAGATTTTAGTTTTTCACCATTCAGTATGATGTTAGCTGTGAGCTTT
+TCATGTATAATCTTTATTTACTGAGGAGTTTCCATATATTACTAATTCTTTGAGTGTTTTTATTACAAAAGGTGTTCATC
+TGGCTCTGGAACCAGATAAATGTTGACCTGATAGAGTGGATTGGAATGTCCCCTTCTGGTTTTTGAACATTTTTGGAATA
+TTTTACAAAGGGCTGGCATTAATTCTTCTTGAAATGTTTGGTAAAATTTTCCAGTGAAGTTATCTGGACCTGGAATTTTC
+TTTTTTGGGGGGTTTTTGATTACTGGTTGAATCTTCTTACTAGTTACAGGTCTCTTTGGATTTTTTATTTCTCCATGATG
+CAGTATGGTGGTTTGTGTTTCTAGGAATTTATAAATTTATTCTAGGTTGCCCAGTTCTGTGGCATATGGTTGCTCACATT
+AGTCTCTTGTAATCTTTTTCATTTCTGTGGAATCTGTTGTACTGTCACCTCTTTTATTTATGATTTTAGTATTTGAGATT
+TCTCTTTTTTTCTTAATATAGCTGTGAGTTTTAAAATTTTTATTGATCTTTAAAAAAACAAACTCAGTGGTTTTTTTTTT
+CCTTTTTTTCTGGTCTTATTCTGCTTATCTCTGTTCTAATCTGTTATTTTCTTCCTTTTGCTTGGTTTGTCATTAGTTTT
+TTTTTTTTTCCCTTTAGGTGTAATGTTAGGTTATTGATTTGAGATCTTTCTTCTTTTTAATTTGATCACCTGCAGCTATA
+AGCTTCCCTTTAGCATGGCTTTGAGATCTTTCTTCTTTTTAATTTAAGCATCTGTGGCTGTAAGCTTCCCTTTAGCACTG
+CCTTTGTTGCCTCCTCCTGAGTTTGGGTATGTCATGGTTTTGTTTTCATTTGCTTAAACATTTTTTTTTGTCCTATTGTA
+ATATAATTGTGTTGTTTTTAATAGAGGTAATTAATGAAACACATAATGAATTGTGCTTCTGTTTTTATAATATTTTAAGC
+ATTCTTAACTCAGAAATGTAAATTTTAGAAAAAAATTCCAGGCCAGGCACAGTGGCTCACACCTGTAGTCCCAGCACTTG
+AGGAGGCCGAGGTGGGAGGATCATCTGAGGTCAGGAGTTGGAGACCAGCCTGGCCAACATGGTGAAACCCTGTCTTTACT
+AAAAATAGAAAAAAAATATATAAAAGCTAGCTGAGTGTCATGGCGGGTGCCTGTAATCCCAGCTACTCTGGAGGCTGAGG
+CAGGAGAATCACTTGAATCTGGGAGGCGGAGGTTGCAGTGAGCTGAGATTGCACCACTGCACTCCAGCCTGGGTGACAGA
+ATGAGTCCATCTCAAAAAAAAAAGAAAAAAGAAAAAATTTGAGACATATTTATTTGTATTTCAATTTAGAAACTATGATC
+TCCTAAGTGTATTGACACAGCAACCTGACATAAAGATAAAGAATAATAAGTATATAACAAAACGGAAACTTGCAAATACC
+TTTTTTAATTAATTTTTAATTATATATATTTAAAAATTGCCGGGTGCAGTGGCTTACACCTGTAATCCCAGCACTTTGGG
+AGGCTGAGGTGGGCAGATCACATGAGGTCAGGAGTTTGAGACCAGCCTGGCCAACATGGTGAAACCTCATCTCTATTAAA
+AATCAAAAAATTAGCCAGGCGTGATAGCATGCATCTGTAGTCCCAGCTACTCAGGAGACTGAGGCAGCAGAATTGCTTGA
+ACATGGGAGGCAGAGGTTGCAGTGAGCCAAGATAGTGCCACTGCACTCCAGCCTTGGTGACAGAGTGAGACTCTGACTCA
+AAAAAAAATTGTCTGGGCACGGTGGCTCACACCTGTAATCGCAGCATTTTGGGAAGCTGAGGCAGGCAGATCACGTTAGG
+AGATCGAGACCATCCGGGCTAACACGGTGAAATGCCATCTCTACTAAAAATACAAAAAATTAGCCGGACGTGGTGGCGGG
+TGCCTGTAGTTCCAGATACTCCGGAAGTTGAGGCAGGAGAATGGTGTGAACCTGGGAGGTGGAGCTTGCAGTGAGCTGAG
+ATTGCACCACTGGACTCCAGCCTGGGTGACAGAGCGAGACTCTGTCTCAAATAAAATAAAATAAAATAAAACTAAGGTGT
+GGTTGACATACAAAAATTACACATATTTAATATATACCTTTGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTGTTAC
+GGAGGTTTTACTCTTGTTGCCCAGGCTGGAGTGCAGTGACACGATCTCAGCTAGCTGCAACCTCCACCTCCCGGGTTCAA
+GCAATTCTCCTGCCTCAGCCTCCTGAGTAGCTGGGATCGCAGGCGTGCGCCCCGACACCCGGCTAATTTTTGTATTTTTT
+TAGTACAGACAGGGTTTCACCATGTTGGCCAGGCTGGTCTCGAACTCCTGACCTCAGATGATCCACCTGCCTTTGTCTCC
+CAAAGTGCTGGGATTACAGGCGTGTGACACCGAATATATACATCTTAATGAGTTTAGAGATAAGTATTCGCCCCAGGACT
+CATCACAACAAATAATGCCGTAAACTTGACCATCACTCCCCATATATTTCTCATTCTGACCCTTTTTAAAAAATGAGACC
+GGGAGTGGTGGCTCACGCCTGTAATCCCAGCATTTTGCGAGGCCGAGGCGGGTGGATCACGAGGTCAGGAGATCAAGACC
+ATCCTGGCTAACACAGTGAAACCCCGTTTCTACTAAAAATACAGAAAATTAGCTGGGCGTGATGGCGGGCACCTGTAGTC
+CCAGCTACTTGGGAGACTGAGGCAGGATAGTGGTATGAACTCGGGAGGCAGAGCTTGCAGTGAGCTGAGATCGTGCCACT
+GCACTCCAGCCTGGGCAACAGAGTGAGACTCCGTCTCAAAAAAAAAAATGAGATGACCATTTCACCTAAAATATACCCTC
+TTAAGTTTTATTTTAAGTGTACAATACAGGACGGCCATGCATCAGAGATATATGTGGGTTTGGTTCCAGACCACTGCAAT
+AAAGTGAGTTATACAATTTCTTTTGGTTTTCCAGTGCATGTAAAAGTATGTTTATACTGTGCTGTATAAAGTGTGCAATA
+GCATATGTCTACAAAGTGTTCACACTTTAATTTACAAATACTTTATTGTTAACAAGTGCTAACAGTCATCTGAGCCTTCA
+GAAAGCTGCAATCTTTTTTTGTGTGTGTGACAGGGTTTTACTCTGTGGCTCAGGCTAGAGTAATTGCAGTCTCAACCTCA
+TGCTCAATCAAACCCCCACCTCAGACTCCTGACTAGCTGGAACTACAGATACATGCCACCATGACCAGCTAATTTTTGTA
+TTTTTTTTTTTTTGTAGAGATGGGGTTTTGCCATGTTGCCTTGACTTCCTGGGCTCAAGCAATCTACCCACCTTGGCCTC
+CCAAGGTGTTGGGATGACAGGTGTGAGCCACTGCACCTGGCCAAGTTTCAGTCTTCTTGCTGATGGAGGGTCTTGCCTTA
+ATGTAAGGTGGTGGTTGCTGAGCGTTGGGGTGGCTGTGGCAATTTCTTAAAATAAGACACCATTGAAGTTTGCTGTGTCA
+ATTGACTCTCCCTTTCACAAAAGAATTATCTGTAGCATACGATGTTGTTTGATAGCTTTTTACCCACAGTAGAACTTTCA
+AATTGGATTCAATCCTGTCAAACCTTCGTACAGCTGTACCAACTAAGTTTATGTATTATTGTAAATCATTGTGTCAATCC
+TGTCAAGCCCTCCTTCTGCTGTACCAACTAAGTTTATTCTAAATCTGTTGTCATCTCAACATTGTTTACACTGTCTTCAC
+CACGAGTAGATTTCATCTCAAGAAACCACTTTCTTTGCTCATCCTTGGAAGCAACTCATCCACTCACGTTTTCTCCGGAG
+GCTGCTGCAGTCTCGCCAGATCTTCAGGCTCTGTCTCTGATTCTAGTGCTCTTGTTATTTCCACCATATCTGCAGTTACC
+TCCTCCACAGAAGTCGTGAACCCCTGTGTCATCTGTGAGGGTTGGAATAATCTTCCCAACTTCTCTCTCTCTCTCTCTCT
+CTTTTTTTTTTTTTGAGATGAAGTCTTGCCTGGGCTGGAGTGCAGTGATGCGATCCCAGCTCACTGCAACCTCCACCTCC
+CAGGTTCAAGCAATTCTGCCTCAGCCTCCCAAGTGTTTGGGATTACAGTCACCCCCGACCAGGCCCAGCTAATTTTTTGT
+GTGTTTTTAGTACAGACAGGATTTCACTATGTTGGCCAGGCTGGTCTCAAATTCCTGACCTCGTGACCCACGTGCCTTGG
+CCTGCCAACATGCTGGGATTACAAGTGTGAGCCACCACGCCCGGCCCCAACTTCTCCTAATGTTGCTATTTTGATCTTAT
+TTTTTAAATCATGAATGTTCTCAATGACATCTAGAATGGTGAATCCTTTCCAGTAGGTTTTCAATTATTTTGCCCAGATC
+CATCAAAGGAATCACTTTCTAGAGAAGTTATAGCTTTATGAAATATATTTTTAAGTGATAAGACTTGAAAGTTGAAATTA
+TTCTTTGATCCAAGGGCACCAGAATGAATGTTGGGTTAGTAGGCATGAAAACAATATTCAGCTCTTTGTACATCTCTGTA
+AAAGCCCTTGAGTACCAGGGGCATTGTCAGTGAGCGGTAATACTTTGAAAGGAATCTTATTTCTTGAGCAGTAGGTGTCA
+ACAGTGGGCTTCAGATATTCAGTAAACCATATTTGTAAGCCGATAGTCTGTCATCCAGGCTTTGTTCCCATTTGTAGAGT
+ACAGACAGAGCTGTGTTTTATCATAATTCTTCAGGGCCCTTGGATTTTCAGAATAGTAAATCATCATTGGTTTCAAGTTA
+ACATCACCAACTGCATTAGCCCTTAACAAAAGAGTCAGCATGTCCTTTGAAGCCTTAAAGCCAGGCATCAACTCCTCTCT
+AGCTGGGAACATCCTAGATGGCATCTCCTTCTAGTAGAAGGCTGTTTTGTCTCCATTGCAAATCTATTTAGTGTTGCCAT
+CTTAATCAGTTATCTTCTAGATAGCTTTCTGCAGCTTTTCCATCAGTACTTGCTGCTTTATCTTGCGCTTTTATGTTATG
+GAGATGACTTTTTTCCTTAAACCTCAAGAAACAAGCTCTTCTAGCTTCAGACTTTCCTTCTGCAGCTGCCTCACCACTCT
+AAGTCTTCATAGAATTGAAGAGAGGCCGGGTGCGGTGGCTGTCACACCTGTAATCCTAGCACTTTGGGAGGCCGAGGCGG
+GCAGATCACCTGAGGTCGGGAGTTCGACACCAGTCTGACCAATGTGGAGAAACCCCGTCTCTACTAAAAATACAAAAAAA
+TAGCCAGGTGTGGTGGTGCTTGCCTGTAATCCCAGCTACTTGGGATGCTGAGGCAGGAGAATGGCTTGAACTTGGGAGGC
+AGAGGTTGCGATGAGCCAAGATCACGCCATTGCACTCCAGCTTGGGCAAGAAGAATGAAACTCTGTCTCAAAAAAAAAGA
+AAAAAAAGTAAAAAGAGAGTTAGGCTTAGGCTTAATGGAATGTTTTTTGTTTTTTTTTCATCTTCTATCTAGACCAATTA
+AACTTTCTTCATAACAGCAGCAAGATTGTTTAGCTTTTTATCATTCATGTGTTCACTGGAGCAGTACTTTAAATTTCTTT
+CCAGAACACTTCGTTTGCATTCACAACTTGGCTAAGTGTTTGTTGCATGAGGTCTAGCTACTGGCCTGTCTTGCTTACAG
+CATGCCTTCCTCACTAAGCTTAATTATTTCTTCCTTTTGGTTTAAAGTGACAGACATGCAACTCTTCTTTCATGAACATA
+TAGAGGCTATTGTAGGGTTATTAATTGGCCACATTTTAATATTAATAAAAAGAAGCCTGAGAAAAAGAGAAAGAGAAATG
+GCCCGTTGGTTGGGCAGTCAGAACAAACGCATTTGTCAATTGTTTGCTGTCTTATCCTGGTGTGATTTGTGGTTCCCAAA
+ACAATGACAACAGTAGCATTAAAGATTACTGATTACAGATCACCACAACAGATTCAATAATAAAAATCTTAAAATACTGT
+GAGAATGACCGAAATGTGACACAGAGACGTGAAGTGAGCACGTGCTGTAGGAACAATGGTGCCAGTGAGACCTGCTTATT
+GCAGGGTGGCCACAAACCTTCAATACGTAAAACACATGGTCACAAAACACAATAAAGCAAAGTGCAGTGAAACAAGATGT
+GTCTGTCTTTTGATAGACTCTGACAATCTCTACCTTTGAATTGGTACATTCATACCATTAACATTCAAAGTGATTATTGA
+TATCATTGGATTAATATCTACTATATTTGTTACTGTTTTCTATTCATTCTCCTCAGTCTTCATTCTTTTGTCTACCACTC
+TTTTTCTGCCTTTTGCAGTTTTCATTGATGATTTTAGATGACTCCATTTTCCCTGTCTTTCTTAGTACATACTTCTCTTT
+TTAAAACTTTTTTTTAACTAGTTGCCACAGAATTTGCAATATACATTTACAACCAATTCAAGTCCACTTTCAAATAACAC
+TATCCAACTATCCCACAAATAAGACTACCTGCTTAACAAACAAAACACCTAATTCCTCAGTAACATTTACAACCAATTCA
+AGTCCACTTTCAAATAACACTATCCCACTATCCCACAAATAAGACTACCTGCTTAACAAAGAACACACCTAATTCCTCAA
+TATACATTTACAACCAATTCAAGTCCACTTTCAGATAACACTATCCCACTTCACGGGTGACTACCTGCTTAACAAAGAAA
+ACACCTGATTCCTCCCTCCCATCCTTCCATTCCATTCCTTGTATTATTGTTCCTTATTTCACTTGTGTATAAGCATGCAT
+AATCTATCTGTGTGTATTTATTATTATCTACAAACTTATTGGTCAGATCAATTATGAATAAATACATGTTTTTATTGTAC
+CACAATGCCTCCCTACCATCCTTCCATTCCATTCCTTGTATTAGTGTTACTCATTTCAATTGTATATAAGCATACATAAT
+ATATCTGTATTTGTTATTGTCTATGATCTTCTTGGTCAGATCAATTAAGAATAAATACATAGGTTTTTATTGTACCACAA
+TTCTTTCTTTAATGCTCTTTTTAAAAAAATGTTGATCCAGGTTTCAGTTATATATCTTTTGTTTCCCTAAAGAATTTCAT
+TTAACATTTCTTGCAAGACAGGTCTCCTGGCAACAAGTTTCTTGAATTTTTATTTTTCTGAGGAAGGCCTTAATTCTCCT
+TCACTTTTGAAGGGTGGTTTCAGTGGGTACAGAAACTTAGGTTGGTGGGTTTTTTCTGTCAACATTTTGAATTTTTCATT
+TCACTGTCTTCTTACTTTCACAGTTTCTGAAATGTTGAATGCAGTTCTTATCTTTGTGTCTCTGTAGGTAAGGTGTTTTC
+TGCCCCACCTCTGGTTTCTTTCAGAGTTTTCCTTTATCTTTTATTTCATATAGTTTGAAAATTATATGTCCAAGTGTAGG
+TTGTTGGTATTTATTCTGCCTGGTGTTCTCAGAGCTTCCTGGATCTTTGGTTTGGTGTCTGACATTAATACTGGAAGTTC
+TCAGACATGGTTGTTGCAGAACTTTCTTCTATTTCTTCTCCTCTTGGTATTCTCATTACTGTTTCACCTTTTGTAGTTGT
+CCCACAGTCTTGAATATCATCTTCTGTTCTTTTCAGTGTTTCTTTTCTTTAGTTTTCGAAGTTTCTGATGATAAATCCTC
+AAGCTCAGAGATTTTTACTCAGCTGAGTCCAGTCTACTAATAAGCCATCAGAGGTATTCTTCAGTTATTTACCACGTTTT
+TCATCACTACATTATGTTGAAAGTTCTTACGATGTCTGTCTTTCTCATTACATTACCCATCTACACTTGAATACTGTCTA
+CTTCATTCATTAGGCCCTTAGCATATTCTCCAGAGGTTTAAAAAAAAATTCCAAGATCATATCTTTGTCTGCTTCTGAAG
+CTTGCTCTGTTGACACAAATTGTATTTTTTTCTTTTTTTGGATTTTAGTAAGCCTTGCAATTTTTTCCCTTTATTCTGAT
+GCATGAAGTACCCACTAAAAGTGACTGTTGTTAGTATAGCTTCAGTAATGCGGTGATGAGGTGACAGGGCAGGTGATGCT
+CTCTTAGTCTCTTTAGGCTACTATAACAAAATACTTCAGACTGAGTAATTCATAAACAACAGAGATTATTGTTCACAGAT
+CTGGAGGCTGGAAAGTACAAGACTAAAGGGCCAGGATATTTGGTGTTTGGTGAAGGTCAAACATTCAGACACTCTCAACG
+ACTATAGCGACAGCAGCAGTCTTCAGGAATCCTATGTGAGGGACAAACACTCAGAAGCCAGCTGGAGTGTTCTAGAATCC
+TATGTGAGGGCCAAACATTCAGACCCCAGCAGTAGTGTTGTGGAATCCTATGTGAGGGACAAACATTCAGACCACGGGAG
+CAGTGTTCTGGAATTCTATGTGAAGGACAAACATTAAGACTCTCATAGCAGTGTCCTGGAATCATATGTGAGGGACAACC
+ATTCAGACACCAGCAGAAGTGTTCTGGAATCCTAGGTGTGGGAAAAACATTCAGAACCTAGTAGCAGTGTTCTGGAATCC
+TATGTGAGGGACATACATTCAGACCACGGCAGCAGTGTTCTGGAATGGTATGTGAAGGACAAACATTCAGACCCTTGTAG
+CAGTGTTCCAGAATTTTATGTGAGGGACAAACATTCAGACCACAGCAGCAGTGTTCTGGAATCCTATATGACGGACCAAC
+GTGCAGACCCTTGCAACAGTGTTCTGGAATACTAGGTGAGGGAAAAATATTCACACCCTTGTAGCAGTGTTCTGGAATTC
+TATGTGACTGACAAACATTCAGACTCCAGCAGCAGTGTTCTGTAATCCTATGTGAGGGACAAACATGCAGACCCCAAGAG
+CAGTGTTCTGAAATCCTATGTTAAGGGAAACATTGAGACCCCAGCATGAATGTTCTGGAATCCTATGTGAGTGACAAACA
+TTCAGACCACGGCAGGAGTGTTCTGGAATCCTATGTGAGGAACAAACATTCAGACCACAGCAGGAGTGTTCTGGAATCCT
+ATATGAGGTATAAGCATTCAGACCCTCATAGCAGTGTTCTGGAATCCTATGTGAGGGAGAAGCATTCAGAGCACAGCAGG
diff --git a/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.21.10M-10M200k.fasta.fai b/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.21.10M-10M200k.fasta.fai
new file mode 100644
index 0000000..c5e2e02
--- /dev/null
+++ b/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.21.10M-10M200k.fasta.fai
@@ -0,0 +1,2 @@
+20	200000	4	80	81
+21	200000	202508	80	81
diff --git a/src/test/resources/htsjdk/samtools/cram/io/bzip2-test.bz2 b/src/test/resources/htsjdk/samtools/cram/io/bzip2-test.bz2
new file mode 100644
index 0000000..a16c0e0
Binary files /dev/null and b/src/test/resources/htsjdk/samtools/cram/io/bzip2-test.bz2 differ
diff --git a/testdata/htsjdk/samtools/cram/test.cram b/src/test/resources/htsjdk/samtools/cram/test.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/test.cram
rename to src/test/resources/htsjdk/samtools/cram/test.cram
diff --git a/testdata/htsjdk/samtools/cram/test.cram.bai b/src/test/resources/htsjdk/samtools/cram/test.cram.bai
similarity index 100%
rename from testdata/htsjdk/samtools/cram/test.cram.bai
rename to src/test/resources/htsjdk/samtools/cram/test.cram.bai
diff --git a/testdata/htsjdk/samtools/cram/test.dict b/src/test/resources/htsjdk/samtools/cram/test.dict
similarity index 100%
rename from testdata/htsjdk/samtools/cram/test.dict
rename to src/test/resources/htsjdk/samtools/cram/test.dict
diff --git a/testdata/htsjdk/samtools/cram/test.fa b/src/test/resources/htsjdk/samtools/cram/test.fa
similarity index 100%
rename from testdata/htsjdk/samtools/cram/test.fa
rename to src/test/resources/htsjdk/samtools/cram/test.fa
diff --git a/testdata/htsjdk/samtools/cram/test.fa.fai b/src/test/resources/htsjdk/samtools/cram/test.fa.fai
similarity index 100%
rename from testdata/htsjdk/samtools/cram/test.fa.fai
rename to src/test/resources/htsjdk/samtools/cram/test.fa.fai
diff --git a/testdata/htsjdk/samtools/cram/test.fasta b/src/test/resources/htsjdk/samtools/cram/test.fasta
similarity index 100%
rename from testdata/htsjdk/samtools/cram/test.fasta
rename to src/test/resources/htsjdk/samtools/cram/test.fasta
diff --git a/testdata/htsjdk/samtools/cram/test2.cram b/src/test/resources/htsjdk/samtools/cram/test2.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/test2.cram
rename to src/test/resources/htsjdk/samtools/cram/test2.cram
diff --git a/testdata/htsjdk/samtools/cram/test2.dict b/src/test/resources/htsjdk/samtools/cram/test2.dict
similarity index 100%
rename from testdata/htsjdk/samtools/cram/test2.dict
rename to src/test/resources/htsjdk/samtools/cram/test2.dict
diff --git a/testdata/htsjdk/samtools/cram/test2.fa b/src/test/resources/htsjdk/samtools/cram/test2.fa
similarity index 100%
rename from testdata/htsjdk/samtools/cram/test2.fa
rename to src/test/resources/htsjdk/samtools/cram/test2.fa
diff --git a/testdata/htsjdk/samtools/cram/test2.fa.fai b/src/test/resources/htsjdk/samtools/cram/test2.fa.fai
similarity index 100%
rename from testdata/htsjdk/samtools/cram/test2.fa.fai
rename to src/test/resources/htsjdk/samtools/cram/test2.fa.fai
diff --git a/testdata/htsjdk/samtools/cram/test2.fasta b/src/test/resources/htsjdk/samtools/cram/test2.fasta
similarity index 100%
rename from testdata/htsjdk/samtools/cram/test2.fasta
rename to src/test/resources/htsjdk/samtools/cram/test2.fasta
diff --git a/testdata/htsjdk/samtools/cram/xx#blank.2.1.cram b/src/test/resources/htsjdk/samtools/cram/xx#blank.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#blank.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/xx#blank.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/xx#blank.3.0.cram b/src/test/resources/htsjdk/samtools/cram/xx#blank.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#blank.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/xx#blank.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/xx#blank.sam b/src/test/resources/htsjdk/samtools/cram/xx#blank.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#blank.sam
rename to src/test/resources/htsjdk/samtools/cram/xx#blank.sam
diff --git a/testdata/htsjdk/samtools/cram/xx#large_aux.2.1.cram b/src/test/resources/htsjdk/samtools/cram/xx#large_aux.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#large_aux.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/xx#large_aux.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/xx#large_aux.3.0.cram b/src/test/resources/htsjdk/samtools/cram/xx#large_aux.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#large_aux.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/xx#large_aux.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/xx#large_aux.sam b/src/test/resources/htsjdk/samtools/cram/xx#large_aux.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#large_aux.sam
rename to src/test/resources/htsjdk/samtools/cram/xx#large_aux.sam
diff --git a/testdata/htsjdk/samtools/cram/xx#large_aux2.2.1.cram b/src/test/resources/htsjdk/samtools/cram/xx#large_aux2.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#large_aux2.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/xx#large_aux2.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/xx#large_aux2.3.0.cram b/src/test/resources/htsjdk/samtools/cram/xx#large_aux2.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#large_aux2.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/xx#large_aux2.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/xx#large_aux2.sam b/src/test/resources/htsjdk/samtools/cram/xx#large_aux2.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#large_aux2.sam
rename to src/test/resources/htsjdk/samtools/cram/xx#large_aux2.sam
diff --git a/testdata/htsjdk/samtools/cram/xx#minimal.2.1.cram b/src/test/resources/htsjdk/samtools/cram/xx#minimal.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#minimal.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/xx#minimal.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/xx#minimal.3.0.cram b/src/test/resources/htsjdk/samtools/cram/xx#minimal.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#minimal.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/xx#minimal.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/xx#minimal.sam b/src/test/resources/htsjdk/samtools/cram/xx#minimal.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#minimal.sam
rename to src/test/resources/htsjdk/samtools/cram/xx#minimal.sam
diff --git a/testdata/htsjdk/samtools/cram/xx#pair.2.1.cram b/src/test/resources/htsjdk/samtools/cram/xx#pair.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#pair.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/xx#pair.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/xx#pair.3.0.cram b/src/test/resources/htsjdk/samtools/cram/xx#pair.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#pair.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/xx#pair.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/xx#pair.sam b/src/test/resources/htsjdk/samtools/cram/xx#pair.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#pair.sam
rename to src/test/resources/htsjdk/samtools/cram/xx#pair.sam
diff --git a/testdata/htsjdk/samtools/cram/xx#rg.2.1.cram b/src/test/resources/htsjdk/samtools/cram/xx#rg.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#rg.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/xx#rg.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/xx#rg.3.0.cram b/src/test/resources/htsjdk/samtools/cram/xx#rg.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#rg.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/xx#rg.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/xx#rg.sam b/src/test/resources/htsjdk/samtools/cram/xx#rg.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#rg.sam
rename to src/test/resources/htsjdk/samtools/cram/xx#rg.sam
diff --git a/testdata/htsjdk/samtools/cram/xx#tlen.2.1.cram b/src/test/resources/htsjdk/samtools/cram/xx#tlen.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#tlen.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/xx#tlen.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/xx#tlen.3.0.cram b/src/test/resources/htsjdk/samtools/cram/xx#tlen.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#tlen.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/xx#tlen.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/xx#tlen2.2.1.cram b/src/test/resources/htsjdk/samtools/cram/xx#tlen2.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#tlen2.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/xx#tlen2.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/xx#tlen2.3.0.cram b/src/test/resources/htsjdk/samtools/cram/xx#tlen2.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#tlen2.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/xx#tlen2.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/xx#triplet.2.1.cram b/src/test/resources/htsjdk/samtools/cram/xx#triplet.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#triplet.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/xx#triplet.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/xx#triplet.3.0.cram b/src/test/resources/htsjdk/samtools/cram/xx#triplet.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#triplet.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/xx#triplet.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/xx#triplet.sam b/src/test/resources/htsjdk/samtools/cram/xx#triplet.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#triplet.sam
rename to src/test/resources/htsjdk/samtools/cram/xx#triplet.sam
diff --git a/testdata/htsjdk/samtools/cram/xx#unsorted.2.1.cram b/src/test/resources/htsjdk/samtools/cram/xx#unsorted.2.1.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#unsorted.2.1.cram
rename to src/test/resources/htsjdk/samtools/cram/xx#unsorted.2.1.cram
diff --git a/testdata/htsjdk/samtools/cram/xx#unsorted.3.0.cram b/src/test/resources/htsjdk/samtools/cram/xx#unsorted.3.0.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#unsorted.3.0.cram
rename to src/test/resources/htsjdk/samtools/cram/xx#unsorted.3.0.cram
diff --git a/testdata/htsjdk/samtools/cram/xx#unsorted.sam b/src/test/resources/htsjdk/samtools/cram/xx#unsorted.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx#unsorted.sam
rename to src/test/resources/htsjdk/samtools/cram/xx#unsorted.sam
diff --git a/testdata/htsjdk/samtools/cram/xx.fa b/src/test/resources/htsjdk/samtools/cram/xx.fa
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx.fa
rename to src/test/resources/htsjdk/samtools/cram/xx.fa
diff --git a/testdata/htsjdk/samtools/cram/xx.fa.fai b/src/test/resources/htsjdk/samtools/cram/xx.fa.fai
similarity index 100%
rename from testdata/htsjdk/samtools/cram/xx.fa.fai
rename to src/test/resources/htsjdk/samtools/cram/xx.fa.fai
diff --git a/testdata/htsjdk/samtools/cram_query_sorted.cram b/src/test/resources/htsjdk/samtools/cram_query_sorted.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram_query_sorted.cram
rename to src/test/resources/htsjdk/samtools/cram_query_sorted.cram
diff --git a/testdata/htsjdk/samtools/cram_query_sorted.fasta b/src/test/resources/htsjdk/samtools/cram_query_sorted.fasta
similarity index 100%
rename from testdata/htsjdk/samtools/cram_query_sorted.fasta
rename to src/test/resources/htsjdk/samtools/cram_query_sorted.fasta
diff --git a/testdata/htsjdk/samtools/cram_query_sorted.fasta.fai b/src/test/resources/htsjdk/samtools/cram_query_sorted.fasta.fai
similarity index 100%
rename from testdata/htsjdk/samtools/cram_query_sorted.fasta.fai
rename to src/test/resources/htsjdk/samtools/cram_query_sorted.fasta.fai
diff --git a/testdata/htsjdk/samtools/cram_tlen.fasta b/src/test/resources/htsjdk/samtools/cram_tlen.fasta
similarity index 100%
rename from testdata/htsjdk/samtools/cram_tlen.fasta
rename to src/test/resources/htsjdk/samtools/cram_tlen.fasta
diff --git a/testdata/htsjdk/samtools/cram_tlen.fasta.fai b/src/test/resources/htsjdk/samtools/cram_tlen.fasta.fai
similarity index 100%
rename from testdata/htsjdk/samtools/cram_tlen.fasta.fai
rename to src/test/resources/htsjdk/samtools/cram_tlen.fasta.fai
diff --git a/testdata/htsjdk/samtools/cram_tlen_reads.sorted.sam b/src/test/resources/htsjdk/samtools/cram_tlen_reads.sorted.sam
similarity index 100%
rename from testdata/htsjdk/samtools/cram_tlen_reads.sorted.sam
rename to src/test/resources/htsjdk/samtools/cram_tlen_reads.sorted.sam
diff --git a/testdata/htsjdk/samtools/cram_with_bai_index.cram b/src/test/resources/htsjdk/samtools/cram_with_bai_index.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram_with_bai_index.cram
rename to src/test/resources/htsjdk/samtools/cram_with_bai_index.cram
diff --git a/testdata/htsjdk/samtools/cram_with_bai_index.cram.bai b/src/test/resources/htsjdk/samtools/cram_with_bai_index.cram.bai
similarity index 100%
rename from testdata/htsjdk/samtools/cram_with_bai_index.cram.bai
rename to src/test/resources/htsjdk/samtools/cram_with_bai_index.cram.bai
diff --git a/testdata/htsjdk/samtools/cram_with_crai_index.cram b/src/test/resources/htsjdk/samtools/cram_with_crai_index.cram
similarity index 100%
rename from testdata/htsjdk/samtools/cram_with_crai_index.cram
rename to src/test/resources/htsjdk/samtools/cram_with_crai_index.cram
diff --git a/testdata/htsjdk/samtools/cram_with_crai_index.cram.crai b/src/test/resources/htsjdk/samtools/cram_with_crai_index.cram.crai
similarity index 100%
rename from testdata/htsjdk/samtools/cram_with_crai_index.cram.crai
rename to src/test/resources/htsjdk/samtools/cram_with_crai_index.cram.crai
diff --git a/testdata/htsjdk/samtools/empty.bai b/src/test/resources/htsjdk/samtools/empty.bai
similarity index 100%
rename from testdata/htsjdk/samtools/empty.bai
rename to src/test/resources/htsjdk/samtools/empty.bai
diff --git a/testdata/htsjdk/samtools/empty.bam b/src/test/resources/htsjdk/samtools/empty.bam
similarity index 100%
rename from testdata/htsjdk/samtools/empty.bam
rename to src/test/resources/htsjdk/samtools/empty.bam
diff --git a/testdata/htsjdk/samtools/empty_no_empty_gzip_block.bai b/src/test/resources/htsjdk/samtools/empty_no_empty_gzip_block.bai
similarity index 100%
rename from testdata/htsjdk/samtools/empty_no_empty_gzip_block.bai
rename to src/test/resources/htsjdk/samtools/empty_no_empty_gzip_block.bai
diff --git a/testdata/htsjdk/samtools/empty_no_empty_gzip_block.bam b/src/test/resources/htsjdk/samtools/empty_no_empty_gzip_block.bam
similarity index 100%
rename from testdata/htsjdk/samtools/empty_no_empty_gzip_block.bam
rename to src/test/resources/htsjdk/samtools/empty_no_empty_gzip_block.bam
diff --git a/src/test/resources/htsjdk/samtools/hg19mini.dict b/src/test/resources/htsjdk/samtools/hg19mini.dict
new file mode 100644
index 0000000..8c844a8
--- /dev/null
+++ b/src/test/resources/htsjdk/samtools/hg19mini.dict
@@ -0,0 +1,5 @@
+@HD	VN:1.0	SO:unsorted
+@SQ	SN:1	LN:16000	M5:8c0c38e352d8f3309eabe4845456f274	UR:file:///Users/cmn/projects/htsjdk/testdata/htsjdk/samtools/hg19mini.fasta
+@SQ	SN:2	LN:16000	M5:5f8388fe3fb34aa38375ae6cf5e45b89	UR:file:///Users/cmn/projects/htsjdk/testdata/htsjdk/samtools/hg19mini.fasta
+@SQ	SN:3	LN:16000	M5:94de808a3a2203dbb02434a47bd8184f	UR:file:///Users/cmn/projects/htsjdk/testdata/htsjdk/samtools/hg19mini.fasta
+@SQ	SN:4	LN:16000	M5:7d397ee919e379328d8f52c57a54c778	UR:file:///Users/cmn/projects/htsjdk/testdata/htsjdk/samtools/hg19mini.fasta
diff --git a/testdata/htsjdk/samtools/hg19mini.fasta b/src/test/resources/htsjdk/samtools/hg19mini.fasta
similarity index 100%
rename from testdata/htsjdk/samtools/hg19mini.fasta
rename to src/test/resources/htsjdk/samtools/hg19mini.fasta
diff --git a/testdata/htsjdk/samtools/hg19mini.fasta.fai b/src/test/resources/htsjdk/samtools/hg19mini.fasta.fai
similarity index 100%
rename from testdata/htsjdk/samtools/hg19mini.fasta.fai
rename to src/test/resources/htsjdk/samtools/hg19mini.fasta.fai
diff --git a/testdata/htsjdk/samtools/intervallist/IntervalListFromVCFTest.vcf b/src/test/resources/htsjdk/samtools/intervallist/IntervalListFromVCFTest.vcf
similarity index 100%
rename from testdata/htsjdk/samtools/intervallist/IntervalListFromVCFTest.vcf
rename to src/test/resources/htsjdk/samtools/intervallist/IntervalListFromVCFTest.vcf
diff --git a/testdata/htsjdk/samtools/intervallist/IntervalListFromVCFTestComp.interval_list b/src/test/resources/htsjdk/samtools/intervallist/IntervalListFromVCFTestComp.interval_list
similarity index 100%
rename from testdata/htsjdk/samtools/intervallist/IntervalListFromVCFTestComp.interval_list
rename to src/test/resources/htsjdk/samtools/intervallist/IntervalListFromVCFTestComp.interval_list
diff --git a/testdata/htsjdk/samtools/intervallist/IntervalListFromVCFTestCompInverse.interval_list b/src/test/resources/htsjdk/samtools/intervallist/IntervalListFromVCFTestCompInverse.interval_list
similarity index 100%
rename from testdata/htsjdk/samtools/intervallist/IntervalListFromVCFTestCompInverse.interval_list
rename to src/test/resources/htsjdk/samtools/intervallist/IntervalListFromVCFTestCompInverse.interval_list
diff --git a/testdata/htsjdk/samtools/intervallist/IntervalListFromVCFTestCompInverseManual.interval_list b/src/test/resources/htsjdk/samtools/intervallist/IntervalListFromVCFTestCompInverseManual.interval_list
similarity index 100%
rename from testdata/htsjdk/samtools/intervallist/IntervalListFromVCFTestCompInverseManual.interval_list
rename to src/test/resources/htsjdk/samtools/intervallist/IntervalListFromVCFTestCompInverseManual.interval_list
diff --git a/testdata/htsjdk/samtools/intervallist/IntervalListFromVCFTestManual.vcf b/src/test/resources/htsjdk/samtools/intervallist/IntervalListFromVCFTestManual.vcf
similarity index 100%
rename from testdata/htsjdk/samtools/intervallist/IntervalListFromVCFTestManual.vcf
rename to src/test/resources/htsjdk/samtools/intervallist/IntervalListFromVCFTestManual.vcf
diff --git a/testdata/htsjdk/samtools/intervallist/IntervalListFromVCFTestManualComp.interval_list b/src/test/resources/htsjdk/samtools/intervallist/IntervalListFromVCFTestManualComp.interval_list
similarity index 100%
rename from testdata/htsjdk/samtools/intervallist/IntervalListFromVCFTestManualComp.interval_list
rename to src/test/resources/htsjdk/samtools/intervallist/IntervalListFromVCFTestManualComp.interval_list
diff --git a/testdata/htsjdk/samtools/intervallist/IntervalListchr123_empty.interval_list b/src/test/resources/htsjdk/samtools/intervallist/IntervalListchr123_empty.interval_list
similarity index 100%
rename from testdata/htsjdk/samtools/intervallist/IntervalListchr123_empty.interval_list
rename to src/test/resources/htsjdk/samtools/intervallist/IntervalListchr123_empty.interval_list
diff --git a/testdata/htsjdk/samtools/inttest.bam b/src/test/resources/htsjdk/samtools/inttest.bam
similarity index 100%
rename from testdata/htsjdk/samtools/inttest.bam
rename to src/test/resources/htsjdk/samtools/inttest.bam
diff --git a/testdata/htsjdk/samtools/inttest.sam b/src/test/resources/htsjdk/samtools/inttest.sam
similarity index 100%
rename from testdata/htsjdk/samtools/inttest.sam
rename to src/test/resources/htsjdk/samtools/inttest.sam
diff --git a/testdata/htsjdk/samtools/io/5newline5.txt b/src/test/resources/htsjdk/samtools/io/5newline5.txt
similarity index 100%
rename from testdata/htsjdk/samtools/io/5newline5.txt
rename to src/test/resources/htsjdk/samtools/io/5newline5.txt
diff --git a/testdata/htsjdk/samtools/io/empty.txt b/src/test/resources/htsjdk/samtools/io/empty.txt
similarity index 100%
rename from testdata/htsjdk/samtools/io/empty.txt
rename to src/test/resources/htsjdk/samtools/io/empty.txt
diff --git a/testdata/htsjdk/samtools/io/slurptest.txt b/src/test/resources/htsjdk/samtools/io/slurptest.txt
similarity index 100%
rename from testdata/htsjdk/samtools/io/slurptest.txt
rename to src/test/resources/htsjdk/samtools/io/slurptest.txt
diff --git a/testdata/htsjdk/samtools/issue76.bam b/src/test/resources/htsjdk/samtools/issue76.bam
similarity index 100%
rename from testdata/htsjdk/samtools/issue76.bam
rename to src/test/resources/htsjdk/samtools/issue76.bam
diff --git a/testdata/htsjdk/samtools/issue76.bam.bai b/src/test/resources/htsjdk/samtools/issue76.bam.bai
similarity index 100%
rename from testdata/htsjdk/samtools/issue76.bam.bai
rename to src/test/resources/htsjdk/samtools/issue76.bam.bai
diff --git a/testdata/htsjdk/samtools/liftover/hg18ToHg19.over.chain b/src/test/resources/htsjdk/samtools/liftover/hg18ToHg19.over.chain
similarity index 100%
rename from testdata/htsjdk/samtools/liftover/hg18ToHg19.over.chain
rename to src/test/resources/htsjdk/samtools/liftover/hg18ToHg19.over.chain
diff --git a/testdata/htsjdk/samtools/metrics/metricsOne.metrics b/src/test/resources/htsjdk/samtools/metrics/metricsOne.metrics
similarity index 100%
rename from testdata/htsjdk/samtools/metrics/metricsOne.metrics
rename to src/test/resources/htsjdk/samtools/metrics/metricsOne.metrics
diff --git a/testdata/htsjdk/samtools/metrics/metricsOneCopy.metrics b/src/test/resources/htsjdk/samtools/metrics/metricsOneCopy.metrics
similarity index 100%
rename from testdata/htsjdk/samtools/metrics/metricsOneCopy.metrics
rename to src/test/resources/htsjdk/samtools/metrics/metricsOneCopy.metrics
diff --git a/testdata/htsjdk/samtools/metrics/metricsOneModifiedHistogram.metrics b/src/test/resources/htsjdk/samtools/metrics/metricsOneModifiedHistogram.metrics
similarity index 100%
rename from testdata/htsjdk/samtools/metrics/metricsOneModifiedHistogram.metrics
rename to src/test/resources/htsjdk/samtools/metrics/metricsOneModifiedHistogram.metrics
diff --git a/testdata/htsjdk/samtools/metrics/metricsOneModifiedMetrics.metrics b/src/test/resources/htsjdk/samtools/metrics/metricsOneModifiedMetrics.metrics
similarity index 100%
rename from testdata/htsjdk/samtools/metrics/metricsOneModifiedMetrics.metrics
rename to src/test/resources/htsjdk/samtools/metrics/metricsOneModifiedMetrics.metrics
diff --git a/testdata/htsjdk/samtools/queryname_sorted.sam b/src/test/resources/htsjdk/samtools/queryname_sorted.sam
similarity index 100%
rename from testdata/htsjdk/samtools/queryname_sorted.sam
rename to src/test/resources/htsjdk/samtools/queryname_sorted.sam
diff --git a/testdata/htsjdk/samtools/readWithBadRname.sam b/src/test/resources/htsjdk/samtools/readWithBadRname.sam
similarity index 100%
rename from testdata/htsjdk/samtools/readWithBadRname.sam
rename to src/test/resources/htsjdk/samtools/readWithBadRname.sam
diff --git a/testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.fasta.fai b/src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.fasta.fai
similarity index 100%
rename from testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.fasta.fai
rename to src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.fasta.fai
diff --git a/testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.dict b/src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.dict
similarity index 100%
rename from testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.dict
rename to src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.dict
diff --git a/testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta b/src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta
similarity index 100%
rename from testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta
rename to src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta
diff --git a/testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta.fai b/src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta.fai
similarity index 100%
rename from testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta.fai
rename to src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta.fai
diff --git a/testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.nodict.fasta b/src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.nodict.fasta
similarity index 100%
rename from testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.nodict.fasta
rename to src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.nodict.fasta
diff --git a/testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.nodict.fasta.fai b/src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.nodict.fasta.fai
similarity index 100%
rename from testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.nodict.fasta.fai
rename to src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.nodict.fasta.fai
diff --git a/testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.noindex.fasta b/src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.noindex.fasta
similarity index 100%
rename from testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.noindex.fasta
rename to src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.noindex.fasta
diff --git a/testdata/htsjdk/samtools/reference/reference_with_trailing_whitespace.dict b/src/test/resources/htsjdk/samtools/reference/reference_with_trailing_whitespace.dict
similarity index 100%
rename from testdata/htsjdk/samtools/reference/reference_with_trailing_whitespace.dict
rename to src/test/resources/htsjdk/samtools/reference/reference_with_trailing_whitespace.dict
diff --git a/testdata/htsjdk/samtools/reference/reference_with_trailing_whitespace.fasta b/src/test/resources/htsjdk/samtools/reference/reference_with_trailing_whitespace.fasta
similarity index 100%
rename from testdata/htsjdk/samtools/reference/reference_with_trailing_whitespace.fasta
rename to src/test/resources/htsjdk/samtools/reference/reference_with_trailing_whitespace.fasta
diff --git a/testdata/htsjdk/samtools/reference/testing.fai b/src/test/resources/htsjdk/samtools/reference/testing.fai
similarity index 100%
rename from testdata/htsjdk/samtools/reference/testing.fai
rename to src/test/resources/htsjdk/samtools/reference/testing.fai
diff --git a/testdata/htsjdk/samtools/roundtrip.sam b/src/test/resources/htsjdk/samtools/roundtrip.sam
similarity index 100%
rename from testdata/htsjdk/samtools/roundtrip.sam
rename to src/test/resources/htsjdk/samtools/roundtrip.sam
diff --git a/testdata/htsjdk/samtools/samFilter01.js b/src/test/resources/htsjdk/samtools/samFilter01.js
similarity index 100%
rename from testdata/htsjdk/samtools/samFilter01.js
rename to src/test/resources/htsjdk/samtools/samFilter01.js
diff --git a/testdata/htsjdk/samtools/samFilter02.js b/src/test/resources/htsjdk/samtools/samFilter02.js
similarity index 100%
rename from testdata/htsjdk/samtools/samFilter02.js
rename to src/test/resources/htsjdk/samtools/samFilter02.js
diff --git a/testdata/htsjdk/samtools/seekablestream/megabyteZeros.dat b/src/test/resources/htsjdk/samtools/seekablestream/megabyteZeros.dat
similarity index 100%
rename from testdata/htsjdk/samtools/seekablestream/megabyteZeros.dat
rename to src/test/resources/htsjdk/samtools/seekablestream/megabyteZeros.dat
diff --git a/testdata/htsjdk/samtools/seekablestream/seekTest.txt b/src/test/resources/htsjdk/samtools/seekablestream/seekTest.txt
similarity index 100%
rename from testdata/htsjdk/samtools/seekablestream/seekTest.txt
rename to src/test/resources/htsjdk/samtools/seekablestream/seekTest.txt
diff --git a/testdata/htsjdk/samtools/sequenceWithSpace.bam b/src/test/resources/htsjdk/samtools/sequenceWithSpace.bam
similarity index 100%
rename from testdata/htsjdk/samtools/sequenceWithSpace.bam
rename to src/test/resources/htsjdk/samtools/sequenceWithSpace.bam
diff --git a/testdata/htsjdk/samtools/sequenceWithSpace.sam b/src/test/resources/htsjdk/samtools/sequenceWithSpace.sam
similarity index 100%
rename from testdata/htsjdk/samtools/sequenceWithSpace.sam
rename to src/test/resources/htsjdk/samtools/sequenceWithSpace.sam
diff --git a/testdata/htsjdk/samtools/serialization_test.bam b/src/test/resources/htsjdk/samtools/serialization_test.bam
similarity index 100%
rename from testdata/htsjdk/samtools/serialization_test.bam
rename to src/test/resources/htsjdk/samtools/serialization_test.bam
diff --git a/testdata/htsjdk/samtools/serialization_test.bam.bai b/src/test/resources/htsjdk/samtools/serialization_test.bam.bai
similarity index 100%
rename from testdata/htsjdk/samtools/serialization_test.bam.bai
rename to src/test/resources/htsjdk/samtools/serialization_test.bam.bai
diff --git a/testdata/htsjdk/samtools/serialization_test.sam b/src/test/resources/htsjdk/samtools/serialization_test.sam
similarity index 100%
rename from testdata/htsjdk/samtools/serialization_test.sam
rename to src/test/resources/htsjdk/samtools/serialization_test.sam
diff --git a/testdata/htsjdk/samtools/sra/test_archive.sra b/src/test/resources/htsjdk/samtools/sra/test_archive.sra
similarity index 100%
rename from testdata/htsjdk/samtools/sra/test_archive.sra
rename to src/test/resources/htsjdk/samtools/sra/test_archive.sra
diff --git a/testdata/htsjdk/samtools/uncompressed.sam b/src/test/resources/htsjdk/samtools/uncompressed.sam
similarity index 100%
rename from testdata/htsjdk/samtools/uncompressed.sam
rename to src/test/resources/htsjdk/samtools/uncompressed.sam
diff --git a/testdata/htsjdk/samtools/unsorted.sam b/src/test/resources/htsjdk/samtools/unsorted.sam
similarity index 100%
rename from testdata/htsjdk/samtools/unsorted.sam
rename to src/test/resources/htsjdk/samtools/unsorted.sam
diff --git a/testdata/htsjdk/samtools/util/QualityEncodingDetectorTest/5k-30BB2AAXX.3.aligned.sam.fastq b/src/test/resources/htsjdk/samtools/util/QualityEncodingDetectorTest/5k-30BB2AAXX.3.aligned.sam.fastq
similarity index 100%
rename from testdata/htsjdk/samtools/util/QualityEncodingDetectorTest/5k-30BB2AAXX.3.aligned.sam.fastq
rename to src/test/resources/htsjdk/samtools/util/QualityEncodingDetectorTest/5k-30BB2AAXX.3.aligned.sam.fastq
diff --git a/testdata/htsjdk/samtools/util/QualityEncodingDetectorTest/illumina-as-standard.bam b/src/test/resources/htsjdk/samtools/util/QualityEncodingDetectorTest/illumina-as-standard.bam
similarity index 100%
rename from testdata/htsjdk/samtools/util/QualityEncodingDetectorTest/illumina-as-standard.bam
rename to src/test/resources/htsjdk/samtools/util/QualityEncodingDetectorTest/illumina-as-standard.bam
diff --git a/testdata/htsjdk/samtools/util/QualityEncodingDetectorTest/s_1_sequence.txt b/src/test/resources/htsjdk/samtools/util/QualityEncodingDetectorTest/s_1_sequence.txt
similarity index 100%
rename from testdata/htsjdk/samtools/util/QualityEncodingDetectorTest/s_1_sequence.txt
rename to src/test/resources/htsjdk/samtools/util/QualityEncodingDetectorTest/s_1_sequence.txt
diff --git a/testdata/htsjdk/samtools/util/QualityEncodingDetectorTest/solexa-as-standard.bam b/src/test/resources/htsjdk/samtools/util/QualityEncodingDetectorTest/solexa-as-standard.bam
similarity index 100%
rename from testdata/htsjdk/samtools/util/QualityEncodingDetectorTest/solexa-as-standard.bam
rename to src/test/resources/htsjdk/samtools/util/QualityEncodingDetectorTest/solexa-as-standard.bam
diff --git a/testdata/htsjdk/samtools/util/QualityEncodingDetectorTest/solexa_full_range_as_solexa.fastq b/src/test/resources/htsjdk/samtools/util/QualityEncodingDetectorTest/solexa_full_range_as_solexa.fastq
similarity index 100%
rename from testdata/htsjdk/samtools/util/QualityEncodingDetectorTest/solexa_full_range_as_solexa.fastq
rename to src/test/resources/htsjdk/samtools/util/QualityEncodingDetectorTest/solexa_full_range_as_solexa.fastq
diff --git a/testdata/htsjdk/samtools/util/QualityEncodingDetectorTest/unmapped.sam b/src/test/resources/htsjdk/samtools/util/QualityEncodingDetectorTest/unmapped.sam
similarity index 100%
rename from testdata/htsjdk/samtools/util/QualityEncodingDetectorTest/unmapped.sam
rename to src/test/resources/htsjdk/samtools/util/QualityEncodingDetectorTest/unmapped.sam
diff --git a/testdata/htsjdk/samtools/util/defective_bgzf.bam b/src/test/resources/htsjdk/samtools/util/defective_bgzf.bam
similarity index 100%
rename from testdata/htsjdk/samtools/util/defective_bgzf.bam
rename to src/test/resources/htsjdk/samtools/util/defective_bgzf.bam
diff --git a/testdata/htsjdk/samtools/util/no_bgzf_terminator.bam b/src/test/resources/htsjdk/samtools/util/no_bgzf_terminator.bam
similarity index 100%
rename from testdata/htsjdk/samtools/util/no_bgzf_terminator.bam
rename to src/test/resources/htsjdk/samtools/util/no_bgzf_terminator.bam
diff --git a/testdata/htsjdk/samtools/util/whitespace_text_file.txt b/src/test/resources/htsjdk/samtools/util/whitespace_text_file.txt
similarity index 100%
rename from testdata/htsjdk/samtools/util/whitespace_text_file.txt
rename to src/test/resources/htsjdk/samtools/util/whitespace_text_file.txt
diff --git a/testdata/htsjdk/tribble/Tb.vcf.idx b/src/test/resources/htsjdk/tribble/Tb.vcf.idx
similarity index 100%
rename from testdata/htsjdk/tribble/Tb.vcf.idx
rename to src/test/resources/htsjdk/tribble/Tb.vcf.idx
diff --git a/testdata/htsjdk/tribble/basicDbSNP.dbsnp b/src/test/resources/htsjdk/tribble/basicDbSNP.dbsnp
similarity index 100%
rename from testdata/htsjdk/tribble/basicDbSNP.dbsnp
rename to src/test/resources/htsjdk/tribble/basicDbSNP.dbsnp
diff --git a/testdata/htsjdk/tribble/bed/NA12878.deletions.10kbp.het.gq99.hand_curated.hg19.bed b/src/test/resources/htsjdk/tribble/bed/NA12878.deletions.10kbp.het.gq99.hand_curated.hg19.bed
similarity index 100%
rename from testdata/htsjdk/tribble/bed/NA12878.deletions.10kbp.het.gq99.hand_curated.hg19.bed
rename to src/test/resources/htsjdk/tribble/bed/NA12878.deletions.10kbp.het.gq99.hand_curated.hg19.bed
diff --git a/testdata/htsjdk/tribble/bed/NA12878.deletions.10kbp.het.gq99.hand_curated.hg19_fixed.bed b/src/test/resources/htsjdk/tribble/bed/NA12878.deletions.10kbp.het.gq99.hand_curated.hg19_fixed.bed
similarity index 100%
rename from testdata/htsjdk/tribble/bed/NA12878.deletions.10kbp.het.gq99.hand_curated.hg19_fixed.bed
rename to src/test/resources/htsjdk/tribble/bed/NA12878.deletions.10kbp.het.gq99.hand_curated.hg19_fixed.bed
diff --git a/testdata/htsjdk/tribble/bed/Unigene.sample.bed b/src/test/resources/htsjdk/tribble/bed/Unigene.sample.bed
similarity index 100%
rename from testdata/htsjdk/tribble/bed/Unigene.sample.bed
rename to src/test/resources/htsjdk/tribble/bed/Unigene.sample.bed
diff --git a/testdata/htsjdk/tribble/bed/disconcontigs.bed b/src/test/resources/htsjdk/tribble/bed/disconcontigs.bed
similarity index 100%
rename from testdata/htsjdk/tribble/bed/disconcontigs.bed
rename to src/test/resources/htsjdk/tribble/bed/disconcontigs.bed
diff --git a/testdata/htsjdk/tribble/bed/unsorted.bed b/src/test/resources/htsjdk/tribble/bed/unsorted.bed
similarity index 100%
rename from testdata/htsjdk/tribble/bed/unsorted.bed
rename to src/test/resources/htsjdk/tribble/bed/unsorted.bed
diff --git a/testdata/htsjdk/tribble/complexExample.vcf b/src/test/resources/htsjdk/tribble/complexExample.vcf
similarity index 100%
rename from testdata/htsjdk/tribble/complexExample.vcf
rename to src/test/resources/htsjdk/tribble/complexExample.vcf
diff --git a/testdata/htsjdk/tribble/fakeBed.bed b/src/test/resources/htsjdk/tribble/fakeBed.bed
similarity index 100%
rename from testdata/htsjdk/tribble/fakeBed.bed
rename to src/test/resources/htsjdk/tribble/fakeBed.bed
diff --git a/testdata/htsjdk/tribble/fakeVCF.vcf.gz b/src/test/resources/htsjdk/tribble/fakeVCF.vcf.gz
similarity index 100%
rename from testdata/htsjdk/tribble/fakeVCF.vcf.gz
rename to src/test/resources/htsjdk/tribble/fakeVCF.vcf.gz
diff --git a/testdata/htsjdk/tribble/fakeVCF.vcf.gz.tbi b/src/test/resources/htsjdk/tribble/fakeVCF.vcf.gz.tbi
similarity index 100%
rename from testdata/htsjdk/tribble/fakeVCF.vcf.gz.tbi
rename to src/test/resources/htsjdk/tribble/fakeVCF.vcf.gz.tbi
diff --git a/testdata/htsjdk/tribble/gwas/smallp.gwas b/src/test/resources/htsjdk/tribble/gwas/smallp.gwas
similarity index 100%
rename from testdata/htsjdk/tribble/gwas/smallp.gwas
rename to src/test/resources/htsjdk/tribble/gwas/smallp.gwas
diff --git a/testdata/htsjdk/tribble/index/chrY_Y4_small.bed b/src/test/resources/htsjdk/tribble/index/chrY_Y4_small.bed
similarity index 100%
rename from testdata/htsjdk/tribble/index/chrY_Y4_small.bed
rename to src/test/resources/htsjdk/tribble/index/chrY_Y4_small.bed
diff --git a/testdata/htsjdk/tribble/large.txt b/src/test/resources/htsjdk/tribble/large.txt
similarity index 100%
rename from testdata/htsjdk/tribble/large.txt
rename to src/test/resources/htsjdk/tribble/large.txt
diff --git a/testdata/htsjdk/tribble/sites.bed b/src/test/resources/htsjdk/tribble/sites.bed
similarity index 100%
rename from testdata/htsjdk/tribble/sites.bed
rename to src/test/resources/htsjdk/tribble/sites.bed
diff --git a/testdata/htsjdk/tribble/soapsnp.txt b/src/test/resources/htsjdk/tribble/soapsnp.txt
similarity index 100%
rename from testdata/htsjdk/tribble/soapsnp.txt
rename to src/test/resources/htsjdk/tribble/soapsnp.txt
diff --git a/testdata/htsjdk/tribble/split/test.bin.part1 b/src/test/resources/htsjdk/tribble/split/test.bin.part1
similarity index 100%
rename from testdata/htsjdk/tribble/split/test.bin.part1
rename to src/test/resources/htsjdk/tribble/split/test.bin.part1
diff --git a/testdata/htsjdk/tribble/split/test.bin.part2 b/src/test/resources/htsjdk/tribble/split/test.bin.part2
similarity index 100%
rename from testdata/htsjdk/tribble/split/test.bin.part2
rename to src/test/resources/htsjdk/tribble/split/test.bin.part2
diff --git a/testdata/htsjdk/tribble/split/test.bin.part3 b/src/test/resources/htsjdk/tribble/split/test.bin.part3
similarity index 100%
rename from testdata/htsjdk/tribble/split/test.bin.part3
rename to src/test/resources/htsjdk/tribble/split/test.bin.part3
diff --git a/testdata/htsjdk/tribble/split/test.bin.split b/src/test/resources/htsjdk/tribble/split/test.bin.split
similarity index 100%
rename from testdata/htsjdk/tribble/split/test.bin.split
rename to src/test/resources/htsjdk/tribble/split/test.bin.split
diff --git a/testdata/htsjdk/tribble/tabix/YRI.trio.2010_07.indel.sites.vcf.gz b/src/test/resources/htsjdk/tribble/tabix/YRI.trio.2010_07.indel.sites.vcf.gz
similarity index 100%
rename from testdata/htsjdk/tribble/tabix/YRI.trio.2010_07.indel.sites.vcf.gz
rename to src/test/resources/htsjdk/tribble/tabix/YRI.trio.2010_07.indel.sites.vcf.gz
diff --git a/testdata/htsjdk/tribble/tabix/YRI.trio.2010_07.indel.sites.vcf.gz.tbi b/src/test/resources/htsjdk/tribble/tabix/YRI.trio.2010_07.indel.sites.vcf.gz.tbi
similarity index 100%
rename from testdata/htsjdk/tribble/tabix/YRI.trio.2010_07.indel.sites.vcf.gz.tbi
rename to src/test/resources/htsjdk/tribble/tabix/YRI.trio.2010_07.indel.sites.vcf.gz.tbi
diff --git a/testdata/htsjdk/tribble/tabix/bigger.vcf.gz.tbi b/src/test/resources/htsjdk/tribble/tabix/bigger.vcf.gz.tbi
similarity index 100%
rename from testdata/htsjdk/tribble/tabix/bigger.vcf.gz.tbi
rename to src/test/resources/htsjdk/tribble/tabix/bigger.vcf.gz.tbi
diff --git a/testdata/htsjdk/tribble/tabix/testTabixIndex.vcf b/src/test/resources/htsjdk/tribble/tabix/testTabixIndex.vcf
similarity index 100%
rename from testdata/htsjdk/tribble/tabix/testTabixIndex.vcf
rename to src/test/resources/htsjdk/tribble/tabix/testTabixIndex.vcf
diff --git a/testdata/htsjdk/tribble/tabix/testTabixIndex.vcf.gz b/src/test/resources/htsjdk/tribble/tabix/testTabixIndex.vcf.gz
similarity index 100%
rename from testdata/htsjdk/tribble/tabix/testTabixIndex.vcf.gz
rename to src/test/resources/htsjdk/tribble/tabix/testTabixIndex.vcf.gz
diff --git a/testdata/htsjdk/tribble/tabix/trioDup.vcf b/src/test/resources/htsjdk/tribble/tabix/trioDup.vcf
similarity index 100%
rename from testdata/htsjdk/tribble/tabix/trioDup.vcf
rename to src/test/resources/htsjdk/tribble/tabix/trioDup.vcf
diff --git a/testdata/htsjdk/tribble/tabix/trioDup.vcf.gz b/src/test/resources/htsjdk/tribble/tabix/trioDup.vcf.gz
similarity index 100%
rename from testdata/htsjdk/tribble/tabix/trioDup.vcf.gz
rename to src/test/resources/htsjdk/tribble/tabix/trioDup.vcf.gz
diff --git a/testdata/htsjdk/tribble/tabix/trioDup.vcf.gz.tbi b/src/test/resources/htsjdk/tribble/tabix/trioDup.vcf.gz.tbi
similarity index 100%
rename from testdata/htsjdk/tribble/tabix/trioDup.vcf.gz.tbi
rename to src/test/resources/htsjdk/tribble/tabix/trioDup.vcf.gz.tbi
diff --git a/testdata/htsjdk/tribble/test.bed b/src/test/resources/htsjdk/tribble/test.bed
similarity index 100%
rename from testdata/htsjdk/tribble/test.bed
rename to src/test/resources/htsjdk/tribble/test.bed
diff --git a/testdata/htsjdk/tribble/test.gff b/src/test/resources/htsjdk/tribble/test.gff
similarity index 100%
rename from testdata/htsjdk/tribble/test.gff
rename to src/test/resources/htsjdk/tribble/test.gff
diff --git a/testdata/htsjdk/tribble/test.tabix.bed.gz b/src/test/resources/htsjdk/tribble/test.tabix.bed.gz
similarity index 100%
rename from testdata/htsjdk/tribble/test.tabix.bed.gz
rename to src/test/resources/htsjdk/tribble/test.tabix.bed.gz
diff --git a/testdata/htsjdk/tribble/test.tabix.bed.gz.tbi b/src/test/resources/htsjdk/tribble/test.tabix.bed.gz.tbi
similarity index 100%
rename from testdata/htsjdk/tribble/test.tabix.bed.gz.tbi
rename to src/test/resources/htsjdk/tribble/test.tabix.bed.gz.tbi
diff --git a/testdata/htsjdk/tribble/test.vcf b/src/test/resources/htsjdk/tribble/test.vcf
similarity index 100%
rename from testdata/htsjdk/tribble/test.vcf
rename to src/test/resources/htsjdk/tribble/test.vcf
diff --git a/testdata/htsjdk/tribble/test.vcf.gz b/src/test/resources/htsjdk/tribble/test.vcf.gz
similarity index 100%
rename from testdata/htsjdk/tribble/test.vcf.gz
rename to src/test/resources/htsjdk/tribble/test.vcf.gz
diff --git a/testdata/htsjdk/tribble/testGeliText.txt b/src/test/resources/htsjdk/tribble/testGeliText.txt
similarity index 100%
rename from testdata/htsjdk/tribble/testGeliText.txt
rename to src/test/resources/htsjdk/tribble/testGeliText.txt
diff --git a/testdata/htsjdk/tribble/testIntervalList.list b/src/test/resources/htsjdk/tribble/testIntervalList.list
similarity index 100%
rename from testdata/htsjdk/tribble/testIntervalList.list
rename to src/test/resources/htsjdk/tribble/testIntervalList.list
diff --git a/testdata/htsjdk/tribble/tmp/.gitignore b/src/test/resources/htsjdk/tribble/tmp/.gitignore
similarity index 100%
rename from testdata/htsjdk/tribble/tmp/.gitignore
rename to src/test/resources/htsjdk/tribble/tmp/.gitignore
diff --git a/testdata/htsjdk/tribble/trio.vcf b/src/test/resources/htsjdk/tribble/trio.vcf
similarity index 100%
rename from testdata/htsjdk/tribble/trio.vcf
rename to src/test/resources/htsjdk/tribble/trio.vcf
diff --git a/testdata/htsjdk/tribble/trio.vcf.idx b/src/test/resources/htsjdk/tribble/trio.vcf.idx
similarity index 100%
rename from testdata/htsjdk/tribble/trio.vcf.idx
rename to src/test/resources/htsjdk/tribble/trio.vcf.idx
diff --git a/testdata/htsjdk/tribble/trioDup.vcf b/src/test/resources/htsjdk/tribble/trioDup.vcf
similarity index 100%
rename from testdata/htsjdk/tribble/trioDup.vcf
rename to src/test/resources/htsjdk/tribble/trioDup.vcf
diff --git a/testdata/htsjdk/tribble/vcfexample.vcf b/src/test/resources/htsjdk/tribble/vcfexample.vcf
similarity index 100%
rename from testdata/htsjdk/tribble/vcfexample.vcf
rename to src/test/resources/htsjdk/tribble/vcfexample.vcf
diff --git a/testdata/htsjdk/tribble/vcfexample.vcf.gz b/src/test/resources/htsjdk/tribble/vcfexample.vcf.gz
similarity index 100%
rename from testdata/htsjdk/tribble/vcfexample.vcf.gz
rename to src/test/resources/htsjdk/tribble/vcfexample.vcf.gz
diff --git a/testdata/htsjdk/tribble/vcfexample.vcf.gz.tbi b/src/test/resources/htsjdk/tribble/vcfexample.vcf.gz.tbi
similarity index 100%
rename from testdata/htsjdk/tribble/vcfexample.vcf.gz.tbi
rename to src/test/resources/htsjdk/tribble/vcfexample.vcf.gz.tbi
diff --git a/testdata/htsjdk/variant/HiSeq.10000.vcf b/src/test/resources/htsjdk/variant/HiSeq.10000.vcf
similarity index 100%
rename from testdata/htsjdk/variant/HiSeq.10000.vcf
rename to src/test/resources/htsjdk/variant/HiSeq.10000.vcf
diff --git a/testdata/htsjdk/variant/ILLUMINA.wex.broad_phase2_baseline.20111114.both.exome.genotypes.1000.vcf b/src/test/resources/htsjdk/variant/ILLUMINA.wex.broad_phase2_baseline.20111114.both.exome.genotypes.1000.vcf
similarity index 100%
rename from testdata/htsjdk/variant/ILLUMINA.wex.broad_phase2_baseline.20111114.both.exome.genotypes.1000.vcf
rename to src/test/resources/htsjdk/variant/ILLUMINA.wex.broad_phase2_baseline.20111114.both.exome.genotypes.1000.vcf
diff --git a/src/test/resources/htsjdk/variant/VCF4HeaderTest.vcf b/src/test/resources/htsjdk/variant/VCF4HeaderTest.vcf
new file mode 100644
index 0000000..9af0cb3
--- /dev/null
+++ b/src/test/resources/htsjdk/variant/VCF4HeaderTest.vcf
@@ -0,0 +1,42 @@
+##fileformat=VCFv4.0
+##FILTER=<ID=ABFilter,Description="AB > 0.75 && DP > 40">
+##FILTER=<ID=DPFilter,Description="DP > 120 || SB > -0.10">
+##FILTER=<ID=FDRtranche0.00to0.10,Description="FDR tranche level at qual 0.06">
+##FILTER=<ID=FDRtranche0.10to1.00,Description="FDR tranche level at qual 0.03">
+##FILTER=<ID=FDRtranche1.00to2.00,Description="FDR tranche level at qual 0.02">
+##FILTER=<ID=FDRtranche2.00to10.00+,Description="FDR tranche level at qual > 0.06">
+##FILTER=<ID=FDRtranche2.00to10.00,Description="FDR tranche level at qual unknown">
+##FILTER=<ID=HARD_TO_VALIDATE,Description="MQ0 >= 4 && ((MQ0 / (1.0 * DP)) > 0.1)">
+##FILTER=<ID=Indel,Description="Overlaps a user-input mask">
+##FILTER=<ID=LowQual,Description="Low quality">
+##FILTER=<ID=LowQual,Description="QUAL  50.0">
+##FILTER=<ID=ANNOTATION,Description="ANNOTATION != \"NA\" || ANNOTATION <= 0.01">
+##FILTER=<ID=ANNOTATION2,Description="ANNOTATION with quote \" that is unmatched but escaped">
+##FILTER=<ID=SnpCluster,Description="SNPs found in clusters">
+##FORMAT=<ID=AD,Number=.,Type=Integer,Description="Allelic depths for the ref and alt alleles in the order listed">
+##FORMAT=<ID=DP,Number=1,Type=Integer,Description="Read Depth (only filtered reads used for calling)">
+##FORMAT=<ID=GL,Number=3,Type=Float,Description="Log-scaled likelihoods for AA,AB,BB genotypes where A=ref and B=alt; not applicable if site is not biallelic">
+##FORMAT=<ID=GQ,Number=1,Type=Float,Description="Genotype Quality">
+##FORMAT=<ID=GT,Number=1,Type=String,Description="Genotype">
+##INFO=<ID=AC,Number=.,Type=Integer,Description="Allele count in genotypes, for each ALT allele, in the same order as listed">
+##INFO=<ID=AF,Number=.,Type=Float,Description="Allele Frequency, for each ALT allele, in the same order as listed">
+##INFO=<ID=AN,Number=1,Type=Integer,Description="Total number of alleles in called genotypes">
+##INFO=<ID=DB,Number=0,Type=Flag,Description="dbSNP Membership">
+##INFO=<ID=DP,Number=1,Type=Integer,Description="Total Depth">
+##INFO=<ID=Dels,Number=1,Type=Float,Description="Fraction of Reads Containing Spanning Deletions">
+##INFO=<ID=HRun,Number=1,Type=Integer,Description="Largest Contiguous Homopolymer Run of Variant Allele In Either Direction">
+##INFO=<ID=HaplotypeScore,Number=1,Type=Float,Description="Consistency of the site with two (and only two) segregating haplotypes">
+##INFO=<ID=EscapingQuote,Number=1,Type=Float,Description="This description has an escaped \" quote in it">
+##INFO=<ID=EscapingBackslash,Number=1,Type=Float,Description="This description has an escaped \\ backslash in it">
+##INFO=<ID=EscapingNonQuoteOrBackslash,Number=1,Type=Float,Description="This other value has a \n newline in it">
+##INFO=<ID=MQ,Number=1,Type=Float,Description="RMS Mapping Quality">
+##INFO=<ID=MQ0,Number=1,Type=Integer,Description="Total Mapping Quality Zero Reads">
+##INFO=<ID=OQ,Number=1,Type=Float,Description="The original variant quality score">
+##INFO=<ID=QD,Number=1,Type=Float,Description="Variant Confidence/Quality by Depth">
+##INFO=<ID=SB,Number=1,Type=Float,Description="Strand Bias">
+##UnifiedGenotyper="analysis_type=UnifiedGenotyper input_file=[/humgen/1kg/analysis/bamsForDataProcessingPapers/scriptsToMakeBams/Q-2970 at gsa2-1-temp-23/NA12878.HiSeq.WGS.bwa.cleaned.recal.bam, /humgen/1kg/analysis/bamsForDataProcessingPapers/scriptsToMakeBams/Q-2970 at gsa2-1-temp-24/NA12878.HiSeq.WGS.bwa.cleaned.recal.bam, /humgen/1kg/analysis/bamsForDataProcessingPapers/scriptsToMakeBams/Q-2970 at gsa2-1-temp-5/NA12878.HiSeq.WGS.bwa.cleaned.recal.bam, /humgen/1kg/analysis/bamsForDataProcessi [...]
+##VariantFiltration="analysis_type=VariantFiltration input_file=[] read_buffer_size=null read_filter=[] intervals=null excludeIntervals=[chrM, chrY] reference_sequence=/seq/references/Homo_sapiens_assembly18/v0/Homo_sapiens_assembly18.fasta rodBind=[variant,VCF,wgs.v9/HiSeq.WGS.cleaned.ug.snpfiltered.vcf, mask,Bed,wgs.v9/HiSeq.WGS.cleaned.indels.10.mask] rodToIntervalTrackName=null BTI_merge_rule=UNION DBSNP=null hapmap=null hapmap_chip=null out=wgs.v9/HiSeq.WGS.cleaned.ug.snpfiltered.in [...]
+##VariantFiltration="analysis_type=VariantFiltration input_file=[] read_buffer_size=null read_filter=[] intervals=null excludeIntervals=[chrM, chrY] reference_sequence=/seq/references/Homo_sapiens_assembly18/v0/Homo_sapiens_assembly18.fasta rodBind=[variant,VCF,wgs.v9/HiSeq.WGS.cleaned.ug.vcf] rodToIntervalTrackName=null BTI_merge_rule=UNION DBSNP=null hapmap=null hapmap_chip=null out=wgs.v9/HiSeq.WGS.cleaned.ug.snpfiltered.vcf err=null outerr=null filterZeroMappingQualityReads=false dow [...]
+##source=VariantOptimizer
+#CHROM	POS	ID	REF	ALT	QUAL	FILTER	INFO	FORMAT	NA12878	
+chr1	109	.	A	T	0	FDRtranche2.00to10.00+	AC=1;AF=0.50;AN=2;DP=1019;Dels=0.00;HRun=0;HaplotypeScore=686.65;MQ=19.20;MQ0=288;OQ=2175.54;QD=2.13;SB=-1042.18	GT:AD:DP:GL:GQ	0/1:610,327:308:-316.30,-95.47,-803.03:99
diff --git a/testdata/htsjdk/variant/VQSR.mixedTest.recal b/src/test/resources/htsjdk/variant/VQSR.mixedTest.recal
similarity index 100%
rename from testdata/htsjdk/variant/VQSR.mixedTest.recal
rename to src/test/resources/htsjdk/variant/VQSR.mixedTest.recal
diff --git a/testdata/htsjdk/variant/breakpoint.vcf b/src/test/resources/htsjdk/variant/breakpoint.vcf
similarity index 100%
rename from testdata/htsjdk/variant/breakpoint.vcf
rename to src/test/resources/htsjdk/variant/breakpoint.vcf
diff --git a/testdata/htsjdk/variant/dbsnp_135.b37.1000.vcf b/src/test/resources/htsjdk/variant/dbsnp_135.b37.1000.vcf
similarity index 100%
rename from testdata/htsjdk/variant/dbsnp_135.b37.1000.vcf
rename to src/test/resources/htsjdk/variant/dbsnp_135.b37.1000.vcf
diff --git a/testdata/htsjdk/variant/diagnosis_targets_testfile.vcf b/src/test/resources/htsjdk/variant/diagnosis_targets_testfile.vcf
similarity index 100%
rename from testdata/htsjdk/variant/diagnosis_targets_testfile.vcf
rename to src/test/resources/htsjdk/variant/diagnosis_targets_testfile.vcf
diff --git a/testdata/htsjdk/variant/ex2.bgzf.bcf b/src/test/resources/htsjdk/variant/ex2.bgzf.bcf
similarity index 100%
rename from testdata/htsjdk/variant/ex2.bgzf.bcf
rename to src/test/resources/htsjdk/variant/ex2.bgzf.bcf
diff --git a/testdata/htsjdk/variant/ex2.bgzf.bcf.csi b/src/test/resources/htsjdk/variant/ex2.bgzf.bcf.csi
similarity index 100%
rename from testdata/htsjdk/variant/ex2.bgzf.bcf.csi
rename to src/test/resources/htsjdk/variant/ex2.bgzf.bcf.csi
diff --git a/testdata/htsjdk/variant/ex2.uncompressed.bcf b/src/test/resources/htsjdk/variant/ex2.uncompressed.bcf
similarity index 100%
rename from testdata/htsjdk/variant/ex2.uncompressed.bcf
rename to src/test/resources/htsjdk/variant/ex2.uncompressed.bcf
diff --git a/testdata/htsjdk/variant/ex2.vcf b/src/test/resources/htsjdk/variant/ex2.vcf
similarity index 100%
rename from testdata/htsjdk/variant/ex2.vcf
rename to src/test/resources/htsjdk/variant/ex2.vcf
diff --git a/testdata/htsjdk/variant/serialization_test.bcf b/src/test/resources/htsjdk/variant/serialization_test.bcf
similarity index 100%
rename from testdata/htsjdk/variant/serialization_test.bcf
rename to src/test/resources/htsjdk/variant/serialization_test.bcf
diff --git a/testdata/htsjdk/variant/serialization_test.bcf.idx b/src/test/resources/htsjdk/variant/serialization_test.bcf.idx
similarity index 100%
rename from testdata/htsjdk/variant/serialization_test.bcf.idx
rename to src/test/resources/htsjdk/variant/serialization_test.bcf.idx
diff --git a/testdata/htsjdk/variant/utils/SamSequenceDictionaryExtractor/Homo_sapiens_assembly18.trimmed.dict b/src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/Homo_sapiens_assembly18.trimmed.dict
similarity index 100%
rename from testdata/htsjdk/variant/utils/SamSequenceDictionaryExtractor/Homo_sapiens_assembly18.trimmed.dict
rename to src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/Homo_sapiens_assembly18.trimmed.dict
diff --git a/testdata/htsjdk/variant/utils/SamSequenceDictionaryExtractor/Homo_sapiens_assembly18.trimmed.fasta b/src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/Homo_sapiens_assembly18.trimmed.fasta
similarity index 100%
rename from testdata/htsjdk/variant/utils/SamSequenceDictionaryExtractor/Homo_sapiens_assembly18.trimmed.fasta
rename to src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/Homo_sapiens_assembly18.trimmed.fasta
diff --git a/testdata/htsjdk/variant/utils/SamSequenceDictionaryExtractor/Homo_sapiens_assembly18.trimmed.fasta.fai b/src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/Homo_sapiens_assembly18.trimmed.fasta.fai
similarity index 100%
rename from testdata/htsjdk/variant/utils/SamSequenceDictionaryExtractor/Homo_sapiens_assembly18.trimmed.fasta.fai
rename to src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/Homo_sapiens_assembly18.trimmed.fasta.fai
diff --git a/testdata/htsjdk/variant/utils/SamSequenceDictionaryExtractor/ScreenSamReads.100.input.sam b/src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/ScreenSamReads.100.input.sam
similarity index 100%
rename from testdata/htsjdk/variant/utils/SamSequenceDictionaryExtractor/ScreenSamReads.100.input.sam
rename to src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/ScreenSamReads.100.input.sam
diff --git a/testdata/htsjdk/variant/utils/SamSequenceDictionaryExtractor/empty.interval_list b/src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/empty.interval_list
similarity index 100%
rename from testdata/htsjdk/variant/utils/SamSequenceDictionaryExtractor/empty.interval_list
rename to src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/empty.interval_list
diff --git a/testdata/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test1.dict b/src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test1.dict
similarity index 100%
rename from testdata/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test1.dict
rename to src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test1.dict
diff --git a/testdata/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test1.vcf b/src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test1.vcf
similarity index 100%
rename from testdata/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test1.vcf
rename to src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test1.vcf
diff --git a/testdata/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test1.vcf.idx b/src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test1.vcf.idx
similarity index 100%
rename from testdata/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test1.vcf.idx
rename to src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test1.vcf.idx
diff --git a/testdata/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test1_comp.interval_list b/src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test1_comp.interval_list
similarity index 100%
rename from testdata/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test1_comp.interval_list
rename to src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test1_comp.interval_list
diff --git a/testdata/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test2_comp.interval_list b/src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test2_comp.interval_list
similarity index 100%
rename from testdata/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test2_comp.interval_list
rename to src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test2_comp.interval_list
diff --git a/testdata/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test3_comp.interval_list b/src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test3_comp.interval_list
similarity index 100%
rename from testdata/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test3_comp.interval_list
rename to src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test3_comp.interval_list
diff --git a/testdata/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test4_comp.interval_list b/src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test4_comp.interval_list
similarity index 100%
rename from testdata/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test4_comp.interval_list
rename to src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/test4_comp.interval_list
diff --git a/testdata/htsjdk/variant/variantFilter01.js b/src/test/resources/htsjdk/variant/variantFilter01.js
similarity index 100%
rename from testdata/htsjdk/variant/variantFilter01.js
rename to src/test/resources/htsjdk/variant/variantFilter01.js
diff --git a/testdata/htsjdk/variant/variantFilter02.js b/src/test/resources/htsjdk/variant/variantFilter02.js
similarity index 100%
rename from testdata/htsjdk/variant/variantFilter02.js
rename to src/test/resources/htsjdk/variant/variantFilter02.js
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C1.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C1.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C1.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C1.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C10.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C10.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C10.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C10.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C11.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C11.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C11.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C11.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C12.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C12.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C12.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C12.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C13.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C13.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C13.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C13.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C14.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C14.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C14.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C14.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C15.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C15.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C15.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C15.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C16.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C16.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C16.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C16.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C17.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C17.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C17.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C17.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C18.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C18.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C18.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C18.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C19.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C19.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C19.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C19.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C2.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C2.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C2.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C2.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C20.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C20.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C20.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C20.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C21.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C21.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C21.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C21.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C22.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C22.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C22.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C22.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C23.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C23.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C23.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C23.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C24.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C24.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C24.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C24.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C25.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C25.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C25.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C25.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C26.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C26.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C26.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C26.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C27.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C27.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C27.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C27.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C28.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C28.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C28.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C28.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C29.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C29.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C29.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C29.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C3.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C3.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C3.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C3.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C30.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C30.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C30.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C30.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C31.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C31.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C31.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C31.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C32.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C32.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C32.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C32.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C33.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C33.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C33.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C33.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C34.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C34.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C34.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C34.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C35.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C35.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C35.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C35.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C36.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C36.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C36.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C36.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C37.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C37.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C37.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C37.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C38.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C38.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C38.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C38.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C39.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C39.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C39.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C39.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C4.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C4.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C4.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C4.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C40.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C40.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C40.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C40.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C41.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C41.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C41.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C41.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C42.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C42.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C42.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C42.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C43.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C43.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C43.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C43.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C44.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C44.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C44.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C44.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C45.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C45.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C45.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C45.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C46.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C46.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C46.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C46.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C47.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C47.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C47.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C47.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C48.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C48.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C48.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C48.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C49.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C49.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C49.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C49.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C5.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C5.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C5.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C5.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C50.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C50.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C50.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C50.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C51.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C51.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C51.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C51.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C52.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C52.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C52.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C52.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C53.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C53.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C53.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C53.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C54.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C54.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C54.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C54.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C55.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C55.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C55.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C55.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C56.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C56.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C56.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C56.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C57.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C57.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C57.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C57.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C58.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C58.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C58.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C58.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C59.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C59.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C59.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C59.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C6.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C6.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C6.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C6.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C60.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C60.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C60.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C60.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C61.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C61.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C61.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C61.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C62.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C62.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C62.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C62.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C63.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C63.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C63.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C63.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C64.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C64.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C64.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C64.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C65.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C65.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C65.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C65.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C66.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C66.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C66.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C66.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C7.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C7.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C7.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C7.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C8.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C8.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C8.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C8.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C9.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C9.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C9.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/C9.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/s_1_1101.filter b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/s_1_1101.filter
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/s_1_1101.filter
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/BaseCalls/L001/s_1_1101.filter
diff --git a/testdata/net/sf/picard/illumina/125T125T/Data/Intensities/L001/s_1_1101.clocs b/src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/L001/s_1_1101.clocs
similarity index 100%
rename from testdata/net/sf/picard/illumina/125T125T/Data/Intensities/L001/s_1_1101.clocs
rename to src/test/resources/net/sf/picard/illumina/125T125T/Data/Intensities/L001/s_1_1101.clocs
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C1.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C1.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C1.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C1.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C10.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C10.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C10.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C10.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C11.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C11.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C11.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C11.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C12.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C12.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C12.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C12.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C13.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C13.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C13.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C13.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C14.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C14.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C14.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C14.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C15.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C15.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C15.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C15.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C16.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C16.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C16.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C16.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C17.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C17.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C17.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C17.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C18.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C18.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C18.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C18.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C19.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C19.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C19.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C19.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C2.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C2.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C2.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C2.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C20.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C20.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C20.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C20.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C21.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C21.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C21.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C21.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C22.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C22.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C22.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C22.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C23.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C23.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C23.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C23.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C24.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C24.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C24.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C24.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C25.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C25.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C25.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C25.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C26.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C26.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C26.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C26.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C27.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C27.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C27.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C27.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C28.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C28.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C28.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C28.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C29.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C29.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C29.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C29.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C3.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C3.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C3.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C3.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C30.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C30.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C30.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C30.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C31.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C31.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C31.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C31.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C32.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C32.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C32.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C32.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C33.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C33.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C33.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C33.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C34.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C34.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C34.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C34.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C35.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C35.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C35.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C35.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C36.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C36.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C36.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C36.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C37.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C37.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C37.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C37.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C38.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C38.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C38.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C38.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C39.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C39.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C39.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C39.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C4.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C4.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C4.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C4.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C40.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C40.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C40.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C40.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C41.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C41.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C41.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C41.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C42.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C42.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C42.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C42.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C43.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C43.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C43.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C43.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C44.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C44.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C44.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C44.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C45.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C45.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C45.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C45.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C46.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C46.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C46.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C46.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C47.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C47.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C47.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C47.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C48.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C48.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C48.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C48.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C49.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C49.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C49.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C49.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C5.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C5.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C5.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C5.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C50.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C50.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C50.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C50.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C51.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C51.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C51.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C51.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C52.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C52.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C52.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C52.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C53.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C53.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C53.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C53.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C54.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C54.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C54.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C54.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C55.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C55.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C55.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C55.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C56.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C56.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C56.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C56.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C57.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C57.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C57.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C57.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C58.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C58.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C58.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C58.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C6.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C6.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C6.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C6.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C7.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C7.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C7.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C7.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C8.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C8.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C8.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C8.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C9.1/s_1_1101.bcl b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C9.1/s_1_1101.bcl
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C9.1/s_1_1101.bcl
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/C9.1/s_1_1101.bcl
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/s_1_1101.filter b/src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/s_1_1101.filter
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/s_1_1101.filter
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/Data/Intensities/BaseCalls/L001/s_1_1101.filter
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/sams/ACAGGTAT.sam b/src/test/resources/net/sf/picard/illumina/25T8B25T/sams/ACAGGTAT.sam
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/sams/ACAGGTAT.sam
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/sams/ACAGGTAT.sam
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/sams/ACAGTTGA.sam b/src/test/resources/net/sf/picard/illumina/25T8B25T/sams/ACAGTTGA.sam
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/sams/ACAGTTGA.sam
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/sams/ACAGTTGA.sam
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/sams/ACTAAGAC.sam b/src/test/resources/net/sf/picard/illumina/25T8B25T/sams/ACTAAGAC.sam
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/sams/ACTAAGAC.sam
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/sams/ACTAAGAC.sam
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/sams/AGGTCGCA.sam b/src/test/resources/net/sf/picard/illumina/25T8B25T/sams/AGGTCGCA.sam
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/sams/AGGTCGCA.sam
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/sams/AGGTCGCA.sam
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/sams/ATTATCAA.sam b/src/test/resources/net/sf/picard/illumina/25T8B25T/sams/ATTATCAA.sam
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/sams/ATTATCAA.sam
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/sams/ATTATCAA.sam
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/sams/CAATAGTC.sam b/src/test/resources/net/sf/picard/illumina/25T8B25T/sams/CAATAGTC.sam
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/sams/CAATAGTC.sam
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/sams/CAATAGTC.sam
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/sams/CAGCGGTA.sam b/src/test/resources/net/sf/picard/illumina/25T8B25T/sams/CAGCGGTA.sam
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/sams/CAGCGGTA.sam
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/sams/CAGCGGTA.sam
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/sams/CGCTATGT.sam b/src/test/resources/net/sf/picard/illumina/25T8B25T/sams/CGCTATGT.sam
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/sams/CGCTATGT.sam
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/sams/CGCTATGT.sam
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/sams/CTGTAATC.sam b/src/test/resources/net/sf/picard/illumina/25T8B25T/sams/CTGTAATC.sam
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/sams/CTGTAATC.sam
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/sams/CTGTAATC.sam
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/sams/GACCAGGA.sam b/src/test/resources/net/sf/picard/illumina/25T8B25T/sams/GACCAGGA.sam
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/sams/GACCAGGA.sam
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/sams/GACCAGGA.sam
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/sams/GCCGTCGA.sam b/src/test/resources/net/sf/picard/illumina/25T8B25T/sams/GCCGTCGA.sam
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/sams/GCCGTCGA.sam
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/sams/GCCGTCGA.sam
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/sams/GCCTAGCC.sam b/src/test/resources/net/sf/picard/illumina/25T8B25T/sams/GCCTAGCC.sam
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/sams/GCCTAGCC.sam
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/sams/GCCTAGCC.sam
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/sams/N.sam b/src/test/resources/net/sf/picard/illumina/25T8B25T/sams/N.sam
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/sams/N.sam
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/sams/N.sam
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/sams/TGCAAGTA.sam b/src/test/resources/net/sf/picard/illumina/25T8B25T/sams/TGCAAGTA.sam
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/sams/TGCAAGTA.sam
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/sams/TGCAAGTA.sam
diff --git a/testdata/net/sf/picard/illumina/25T8B25T/sams/TGTAATCA.sam b/src/test/resources/net/sf/picard/illumina/25T8B25T/sams/TGTAATCA.sam
similarity index 100%
rename from testdata/net/sf/picard/illumina/25T8B25T/sams/TGTAATCA.sam
rename to src/test/resources/net/sf/picard/illumina/25T8B25T/sams/TGTAATCA.sam
diff --git a/src/tests/resources/testng.xml b/src/test/resources/testng.xml
similarity index 100%
rename from src/tests/resources/testng.xml
rename to src/test/resources/testng.xml
diff --git a/testdata/tribble/tmp/.gitignore b/src/test/resources/tribble/tmp/.gitignore
similarity index 100%
rename from testdata/tribble/tmp/.gitignore
rename to src/test/resources/tribble/tmp/.gitignore
diff --git a/src/tests/java/htsjdk/samtools/BAMCigarOverflowTest.java b/src/tests/java/htsjdk/samtools/BAMCigarOverflowTest.java
deleted file mode 100644
index 74f44a8..0000000
--- a/src/tests/java/htsjdk/samtools/BAMCigarOverflowTest.java
+++ /dev/null
@@ -1,29 +0,0 @@
-package htsjdk.samtools;
-
-import htsjdk.samtools.util.CloserUtil;
-import org.testng.annotations.Test;
-import static org.testng.Assert.assertEquals;
-
-import java.io.File;
-
-/**
- * Test the fix of a bug reported by s-andrews in which the use of an arithmetic rather than a logical right shift in BinaryCigarCodec.binaryCigarToCigarElement()
- * causes an overflow in the CIGAR when reading a BAM file for a read that spans a very large intron.
- */
-public class BAMCigarOverflowTest {
-    private static final File TEST_DATA_DIR = new File("testdata/htsjdk/samtools");
-
-    @Test
-    public void testCigarOverflow() throws Exception {
-        final SamReader reader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.LENIENT).open(new File(TEST_DATA_DIR, "BAMCigarOverflowTest/CigarOverflowTest.bam"));
-
-        //Load the single read from the BAM file.
-        final SAMRecord testBAMRecord = reader.iterator().next();
-        CloserUtil.close(reader);
-
-        //The BAM file that exposed the bug triggered a SAM validation error because the bin field of the BAM record did not equal the computed value. Here we test for this error.
-        //Cast to int to avoid an ambiguity in the assertEquals() call between assertEquals(int,int) and assertEquals(Object,Object).
-        assertEquals(testBAMRecord.computeIndexingBin(), (int) testBAMRecord.getIndexingBin());
-    }
-
-}
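
The Javadoc of the deleted BAMCigarOverflowTest above describes a fix for the use of an arithmetic rather than a logical right shift in BinaryCigarCodec.binaryCigarToCigarElement(). As an illustrative aside, the minimal Java sketch below shows why the distinction matters for BAM's packed CIGAR encoding (operator in the low 4 bits, length in the upper 28 bits); the 200,000,000-base 'N' element and the class/variable names are hypothetical stand-ins for a read spanning a very large intron, not code taken from BinaryCigarCodec.

    // Sketch: a very long CIGAR element sets the sign bit of the packed 32-bit
    // value, so an arithmetic shift (>>) sign-extends and yields a negative
    // length, while a logical shift (>>>) recovers the length correctly.
    public class CigarShiftSketch {
        public static void main(String[] args) {
            final int length = 200_000_000;         // hypothetical element length (~200 Mb intron skip)
            final int opN = 3;                      // BAM CIGAR op code 3 = 'N' (skipped region)
            final int packed = (length << 4) | opN; // negative when interpreted as a signed int
            System.out.println(packed >> 4);        // buggy decode: sign-extended, negative length
            System.out.println(packed >>> 4);       // correct decode: 200000000
        }
    }

In the deleted test above, the overflow surfaced indirectly: the miscomputed CIGAR made the record's computed indexing bin disagree with the stored bin field, which is what the assertEquals on computeIndexingBin() checked.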
diff --git a/src/tests/java/htsjdk/samtools/BAMFileIndexTest.java b/src/tests/java/htsjdk/samtools/BAMFileIndexTest.java
deleted file mode 100755
index 7299cfc..0000000
--- a/src/tests/java/htsjdk/samtools/BAMFileIndexTest.java
+++ /dev/null
@@ -1,527 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2010 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools;
-
-import htsjdk.samtools.util.CloseableIterator;
-import htsjdk.samtools.util.CloserUtil;
-import htsjdk.samtools.util.StopWatch;
-import htsjdk.samtools.util.StringUtil;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.ByteArrayInputStream;
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Random;
-import java.util.Set;
-
-import static org.testng.Assert.*;
-
-/**
- * Test BAM file indexing.
- */
-public class BAMFileIndexTest {
-    private final File BAM_FILE = new File("testdata/htsjdk/samtools/BAMFileIndexTest/index_test.bam");
-    private final boolean mVerbose = false;
-
-    @Test
-    public void testGetSearchBins()
-            throws Exception {
-        final DiskBasedBAMFileIndex bfi = new DiskBasedBAMFileIndex(new File(BAM_FILE.getPath() + ".bai"),
-                null);    // todo can null be replaced with a Sequence dictionary for the BAM_FILE?
-        final long[] bins = bfi.getSpanOverlapping(1, 0, 0).toCoordinateArray();
-        /***
-         if (bins == null) {
-         System.out.println("Search bins: " + bins);
-         return;
-         }
-         System.out.println("Search bins:");
-         for (int i = 0; i < bins.length; i++) {
-         System.out.println(" " + Long.toHexString(bins[i]));
-         }
-         ***/
-        assertNotNull(bins);
-        assertEquals(bins.length, 2);
-    }
-
-    @Test
-    public void testSpecificQueries()
-            throws Exception {
-        assertEquals(runQueryTest(BAM_FILE, "chrM", 10400, 10600, true), 1);
-        assertEquals(runQueryTest(BAM_FILE, "chrM", 10400, 10600, false), 2);
-    }
-
-    @Test(groups = {"slow"})
-    public void testRandomQueries()
-            throws Exception {
-        runRandomTest(BAM_FILE, 1000, new Random());
-    }
-
-    @Test
-    public void testWholeChromosomes() {
-        checkChromosome("chrM", 23);
-        checkChromosome("chr1", 885);
-        checkChromosome("chr2", 837);
-        /***
-         checkChromosome("chr3", 683);
-         checkChromosome("chr4", 633);
-         checkChromosome("chr5", 611);
-         checkChromosome("chr6", 585);
-         checkChromosome("chr7", 521);
-         checkChromosome("chr8", 507);
-         checkChromosome("chr9", 388);
-         checkChromosome("chr10", 477);
-         checkChromosome("chr11", 467);
-         checkChromosome("chr12", 459);
-         checkChromosome("chr13", 327);
-         checkChromosome("chr14", 310);
-         checkChromosome("chr15", 280);
-         checkChromosome("chr16", 278);
-         checkChromosome("chr17", 269);
-         checkChromosome("chr18", 265);
-         checkChromosome("chr19", 178);
-         checkChromosome("chr20", 228);
-         checkChromosome("chr21", 123);
-         checkChromosome("chr22", 121);
-         checkChromosome("chrX", 237);
-         checkChromosome("chrY", 29);
-         ***/
-    }
-
-    @Test
-    public void testQueryUnmapped() {
-        final StopWatch linearScan = new StopWatch();
-        final StopWatch queryUnmapped = new StopWatch();
-        int unmappedCountFromLinearScan = 0;
-        final File bamFile = BAM_FILE;
-        final SamReader reader = SamReaderFactory.makeDefault().open(bamFile);
-        linearScan.start();
-        CloseableIterator<SAMRecord> it = reader.iterator();
-        int mappedCount = 0;
-        while (it.hasNext()) {
-            final SAMRecord rec = it.next();
-            if (rec.getReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX) {
-                unmappedCountFromLinearScan = 1;
-                break;
-            }
-            ++mappedCount;
-        }
-        linearScan.stop();
-        System.out.println("Found start of unmapped reads.  Num mapped reads: " + mappedCount);
-        System.out.println("Time so far: " + linearScan.getElapsedTimeSecs());
-        linearScan.start();
-
-        while (it.hasNext()) {
-            final SAMRecord rec = it.next();
-            Assert.assertEquals(rec.getReferenceIndex().intValue(), SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
-            ++unmappedCountFromLinearScan;
-        }
-        it.close();
-        linearScan.stop();
-        queryUnmapped.start();
-        it = reader.queryUnmapped();
-        int unmappedCountFromQueryUnmapped = 0;
-        while (it.hasNext()) {
-            final SAMRecord rec = it.next();
-            Assert.assertEquals(rec.getReferenceIndex().intValue(), SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
-            ++unmappedCountFromQueryUnmapped;
-        }
-        it.close();
-        queryUnmapped.stop();
-        System.out.println("Linear scan total time: " + linearScan.getElapsedTimeSecs());
-        System.out.println("queryUnmapped time: " + queryUnmapped.getElapsedTimeSecs());
-        System.out.println("Number of unmapped reads:" + unmappedCountFromQueryUnmapped);
-        Assert.assertEquals(unmappedCountFromQueryUnmapped, unmappedCountFromLinearScan);
-        CloserUtil.close(reader);
-    }
-
-    @Test
-    public void testQueryAlignmentStart() {
-        final SamReader reader = SamReaderFactory.makeDefault().open(BAM_FILE);
-        CloseableIterator<SAMRecord> it = reader.queryAlignmentStart("chr1", 202160268);
-        Assert.assertEquals(countElements(it), 2);
-        it.close();
-        it = reader.queryAlignmentStart("chr1", 201595153);
-        Assert.assertEquals(countElements(it), 1);
-        it.close();
-        // There are records that overlap this position, but none that start here
-        it = reader.queryAlignmentStart("chrM", 10400);
-        Assert.assertEquals(countElements(it), 0);
-        it.close();
-        // One past the last chr1 record
-        it = reader.queryAlignmentStart("chr1", 246817509);
-        Assert.assertEquals(countElements(it), 0);
-        it.close();
-        CloserUtil.close(reader);
-    }
-
-    @Test
-    public void testQueryMate() {
-        final SamReader reader = SamReaderFactory.makeDefault().open(BAM_FILE);
-
-        // Both ends mapped
-        SAMRecord rec = getSingleRecordStartingAt(reader, "chrM", 1687);
-        SAMRecord mate = reader.queryMate(rec);
-        assertMate(rec, mate);
-        SAMRecord originalRec = reader.queryMate(mate);
-        Assert.assertEquals(originalRec, rec);
-
-        // One end mapped
-        rec = getSingleRecordStartingAt(reader, "chr11", 48720338);
-        mate = reader.queryMate(rec);
-        assertMate(rec, mate);
-        originalRec = reader.queryMate(mate);
-        Assert.assertEquals(originalRec, rec);
-
-        // Read unmapped, mate mapped
-        final CloseableIterator<SAMRecord> it = reader.queryUnmapped();
-        rec = null;
-        while (it.hasNext()) {
-            final SAMRecord next = it.next();
-            if (next.getReadName().equals("2615")) {
-                rec = next;
-                break;
-            }
-        }
-        it.close();
-        Assert.assertNotNull(rec);
-        mate = reader.queryMate(rec);
-        assertMate(rec, mate);
-        originalRec = reader.queryMate(mate);
-        Assert.assertEquals(originalRec, rec);
-        CloserUtil.close(reader);
-    }
-
-    private void assertMate(final SAMRecord rec, final SAMRecord mate) {
-        Assert.assertNotNull(mate);
-        Assert.assertEquals(mate.getReadName(), rec.getReadName());
-        Assert.assertEquals(mate.getReferenceIndex(), rec.getMateReferenceIndex());
-        if (SAMUtils.getMateCigarString(rec) != null) {
-            Assert.assertEquals(mate.getCigarString(), SAMUtils.getMateCigarString(rec));
-        }
-        Assert.assertEquals(mate.getAlignmentStart(), rec.getMateAlignmentStart());
-        Assert.assertFalse(mate.getFirstOfPairFlag() == rec.getFirstOfPairFlag());
-    }
-
-    /**
-     * Compare the results of a multi-interval query versus the union of the results from each interval done
-     * separately.
-     */
-    @Test(dataProvider = "testMultiIntervalQueryDataProvider")
-    public void testMultiIntervalQuery(final boolean contained) {
-        final List<String> referenceNames = getReferenceNames(BAM_FILE);
-
-        final QueryInterval[] intervals = generateRandomIntervals(referenceNames.size(), 1000, new Random());
-        final Set<SAMRecord> multiIntervalRecords = new HashSet<SAMRecord>();
-        final Set<SAMRecord> singleIntervalRecords = new HashSet<SAMRecord>();
-        final SamReader reader = SamReaderFactory.makeDefault().open(BAM_FILE);
-        for (final QueryInterval interval : intervals) {
-            consumeAll(singleIntervalRecords, reader.query(referenceNames.get(interval.referenceIndex), interval.start, interval.end, contained));
-        }
-
-        final QueryInterval[] optimizedIntervals = QueryInterval.optimizeIntervals(intervals);
-        consumeAll(multiIntervalRecords, reader.query(optimizedIntervals, contained));
-        final Iterator<SAMRecord> singleIntervalRecordIterator = singleIntervalRecords.iterator();
-        boolean failed = false;
-        while (singleIntervalRecordIterator.hasNext()) {
-            final SAMRecord record = singleIntervalRecordIterator.next();
-            if (!multiIntervalRecords.remove(record)) {
-                System.out.println("SingleIntervalQuery found " + record + " but MultiIntervalQuery did not");
-                failed = true;
-            }
-        }
-        for (final SAMRecord record : multiIntervalRecords) {
-            System.out.println("MultiIntervalQuery found " + record + " but SingleIntervalQuery did not");
-            failed = true;
-        }
-        Assert.assertFalse(failed);
-        CloserUtil.close(reader);
-    }
-
-    @DataProvider(name = "testMultiIntervalQueryDataProvider")
-    private Object[][] testMultiIntervalQueryDataProvider() {
-        return new Object[][]{{true}, {false}};
-    }
-
-    @Test
-    public void testUnmappedMateWithCoordinate() throws Exception {
-        // TODO: Use SAMRecordSetBuilder when it is able to create a pair with one end unmapped
-        final String samText = "@HD\tVN:1.0\tSO:coordinate\n" +
-                "@SQ\tSN:chr1\tLN:101\n" +
-                "@SQ\tSN:chr2\tLN:101\n" +
-                "@SQ\tSN:chr3\tLN:101\n" +
-                "@SQ\tSN:chr4\tLN:101\n" +
-                "@SQ\tSN:chr5\tLN:101\n" +
-                "@SQ\tSN:chr6\tLN:101\n" +
-                "@SQ\tSN:chr7\tLN:404\n" +
-                "@SQ\tSN:chr8\tLN:202\n" +
-                "@RG\tID:0\tSM:Hi,Mom!\n" +
-                "@PG\tID:1\tPN:Hey!\tVN:2.0\n" +
-                "one_end_mapped\t73\tchr7\t100\t255\t101M\t*\t0\t0\tCAACAGAAGCNGGNATCTGTGTTTGTGTTTCGGATTTCCTGCTGAANNGNTTNTCGNNTCNNNNNNNNATCCCGATTTCNTTCCGCAGCTNACCTCCCAAN\t)'.*.+2,))&&'&*/)-&*-)&.-)&)&),/-&&..)./.,.).*&&,&.&&-)&&&0*&&&&&&&&/32/,01460&&/6/*0*/2/283//36868/&\tRG:Z:0\n" +
-                "one_end_mapped\t133\tchr7\t100\t0\t*\t=\t100\t0\tNCGCGGCATCNCGATTTCTTTCCGCAGCTAACCTCCCGACAGATCGGCAGCGCGTCGTGTAGGTTATTATGGTACATCTTGTCGTGCGGCNAGAGCATACA\t&/15445666651/566666553+2/14/&/555512+3/)-'/-&-'*+))*''13+3)'//++''/'))/3+&*5++)&'2+&+/*&-&&*)&-./1'1\tRG:Z:0\n";
-        final ByteArrayInputStream bis = new ByteArrayInputStream(StringUtil.stringToBytes(samText));
-        final File bamFile = File.createTempFile("BAMFileIndexTest.", BamFileIoUtils.BAM_FILE_EXTENSION);
-        bamFile.deleteOnExit();
-        final SamReader textReader = SamReaderFactory.makeDefault().open(SamInputResource.of(bis));
-        SAMFileWriterFactory samFileWriterFactory = new SAMFileWriterFactory();
-        samFileWriterFactory.setCreateIndex(true);
-        final SAMFileWriter writer = samFileWriterFactory.makeBAMWriter(textReader.getFileHeader(), true, bamFile);
-        for (final SAMRecord rec : textReader) {
-            writer.addAlignment(rec);
-        }
-        writer.close();
-        final SamReader bamReader = SamReaderFactory.makeDefault().open(bamFile);
-        SamFiles.findIndex(bamFile).deleteOnExit();
-        Assert.assertEquals(countElements(bamReader.queryContained("chr7", 100, 100)), 1);
-        Assert.assertEquals(countElements(bamReader.queryOverlapping("chr7", 100, 100)), 2);
-        bamReader.close();
-        textReader.close();
-    }
-
-    private <E> void consumeAll(final Collection<E> collection, final CloseableIterator<E> iterator) {
-        while (iterator.hasNext()) {
-            collection.add(iterator.next());
-        }
-        iterator.close();
-    }
-
-    private SAMRecord getSingleRecordStartingAt(final SamReader reader, final String sequence, final int alignmentStart) {
-        final CloseableIterator<SAMRecord> it = reader.queryAlignmentStart(sequence, alignmentStart);
-        Assert.assertTrue(it.hasNext());
-        final SAMRecord rec = it.next();
-        Assert.assertNotNull(rec);
-        Assert.assertFalse(it.hasNext());
-        it.close();
-        return rec;
-    }
-
-    private int countElements(final CloseableIterator<SAMRecord> it) {
-        int num = 0;
-        while (it.hasNext()) {
-            it.next();
-            ++num;
-        }
-        it.close();
-        return num;
-    }
-
-    private void checkChromosome(final String name, final int expectedCount) {
-        int count = runQueryTest(BAM_FILE, name, 0, 0, true);
-        assertEquals(count, expectedCount);
-        count = runQueryTest(BAM_FILE, name, 0, 0, false);
-        assertEquals(count, expectedCount);
-    }
-
-    private void runRandomTest(final File bamFile, final int count, final Random generator) {
-        final List<String> referenceNames = getReferenceNames(bamFile);
-        final QueryInterval[] intervals = generateRandomIntervals(referenceNames.size(), count, generator);
-        for (final QueryInterval interval : intervals) {
-            final String refName = referenceNames.get(interval.referenceIndex);
-            final int startPos = interval.start;
-            final int endPos = interval.end;
-            System.out.println("Testing query " + refName + ":" + startPos + "-" + endPos + " ...");
-            try {
-                runQueryTest(bamFile, refName, startPos, endPos, true);
-                runQueryTest(bamFile, refName, startPos, endPos, false);
-            } catch (final Throwable exc) {
-                String message = "Query test failed: " + refName + ":" + startPos + "-" + endPos;
-                message += ": " + exc.getMessage();
-                throw new RuntimeException(message, exc);
-            }
-        }
-    }
-
-    private QueryInterval[] generateRandomIntervals(final int numReferences, final int count, final Random generator) {
-        final QueryInterval[] intervals = new QueryInterval[count];
-        final int maxCoordinate = 10000000;
-        for (int i = 0; i < count; i++) {
-            final int referenceIndex = generator.nextInt(numReferences);
-            final int coord1 = generator.nextInt(maxCoordinate + 1);
-            final int coord2 = generator.nextInt(maxCoordinate + 1);
-            final int startPos = Math.min(coord1, coord2);
-            final int endPos = Math.max(coord1, coord2);
-            intervals[i] = new QueryInterval(referenceIndex, startPos, endPos);
-        }
-
-        return intervals;
-    }
-
-    private List<String> getReferenceNames(final File bamFile) {
-        final SamReader reader = SamReaderFactory.makeDefault().open(bamFile);
-        final List<String> result = new ArrayList<String>();
-        final List<SAMSequenceRecord> seqRecords = reader.getFileHeader().getSequenceDictionary().getSequences();
-        for (final SAMSequenceRecord seqRecord : seqRecords) {
-            if (seqRecord.getSequenceName() != null) {
-                result.add(seqRecord.getSequenceName());
-            }
-        }
-        CloserUtil.close(reader);
-        return result;
-    }
-
-    private int runQueryTest(final File bamFile, final String sequence, final int startPos, final int endPos, final boolean contained) {
-        verbose("Testing query " + sequence + ":" + startPos + "-" + endPos + " ...");
-        final SamReader reader1 = SamReaderFactory.makeDefault().open(bamFile);
-        final SamReader reader2 = SamReaderFactory.makeDefault().open(bamFile);
-        final Iterator<SAMRecord> iter1 = reader1.query(sequence, startPos, endPos, contained);
-        final Iterator<SAMRecord> iter2 = reader2.iterator();
-        // Compare ordered iterators.
-        // Confirm that iter1 is a subset of iter2 that properly filters.
-        SAMRecord record1 = null;
-        SAMRecord record2 = null;
-        int count1 = 0;
-        int count2 = 0;
-        int beforeCount = 0;
-        int afterCount = 0;
-        while (true) {
-            if (record1 == null && iter1.hasNext()) {
-                record1 = iter1.next();
-                count1++;
-            }
-            if (record2 == null && iter2.hasNext()) {
-                record2 = iter2.next();
-                count2++;
-            }
-            // System.out.println("Iteration:");
-            // System.out.println(" Record1 = " + ((record1 == null) ? "null" : record1.format()));
-            // System.out.println(" Record2 = " + ((record2 == null) ? "null" : record2.format()));
-            if (record1 == null && record2 == null) {
-                break;
-            }
-            if (record1 == null) {
-                checkPassesFilter(false, record2, sequence, startPos, endPos, contained);
-                record2 = null;
-                afterCount++;
-                continue;
-            }
-            assertNotNull(record2);
-            final int ordering = compareCoordinates(record1, record2);
-            if (ordering > 0) {
-                checkPassesFilter(false, record2, sequence, startPos, endPos, contained);
-                record2 = null;
-                beforeCount++;
-                continue;
-            }
-            assertTrue(ordering == 0);
-            checkPassesFilter(true, record1, sequence, startPos, endPos, contained);
-            checkPassesFilter(true, record2, sequence, startPos, endPos, contained);
-            assertEquals(record1.getReadName(), record2.getReadName());
-            assertEquals(record1.getReadString(), record2.getReadString());
-            record1 = null;
-            record2 = null;
-        }
-        CloserUtil.close(reader1);
-        CloserUtil.close(reader2);
-        verbose("Checked " + count1 + " records against " + count2 + " records.");
-        verbose("Found " + (count2 - beforeCount - afterCount) + " records matching.");
-        verbose("Found " + beforeCount + " records before.");
-        verbose("Found " + afterCount + " records after.");
-        return count1;
-    }
-
-    private void checkPassesFilter(final boolean expected, final SAMRecord record, final String sequence, final int startPos, final int endPos, final boolean contained) {
-        final boolean passes = passesFilter(record, sequence, startPos, endPos, contained);
-        if (passes != expected) {
-            System.out.println("Error: Record erroneously " +
-                    (passes ? "passed" : "failed") +
-                    " filter.");
-            System.out.println(" Record: " + record.getSAMString());
-            System.out.println(" Filter: " + sequence + ":" +
-                    startPos + "-" + endPos +
-                    " (" + (contained ? "contained" : "overlapping") + ")");
-            assertEquals(passes, expected);
-        }
-    }
-
-    private boolean passesFilter(final SAMRecord record, final String sequence, final int startPos, final int endPos, final boolean contained) {
-        if (record == null) {
-            return false;
-        }
-        if (!safeEquals(record.getReferenceName(), sequence)) {
-            return false;
-        }
-        final int alignmentStart = record.getAlignmentStart();
-        int alignmentEnd = record.getAlignmentEnd();
-        if (alignmentStart <= 0) {
-            assertTrue(record.getReadUnmappedFlag());
-            return false;
-        }
-        if (alignmentEnd <= 0) {
-            // For indexing-only records, treat as single base alignment.
-            assertTrue(record.getReadUnmappedFlag());
-            alignmentEnd = alignmentStart;
-        }
-        if (contained) {
-            if (startPos != 0 && alignmentStart < startPos) {
-                return false;
-            }
-            if (endPos != 0 && alignmentEnd > endPos) {
-                return false;
-            }
-        } else {
-            if (startPos != 0 && alignmentEnd < startPos) {
-                return false;
-            }
-            if (endPos != 0 && alignmentStart > endPos) {
-                return false;
-            }
-        }
-        return true;
-    }
-
-    private int compareCoordinates(final SAMRecord record1, final SAMRecord record2) {
-        final int seqIndex1 = record1.getReferenceIndex();
-        final int seqIndex2 = record2.getReferenceIndex();
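-        // A reference index of -1 marks a record with no alignment coordinate; such records compare as "before" any mapped record here.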
-        if (seqIndex1 == -1) {
-            return ((seqIndex2 == -1) ? 0 : -1);
-        } else if (seqIndex2 == -1) {
-            return 1;
-        }
-        int result = seqIndex1 - seqIndex2;
-        if (result != 0) {
-            return result;
-        }
-        result = record1.getAlignmentStart() - record2.getAlignmentStart();
-        return result;
-    }
-
-    private boolean safeEquals(final Object o1, final Object o2) {
-        if (o1 == o2) {
-            return true;
-        } else if (o1 == null || o2 == null) {
-            return false;
-        } else {
-            return o1.equals(o2);
-        }
-    }
-
-    private void verbose(final String text) {
-        if (mVerbose) {
-            System.out.println("# " + text);
-        }
-    }
-}
diff --git a/src/tests/java/htsjdk/samtools/BAMIndexWriterTest.java b/src/tests/java/htsjdk/samtools/BAMIndexWriterTest.java
deleted file mode 100644
index c907980..0000000
--- a/src/tests/java/htsjdk/samtools/BAMIndexWriterTest.java
+++ /dev/null
@@ -1,227 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2010 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools;
-
-import htsjdk.samtools.util.CloserUtil;
-import htsjdk.samtools.util.IOUtil;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.IOException;
-
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertTrue;
-
-/**
- * Test BAM file index creation
- */
-public class BAMIndexWriterTest {
-    // Two input files for basic test
-    private final String BAM_FILE_LOCATION = "testdata/htsjdk/samtools/BAMFileIndexTest/index_test.bam";
-    private final String BAI_FILE_LOCATION = "testdata/htsjdk/samtools/BAMFileIndexTest/index_test.bam.bai";
-    private final File BAM_FILE = new File(BAM_FILE_LOCATION);
-    private final File BAI_FILE = new File(BAI_FILE_LOCATION);
-
-    private final boolean mVerbose = true;
-
-    @Test(enabled = true)
-    public void testWriteText() throws Exception {
-        // Compare the text form of the c-generated bai file and a java-generated one
-        final File cBaiTxtFile = File.createTempFile("cBai.", ".bai.txt");
-        BAMIndexer.createAndWriteIndex(BAI_FILE, cBaiTxtFile, true);
-        verbose("Wrote textual C BAM Index file " + cBaiTxtFile);
-
-        final File javaBaiFile = File.createTempFile("javaBai.", "java.bai");
-        final File javaBaiTxtFile = new File(javaBaiFile.getAbsolutePath() + ".txt");
-        final SamReader bam = SamReaderFactory.makeDefault().enable(SamReaderFactory.Option.INCLUDE_SOURCE_IN_RECORDS).open(BAM_FILE);
-        BAMIndexer.createIndex(bam, javaBaiFile);
-        verbose("Wrote binary Java BAM Index file " + javaBaiFile);
-
-        // now, turn the bai file into text
-        BAMIndexer.createAndWriteIndex(javaBaiFile, javaBaiTxtFile, true);
-        // and compare them
-        verbose("diff " + javaBaiTxtFile + " " + cBaiTxtFile);
-        IOUtil.assertFilesEqual(javaBaiTxtFile, cBaiTxtFile);
-        cBaiTxtFile.deleteOnExit();
-        javaBaiFile.deleteOnExit();
-        javaBaiTxtFile.deleteOnExit();
-        CloserUtil.close(bam);
-    }
-
-    @Test(enabled = true)
-    public void testWriteBinary() throws Exception {
-        // Compare java-generated bai file with c-generated and sorted bai file
-        final File javaBaiFile = File.createTempFile("javaBai.", ".bai");
-        final SamReader bam = SamReaderFactory.makeDefault().enable(SamReaderFactory.Option.INCLUDE_SOURCE_IN_RECORDS).open(BAM_FILE);
-        BAMIndexer.createIndex(bam, javaBaiFile);
-        verbose("Wrote binary java BAM Index file " + javaBaiFile);
-
-        final File cRegeneratedBaiFile = File.createTempFile("cBai.", ".bai");
-        BAMIndexer.createAndWriteIndex(BAI_FILE, cRegeneratedBaiFile, false);
-        verbose("Wrote sorted C binary BAM Index file " + cRegeneratedBaiFile);
-
-        // Binary compare of javaBaiFile and cRegeneratedBaiFile should be the same
-        verbose("diff " + javaBaiFile + " " + cRegeneratedBaiFile);
-        IOUtil.assertFilesEqual(javaBaiFile, cRegeneratedBaiFile);
-        javaBaiFile.deleteOnExit();
-        cRegeneratedBaiFile.deleteOnExit();
-        CloserUtil.close(bam);
-    }
-
-    @Test(enabled = false, dataProvider = "linearIndexTestData")
-    /** Test linear index at specific references and windows */
-    public void testLinearIndex(String testName, String filepath, int problemReference, int problemWindowStart, int problemWindowEnd, int expectedCount) {
-        final SamReader sfr = SamReaderFactory.makeDefault().open(new File(filepath));
-        for (int problemWindow = problemWindowStart; problemWindow <= problemWindowEnd; problemWindow++) {
-            int count = countAlignmentsInWindow(problemReference, problemWindow, sfr, expectedCount);
-            if (expectedCount != -1)
-                assertEquals(count, expectedCount);
-        }
-        CloserUtil.close(sfr);
-    }
-
-    @DataProvider(name = "linearIndexTestData")
-    public Object[][] getLinearIndexTestData() {
-        // Add data here for test cases, reference, and windows where linear index needs testing
-        return new Object[][]{
-                new Object[]{"index_test", BAM_FILE_LOCATION, 1, 29, 66, -1},  // 29-66
-                new Object[]{"index_test", BAM_FILE_LOCATION, 1, 68, 118, -1},  // 29-66
-
-        };
-    }
-
-    private int countAlignmentsInWindow(int reference, int window, SamReader reader, int expectedCount) {
-        final int SIXTEEN_K = 1 << 14;       // 1 << LinearIndex.BAM_LIDX_SHIFT
-        final int start = window >> 14;             // window * SIXTEEN_K;
-        final int stop = ((window + 1) >> 14) - 1; // (window + 1 * SIXTEEN_K) - 1;
-
-        final String chr = reader.getFileHeader().getSequence(reference).getSequenceName();
-
-        // get records for the entire linear index window
-        SAMRecordIterator iter = reader.queryOverlapping(chr, start, stop);
-        SAMRecord rec;
-        int count = 0;
-        while (iter.hasNext()) {
-            rec = iter.next();
-            count++;
-            if (expectedCount == -1)
-                System.err.println(rec.getReadName());
-        }
-        iter.close();
-        return count;
-    }
-
-
-    @Test(enabled = false, dataProvider = "indexComparisonData")
-    /** Test linear index at all references and windows, comparing with existing index */
-    public void compareLinearIndex(String testName, String bamFile, String bamIndexFile) throws IOException {
-        // compare index generated from bamFile with existing bamIndex file
-        // by testing all the references' windows and comparing the counts
-
-        // 1. generate bai file
-        // 2. count its references
-        // 3. count bamIndex references comparing counts
-
-        // 1. generate bai file
-        File bam = new File(bamFile);
-        assertTrue(bam.exists(), testName + " input bam file doesn't exist: " + bamFile);
-
-        File indexFile1 = createIndexFile(bam);
-        assertTrue(indexFile1.exists(), testName + " generated bam file's index doesn't exist: " + indexFile1);
-
-        // 2. count its references
-        File indexFile2 = new File(bamIndexFile);
-        assertTrue(indexFile2.exists(), testName + " input index file doesn't exist: " + indexFile2);
-
-        final CachingBAMFileIndex existingIndex1 = new CachingBAMFileIndex(indexFile1, null); // todo null sequence dictionary?
-        final CachingBAMFileIndex existingIndex2 = new CachingBAMFileIndex(indexFile2, null);
-        final int n_ref = existingIndex1.getNumberOfReferences();
-        assertEquals(n_ref, existingIndex2.getNumberOfReferences());
-
-        final SamReader reader1 = SamReaderFactory.makeDefault().disable(SamReaderFactory.Option.EAGERLY_DECODE).open(bam);
-
-        final SamReader reader2 = SamReaderFactory.makeDefault().disable(SamReaderFactory.Option.EAGERLY_DECODE).open(bam);
-
-        System.out.println("Comparing " + n_ref + " references in " + indexFile1 + " and " + indexFile2);
-
-        for (int i = 0; i < n_ref; i++) {
-            final BAMIndexContent content1 = existingIndex1.getQueryResults(i);
-            final BAMIndexContent content2 = existingIndex2.getQueryResults(i);
-            if (content1 == null) {
-                assertTrue(content2 == null, "No content for 1st bam index, but content for 2nd at reference " + i);
-                continue;
-            }
-            int[] counts1 = new int[LinearIndex.MAX_LINEAR_INDEX_SIZE];
-            int[] counts2 = new int[LinearIndex.MAX_LINEAR_INDEX_SIZE];
-            LinearIndex li1 = content1.getLinearIndex();
-            LinearIndex li2 = content2.getLinearIndex();
-            // Note: li1 and li2 sizes may differ; the smaller index implicitly has 0's in its missing windows.
-            // 3. count bamIndex references comparing counts
-            int baiSize = Math.max(li1.size(), li2.size());
-            for (int win = 0; win < baiSize; win++) {
-                counts1[win] = countAlignmentsInWindow(i, win, reader1, 0);
-                counts2[win] = countAlignmentsInWindow(i, win, reader2, counts1[win]);
-                assertEquals(counts2[win], counts1[win], "Counts don't match for reference " + i +
-                        " window " + win);
-            }
-        }
-
-        indexFile1.deleteOnExit();
-
-    }
-
-    @DataProvider(name = "indexComparisonData")
-    public Object[][] getIndexComparisonData() {
-        // enter bam file and alternate index file to be tested against generated bam index
-        return new Object[][]{
-                new Object[]{"index_test", BAM_FILE_LOCATION, BAI_FILE_LOCATION},
-        };
-    }
-
-    @Test(expectedExceptions = SAMException.class)
-    public void testRequireCoordinateSortOrder() {
-        SAMFileHeader header = new SAMFileHeader();
-        header.setSortOrder(SAMFileHeader.SortOrder.queryname);
-
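-        // BAM indexing requires coordinate-sorted input, so constructing the indexer with a queryname-sorted header should throw.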
-        new BAMIndexer(new ByteArrayOutputStream(), header);
-    }
-
-    /** generates the index file using the latest java index generating code */
-    private File createIndexFile(File bamFile) throws IOException {
-        final File bamIndexFile = File.createTempFile("Bai.", ".bai");
-        final SamReader bam = SamReaderFactory.makeDefault().open(bamFile);
-        BAMIndexer.createIndex(bam, bamIndexFile);
-        verbose("Wrote BAM Index file " + bamIndexFile);
-        bam.close();
-        return bamIndexFile;
-    }
-
-    private void verbose(final String text) {
-        if (mVerbose) {
-            System.out.println("#BAMIndexWriterTest " + text);
-        }
-    }
-}
diff --git a/src/tests/java/htsjdk/samtools/BAMIteratorTest.java b/src/tests/java/htsjdk/samtools/BAMIteratorTest.java
deleted file mode 100644
index 369d634..0000000
--- a/src/tests/java/htsjdk/samtools/BAMIteratorTest.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools;
-
-import htsjdk.samtools.util.CloseableIterator;
-import htsjdk.samtools.util.CloserUtil;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-
-/**
- * @author alecw at broadinstitute.org
- */
-public class BAMIteratorTest {
-    private static final File TEST_DATA_DIR = new File("testdata/htsjdk/samtools");
-
-    @Test(dataProvider = "dataProvider")
-    public void testIterateEmptyBam(final String bam) throws Exception {
-        final SamReader reader = SamReaderFactory.makeDefault().open(new File(TEST_DATA_DIR, bam));
-        int numRecords = 0;
-        for (final SAMRecord rec : reader) {
-            ++numRecords;
-        }
-        Assert.assertEquals(numRecords, 0);
-        CloserUtil.close(reader);
-    }
-
-    @Test(dataProvider = "dataProvider")
-    public void testQueryUnmappedEmptyBam(final String bam) throws Exception {
-        final SamReader reader = SamReaderFactory.makeDefault().open(new File(TEST_DATA_DIR, bam));
-        final CloseableIterator<SAMRecord> it = reader.queryUnmapped();
-        int numRecords = 0;
-        while (it.hasNext()) {
-            it.next();
-            ++numRecords;
-        }
-        Assert.assertEquals(numRecords, 0);
-        CloserUtil.close(reader);
-    }
-
-    @DataProvider(name = "dataProvider")
-    public Object[][] bams() {
-        return new Object[][]{
-                {"empty.bam"},
-                {"empty_no_empty_gzip_block.bam"}
-        };
-    }
-}
diff --git a/src/tests/java/htsjdk/samtools/BAMRemoteFileTest.java b/src/tests/java/htsjdk/samtools/BAMRemoteFileTest.java
deleted file mode 100644
index 6689efb..0000000
--- a/src/tests/java/htsjdk/samtools/BAMRemoteFileTest.java
+++ /dev/null
@@ -1,326 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools;
-
-import htsjdk.samtools.util.CloserUtil;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Random;
-
-import static org.testng.Assert.*;
-
-/**
- * Test BAM file indexing.
- */
-public class BAMRemoteFileTest {
-    private final File BAM_INDEX_FILE = new File("testdata/htsjdk/samtools/BAMFileIndexTest/index_test.bam.bai");
-    private final File BAM_FILE = new File("testdata/htsjdk/samtools/BAMFileIndexTest/index_test.bam");
-    private final String BAM_URL_STRING = "http://www.broadinstitute.org/~picard/testdata/index_test.bam";
-    private final URL bamURL;
-
-    private final boolean mVerbose = false;
-
-    public BAMRemoteFileTest() throws Exception {
-        bamURL = new URL(BAM_URL_STRING);
-    }
-
-
-    @Test
-    public void testRemoteLocal()
-            throws Exception {
-        runLocalRemoteTest(bamURL, BAM_FILE, "chrM", 10400, 10600, false);
-    }
-
-    @Test
-    public void testSpecificQueries()
-            throws Exception {
-        assertEquals(runQueryTest(bamURL, "chrM", 10400, 10600, true), 1);
-        assertEquals(runQueryTest(bamURL, "chrM", 10400, 10600, false), 2);
-    }
-
-    @Test(enabled = true)
-    public void testRandomQueries()
-            throws Exception {
-        runRandomTest(bamURL, 20, new Random());
-    }
-
-    @Test
-    public void testWholeChromosomes() {
-        checkChromosome("chrM", 23);
-        checkChromosome("chr1", 885);
-        checkChromosome("chr2", 837);
-        /***
-         checkChromosome("chr3", 683);
-         checkChromosome("chr4", 633);
-         checkChromosome("chr5", 611);
-         checkChromosome("chr6", 585);
-         checkChromosome("chr7", 521);
-         checkChromosome("chr8", 507);
-         checkChromosome("chr9", 388);
-         checkChromosome("chr10", 477);
-         checkChromosome("chr11", 467);
-         checkChromosome("chr12", 459);
-         checkChromosome("chr13", 327);
-         checkChromosome("chr14", 310);
-         checkChromosome("chr15", 280);
-         checkChromosome("chr16", 278);
-         checkChromosome("chr17", 269);
-         checkChromosome("chr18", 265);
-         checkChromosome("chr19", 178);
-         checkChromosome("chr20", 228);
-         checkChromosome("chr21", 123);
-         checkChromosome("chr22", 121);
-         checkChromosome("chrX", 237);
-         checkChromosome("chrY", 29);
-         ***/
-    }
-
-
-    private void checkChromosome(final String name, final int expectedCount) {
-        int count = runQueryTest(bamURL, name, 0, 0, true);
-        assertEquals(count, expectedCount);
-        count = runQueryTest(bamURL, name, 0, 0, false);
-        assertEquals(count, expectedCount);
-    }
-
-    private void runRandomTest(final URL bamFile, final int count, final Random generator) throws IOException {
-        final int maxCoordinate = 10000000;
-        final List<String> referenceNames = getReferenceNames(bamFile);
-        for (int i = 0; i < count; i++) {
-            final String refName = referenceNames.get(generator.nextInt(referenceNames.size()));
-            final int coord1 = generator.nextInt(maxCoordinate + 1);
-            final int coord2 = generator.nextInt(maxCoordinate + 1);
-            final int startPos = Math.min(coord1, coord2);
-            final int endPos = Math.max(coord1, coord2);
-            System.out.println("Testing query " + refName + ":" + startPos + "-" + endPos + " ...");
-            try {
-                runQueryTest(bamFile, refName, startPos, endPos, true);
-                runQueryTest(bamFile, refName, startPos, endPos, false);
-            } catch (Throwable exc) {
-                String message = "Query test failed: " + refName + ":" + startPos + "-" + endPos;
-                message += ": " + exc.getMessage();
-                throw new RuntimeException(message, exc);
-            }
-        }
-    }
-
-    private List<String> getReferenceNames(final URL bamFile) throws IOException {
-
-
-        final SamReader reader = SamReaderFactory.makeDefault().open(SamInputResource.of(bamFile.openStream()));
-
-        final List<String> result = new ArrayList<String>();
-        final List<SAMSequenceRecord> seqRecords = reader.getFileHeader().getSequenceDictionary().getSequences();
-        for (final SAMSequenceRecord seqRecord : seqRecords) {
-            if (seqRecord.getSequenceName() != null) {
-                result.add(seqRecord.getSequenceName());
-            }
-        }
-        reader.close();
-        return result;
-    }
-
-    private void runLocalRemoteTest(final URL bamURL, final File bamFile, final String sequence, final int startPos, final int endPos, final boolean contained) {
-        verbose("Testing query " + sequence + ":" + startPos + "-" + endPos + " ...");
-        final SamReader reader1 = SamReaderFactory.makeDefault()
-                .disable(SamReaderFactory.Option.EAGERLY_DECODE)
-                .open(SamInputResource.of(bamFile).index(BAM_INDEX_FILE));
-        final SamReader reader2 = SamReaderFactory.makeDefault()
-                .disable(SamReaderFactory.Option.EAGERLY_DECODE)
-                .open(SamInputResource.of(bamURL).index(BAM_INDEX_FILE));
-        final Iterator<SAMRecord> iter1 = reader1.query(sequence, startPos, endPos, contained);
-        final Iterator<SAMRecord> iter2 = reader2.query(sequence, startPos, endPos, contained);
-
-        final List<SAMRecord> records1 = new ArrayList<SAMRecord>();
-        final List<SAMRecord> records2 = new ArrayList<SAMRecord>();
-
-        while (iter1.hasNext()) {
-            records1.add(iter1.next());
-        }
-        while (iter2.hasNext()) {
-            records2.add(iter2.next());
-        }
-
-        assertTrue(records1.size() > 0);
-        assertEquals(records1.size(), records2.size());
-        for (int i = 0; i < records1.size(); i++) {
-            //System.out.println(records1.get(i).format());
-            assertEquals(records1.get(i).getSAMString(), records2.get(i).getSAMString());
-        }
-
-
-    }
-
-    private int runQueryTest(final URL bamURL, final String sequence, final int startPos, final int endPos, final boolean contained) {
-        verbose("Testing query " + sequence + ":" + startPos + "-" + endPos + " ...");
-        final SamReader reader1 = SamReaderFactory.makeDefault()
-                .disable(SamReaderFactory.Option.EAGERLY_DECODE)
-                .open(SamInputResource.of(bamURL).index(BAM_INDEX_FILE));
-        final SamReader reader2 = SamReaderFactory.makeDefault()
-                .disable(SamReaderFactory.Option.EAGERLY_DECODE)
-                .open(SamInputResource.of(bamURL).index(BAM_INDEX_FILE));
-        final Iterator<SAMRecord> iter1 = reader1.query(sequence, startPos, endPos, contained);
-        final Iterator<SAMRecord> iter2 = reader2.iterator();
-        // Compare ordered iterators.
-        // Confirm that iter1 is a subset of iter2 that properly filters.
-        SAMRecord record1 = null;
-        SAMRecord record2 = null;
-        int count1 = 0;
-        int count2 = 0;
-        int beforeCount = 0;
-        int afterCount = 0;
-        while (true) {
-            if (record1 == null && iter1.hasNext()) {
-                record1 = iter1.next();
-                count1++;
-            }
-            if (record2 == null && iter2.hasNext()) {
-                record2 = iter2.next();
-                count2++;
-            }
-            // System.out.println("Iteration:");
-            // System.out.println(" Record1 = " + ((record1 == null) ? "null" : record1.format()));
-            // System.out.println(" Record2 = " + ((record2 == null) ? "null" : record2.format()));
-            if (record1 == null && record2 == null) {
-                break;
-            }
-            if (record1 == null) {
-                checkPassesFilter(false, record2, sequence, startPos, endPos, contained);
-                record2 = null;
-                afterCount++;
-                continue;
-            }
-            assertNotNull(record2);
-            final int ordering = compareCoordinates(record1, record2);
-            if (ordering > 0) {
-                checkPassesFilter(false, record2, sequence, startPos, endPos, contained);
-                record2 = null;
-                beforeCount++;
-                continue;
-            }
-            assertTrue(ordering == 0);
-            checkPassesFilter(true, record1, sequence, startPos, endPos, contained);
-            checkPassesFilter(true, record2, sequence, startPos, endPos, contained);
-            assertEquals(record1.getReadName(), record2.getReadName());
-            assertEquals(record1.getReadString(), record2.getReadString());
-            record1 = null;
-            record2 = null;
-        }
-        CloserUtil.close(reader1);
-        CloserUtil.close(reader2);
-        verbose("Checked " + count1 + " records against " + count2 + " records.");
-        verbose("Found " + (count2 - beforeCount - afterCount) + " records matching.");
-        verbose("Found " + beforeCount + " records before.");
-        verbose("Found " + afterCount + " records after.");
-        return count1;
-    }
-
-    private void checkPassesFilter(final boolean expected, final SAMRecord record, final String sequence, final int startPos, final int endPos, final boolean contained) {
-        final boolean passes = passesFilter(record, sequence, startPos, endPos, contained);
-        if (passes != expected) {
-            System.out.println("Error: Record erroneously " +
-                    (passes ? "passed" : "failed") +
-                    " filter.");
-            System.out.println(" Record: " + record.getSAMString());
-            System.out.println(" Filter: " + sequence + ":" +
-                    startPos + "-" + endPos +
-                    " (" + (contained ? "contained" : "overlapping") + ")");
-            assertEquals(passes, expected);
-        }
-    }
-
-    private boolean passesFilter(final SAMRecord record, final String sequence, final int startPos, final int endPos, final boolean contained) {
-        if (record == null) {
-            return false;
-        }
-        if (!safeEquals(record.getReferenceName(), sequence)) {
-            return false;
-        }
-        final int alignmentStart = record.getAlignmentStart();
-        int alignmentEnd = record.getAlignmentEnd();
-        if (alignmentStart <= 0) {
-            assertTrue(record.getReadUnmappedFlag());
-            return false;
-        }
-        if (alignmentEnd <= 0) {
-            // For indexing-only records, treat as single base alignment.
-            assertTrue(record.getReadUnmappedFlag());
-            alignmentEnd = alignmentStart;
-        }
-        if (contained) {
-            if (startPos != 0 && alignmentStart < startPos) {
-                return false;
-            }
-            if (endPos != 0 && alignmentEnd > endPos) {
-                return false;
-            }
-        } else {
-            if (startPos != 0 && alignmentEnd < startPos) {
-                return false;
-            }
-            if (endPos != 0 && alignmentStart > endPos) {
-                return false;
-            }
-        }
-        return true;
-    }
-
-    private int compareCoordinates(final SAMRecord record1, final SAMRecord record2) {
-        final int seqIndex1 = record1.getReferenceIndex();
-        final int seqIndex2 = record2.getReferenceIndex();
-        if (seqIndex1 == -1) {
-            return ((seqIndex2 == -1) ? 0 : -1);
-        } else if (seqIndex2 == -1) {
-            return 1;
-        }
-        int result = seqIndex1 - seqIndex2;
-        if (result != 0) {
-            return result;
-        }
-        result = record1.getAlignmentStart() - record2.getAlignmentStart();
-        return result;
-    }
-
-    private boolean safeEquals(final Object o1, final Object o2) {
-        if (o1 == o2) {
-            return true;
-        } else if (o1 == null || o2 == null) {
-            return false;
-        } else {
-            return o1.equals(o2);
-        }
-    }
-
-    private void verbose(final String text) {
-        if (mVerbose) {
-            System.out.println("# " + text);
-        }
-    }
-}
\ No newline at end of file
diff --git a/src/tests/java/htsjdk/samtools/CRAMComplianceTest.java b/src/tests/java/htsjdk/samtools/CRAMComplianceTest.java
deleted file mode 100644
index bea3257..0000000
--- a/src/tests/java/htsjdk/samtools/CRAMComplianceTest.java
+++ /dev/null
@@ -1,150 +0,0 @@
-package htsjdk.samtools;
-
-import htsjdk.samtools.cram.common.CramVersions;
-import htsjdk.samtools.cram.ref.ReferenceSource;
-import htsjdk.samtools.seekablestream.SeekableStream;
-import htsjdk.samtools.util.Log;
-import org.testng.Assert;
-import org.testng.annotations.BeforeTest;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Created by vadim on 28/04/2015.
- */
-public class CRAMComplianceTest {
-
-    @DataProvider(name = "test1")
-    public Object[][] createData1() {
-        return new Object[][]{
-                {"auxf#values"},
-                {"c1#bounds"},
-                {"c1#clip"},
-                {"c1#noseq"},
-                {"c1#pad1"},
-                {"c1#pad2"},
-                {"c1#pad3"},
-                {"c1#unknown"},
-                {"ce#1"},
-                {"ce#2"},
-                {"ce#5b"},
-                {"ce#5"},
-                {"ce#large_seq"},
-                {"ce#supp"},
-                {"ce#tag_depadded"},
-                {"ce#tag_padded"},
-                {"ce#unmap1"},
-                {"ce#unmap2"},
-                {"ce#unmap"},
-                {"xx#blank"},
-                {"xx#large_aux2"},
-                {"xx#large_aux"},
-                {"xx#minimal"},
-                {"xx#pair"},
-                {"xx#rg"},
-                {"xx#triplet"},
-                {"xx#unsorted"},
-        };
-    }
-
-
-    @BeforeTest
-    public void beforeTest() {
-        Log.setGlobalLogLevel(Log.LogLevel.ERROR);
-    }
-
-    private static class TestCase {
-        File bamFile;
-        File refFile;
-        File cramFile_21;
-        File cramFile_30;
-
-        public TestCase(File root, String name) {
-            bamFile = new File(root, name + ".sam");
-            refFile = new File(root, name.split("#")[0] + ".fa");
-            cramFile_21 = new File(root, name + ".2.1.cram");
-            cramFile_30 = new File(root, name + ".3.0.cram");
-        }
-    }
-
-    @Test(dataProvider = "test1")
-    public void test(String name) throws IOException {
-        TestCase t = new TestCase(new File("testdata/htsjdk/samtools/cram/"), name);
-
-        ReferenceSource source = new ReferenceSource(t.refFile);
-        SamReader reader = SamReaderFactory.make().validationStringency(ValidationStringency.SILENT).open(t.bamFile);
-
-        final SAMRecordIterator samRecordIterator = reader.iterator();
-        List<SAMRecord> samRecords = new ArrayList<SAMRecord>();
-        while (samRecordIterator.hasNext())
-            samRecords.add(samRecordIterator.next());
-        SAMFileHeader samFileHeader = reader.getFileHeader();
-        reader.close();
-
-        ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        CRAMFileWriter cramFileWriter = new CRAMFileWriter(baos, source, samFileHeader, name);
-        for (SAMRecord samRecord : samRecords) {
-            cramFileWriter.addAlignment(samRecord);
-        }
-        cramFileWriter.close();
-
-
-        CRAMFileReader cramFileReader = new CRAMFileReader(new ByteArrayInputStream(baos.toByteArray()), (SeekableStream)null, source, ValidationStringency.SILENT);
-        SAMRecordIterator cramFileReaderIterator = cramFileReader.getIterator();
-        for (SAMRecord samRecord : samRecords) {
-            Assert.assertTrue(cramFileReaderIterator.hasNext());
-            SAMRecord restored = cramFileReaderIterator.next();
-            Assert.assertNotNull(restored);
-            assertSameRecords(CramVersions.DEFAULT_CRAM_VERSION.major, samRecord, restored);
-        }
-        Assert.assertFalse(cramFileReaderIterator.hasNext());
-
-        //v2.1 test
-        cramFileReader = new CRAMFileReader(new FileInputStream(t.cramFile_21), (SeekableStream)null, source, ValidationStringency.SILENT);
-        cramFileReaderIterator = cramFileReader.getIterator();
-        for (SAMRecord samRecord : samRecords) {
-            Assert.assertTrue(cramFileReaderIterator.hasNext());
-            SAMRecord restored = cramFileReaderIterator.next();
-            Assert.assertNotNull(restored);
-            assertSameRecords(CramVersions.CRAM_v2_1.major, samRecord, restored);
-        }
-        Assert.assertFalse(cramFileReaderIterator.hasNext());
-
-        //v3.0 test
-        cramFileReader = new CRAMFileReader(new FileInputStream(t.cramFile_30), (SeekableStream)null, source, ValidationStringency.SILENT);
-        cramFileReaderIterator = cramFileReader.getIterator();
-        for (SAMRecord samRecord : samRecords) {
-            Assert.assertTrue(cramFileReaderIterator.hasNext());
-            SAMRecord restored = cramFileReaderIterator.next();
-            Assert.assertNotNull(restored);
-            assertSameRecords(CramVersions.CRAM_v3.major, samRecord, restored);
-        }
-        Assert.assertFalse(cramFileReaderIterator.hasNext());
-    }
-
-    private void assertSameRecords(int majorVersion, SAMRecord record1, SAMRecord record2) {
-        Assert.assertEquals(record2.getFlags(), record1.getFlags());
-        Assert.assertEquals(record2.getReadName(), record1.getReadName());
-        Assert.assertEquals(record2.getReferenceName(), record1.getReferenceName());
-        Assert.assertEquals(record2.getAlignmentStart(), record1.getAlignmentStart());
-
-        /**
-         * Known issue: CRAM v2.1 doesn't handle reads with missing bases correctly, which
-         * causes '*' bases to arise when reading CRAM, so the base comparison is skipped for v2.1.
-         */
-        if (record1.getReadBases() != SAMRecord.NULL_SEQUENCE || majorVersion >= CramVersions.CRAM_v3.major) {
-            Assert.assertEquals(record2.getReadBases(), record1.getReadBases());
-        }
-
-        Assert.assertEquals(record2.getBaseQualities(), record1.getBaseQualities());
-    }
-
-}
diff --git a/src/tests/java/htsjdk/samtools/CRAMContainerStreamWriterTest.java b/src/tests/java/htsjdk/samtools/CRAMContainerStreamWriterTest.java
deleted file mode 100644
index 0846846..0000000
--- a/src/tests/java/htsjdk/samtools/CRAMContainerStreamWriterTest.java
+++ /dev/null
@@ -1,184 +0,0 @@
-package htsjdk.samtools;
-
-import htsjdk.samtools.cram.ref.ReferenceSource;
-import htsjdk.samtools.reference.InMemoryReferenceSequenceFile;
-import htsjdk.samtools.seekablestream.SeekableMemoryStream;
-import htsjdk.samtools.util.CloseableIterator;
-import htsjdk.samtools.util.Log;
-import htsjdk.samtools.util.Log.LogLevel;
-import htsjdk.samtools.util.RuntimeIOException;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.StringWriter;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-
-public class CRAMContainerStreamWriterTest {
-
-    @BeforeClass
-    public void initClass() {
-        Log.setGlobalLogLevel(LogLevel.ERROR);
-    }
-
-    private List<SAMRecord> createRecords(int count) {
-        final List<SAMRecord> list = new ArrayList<SAMRecord>(count);
-        final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
-        if (builder.getHeader().getReadGroups().isEmpty()) {
-            throw new IllegalStateException("Read group expected in the header");
-        }
-
-        int posInRef = 1;
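-        // Alternate pairs between the first two reference sequences (i % 2) while stepping both mate positions forward each iteration.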
-        for (int i = 0; i < count / 2; i++) {
-            builder.addPair(Integer.toString(i), i % 2, posInRef += 1, posInRef += 3);
-        }
-        list.addAll(builder.getRecords());
-
-        Collections.sort(list, new SAMRecordCoordinateComparator());
-
-        return list;
-    }
-
-    private SAMFileHeader createSAMHeader(SAMFileHeader.SortOrder sortOrder) {
-        final SAMFileHeader header = new SAMFileHeader();
-        header.setSortOrder(sortOrder);
-        header.addSequence(new SAMSequenceRecord("chr1", 123));
-        header.addSequence(new SAMSequenceRecord("chr2", 123));
-        SAMReadGroupRecord readGroupRecord = new SAMReadGroupRecord("1");
-        header.addReadGroup(readGroupRecord);
-        return header;
-    }
-
-    private ReferenceSource createReferenceSource() {
-        final byte[] refBases = new byte[1024 * 1024];
-        Arrays.fill(refBases, (byte) 'A');
-        InMemoryReferenceSequenceFile rsf = new InMemoryReferenceSequenceFile();
-        rsf.add("chr1", refBases);
-        rsf.add("chr2", refBases);
-        return new ReferenceSource(rsf);
-    }
-
-    private void doTest(final List<SAMRecord> samRecords, final ByteArrayOutputStream outStream, final OutputStream indexStream) {
-        final SAMFileHeader header = createSAMHeader(SAMFileHeader.SortOrder.coordinate);
-        final ReferenceSource refSource = createReferenceSource();
-
-        final CRAMContainerStreamWriter containerStream = new CRAMContainerStreamWriter(outStream, indexStream, refSource, header, "test");
-        containerStream.writeHeader(header);
-
-        for (SAMRecord record : samRecords) {
-            containerStream.writeAlignment(record);
-        }
-        containerStream.finish(true); // finish and issue EOF
-
-        // read all the records back in
-        final CRAMFileReader cReader = new CRAMFileReader(null, new ByteArrayInputStream(outStream.toByteArray()), refSource);
-        final SAMRecordIterator iterator = cReader.getIterator();
-        int count = 0;
-        while (iterator.hasNext()) {
-            SAMRecord actualRecord = iterator.next();
-            count++;
-        }
-        Assert.assertEquals(count, samRecords.size());
-    }
-
-    @Test(description = "Test CRAMContainerStream no index")
-    public void testCRAMContainerStreamNoIndex() {
-        final List<SAMRecord> samRecords = createRecords(100);
-        final ByteArrayOutputStream outStream = new ByteArrayOutputStream();
-        doTest(samRecords, outStream, null);
-    }
-
-    @Test(description = "Test CRAMContainerStream aggregating multiple partitions")
-    public void testCRAMContainerAggregatePartitions() throws IOException {
-        final SAMFileHeader header = createSAMHeader(SAMFileHeader.SortOrder.coordinate);
-        final ReferenceSource refSource = createReferenceSource();
-
-        // create a bunch of records and write them out to separate streams in groups
-        final int nRecs = 100;
-        final int recsPerPartition = 20;
-        final int nPartitions = nRecs/recsPerPartition;
-
-        final List<SAMRecord> samRecords = createRecords(nRecs);
-        final ArrayList<ByteArrayOutputStream> byteStreamArray = new ArrayList<>(nPartitions);
-
-        for (int partition = 0, recNum = 0; partition < nPartitions; partition++) {
-            byteStreamArray.add(partition, new ByteArrayOutputStream());
-            final CRAMContainerStreamWriter containerStream =
-                    new CRAMContainerStreamWriter(byteStreamArray.get(partition), null, refSource, header, "test");
-
-            // don't write a header for the intermediate streams
-            for (int i = 0; i <  recsPerPartition; i++) {
-                containerStream.writeAlignment(samRecords.get(recNum++));
-            }
-            containerStream.finish(false); // finish but don't issue EOF container
-        }
-
-        // now create the final aggregate file by concatenating the individual streams, but this
-        // time with a CRAM and SAM header at the front and an EOF container at the end
-        final ByteArrayOutputStream aggregateStream = new ByteArrayOutputStream();
-        final CRAMContainerStreamWriter aggregateContainerStreamWriter = new CRAMContainerStreamWriter(aggregateStream, null, refSource, header, "test");
-        aggregateContainerStreamWriter.writeHeader(header); // write out one CRAM and SAM header
-        for (int j = 0; j < nPartitions; j++) {
-            byteStreamArray.get(j).writeTo(aggregateStream);
-        }
-        aggregateContainerStreamWriter.finish(true); // write out the EOF container
-
-        // now iterate through all the records in the aggregate file
-        final CRAMFileReader cReader = new CRAMFileReader(null, new ByteArrayInputStream(aggregateStream.toByteArray()), refSource);
-        final SAMRecordIterator iterator = cReader.getIterator();
-        int count = 0;
-        while (iterator.hasNext()) {
-            Assert.assertEquals(iterator.next().toString(), samRecords.get(count).toString());
-            count++;
-        }
-        Assert.assertEquals(count, nRecs);
-    }
-
-    @Test(description = "Test CRAMContainerStream with index")
-    public void testCRAMContainerStreamWithIndex() throws IOException {
-        final List<SAMRecord> samRecords = createRecords(100);
-        final ByteArrayOutputStream outStream = new ByteArrayOutputStream();
-        final ByteArrayOutputStream indexStream = new ByteArrayOutputStream();
-        doTest(samRecords, outStream, indexStream);
-        outStream.close();
-        indexStream.close();
-
-        // write the file out
-        final File cramTempFile = File.createTempFile("cramContainerStreamTest", ".cram");
-        cramTempFile.deleteOnExit();
-        final OutputStream cramFileStream = new FileOutputStream(cramTempFile);
-        cramFileStream.write(outStream.toByteArray());
-        cramFileStream.close();
-
-        // write the index out
-        final File indexTempFile = File.createTempFile("cramContainerStreamTest", ".bai");
-        indexTempFile.deleteOnExit();
-        OutputStream indexFileStream = new FileOutputStream(indexTempFile);
-        indexFileStream.write(indexStream.toByteArray());
-        indexFileStream.close();
-
-        final ReferenceSource refSource = createReferenceSource();
-        final CRAMFileReader reader = new CRAMFileReader(
-                cramTempFile,
-                indexTempFile,
-                refSource,
-                ValidationStringency.SILENT);
-        final CloseableIterator<SAMRecord> iterator = reader.query(1, 10, 10, true);
-        int count = 0;
-        while (iterator.hasNext()) {
-            SAMRecord actualRecord = iterator.next();
-            count++;
-        }
-        Assert.assertEquals(count, 2);
-    }
-
-}
diff --git a/src/tests/java/htsjdk/samtools/CRAMEdgeCasesTest.java b/src/tests/java/htsjdk/samtools/CRAMEdgeCasesTest.java
deleted file mode 100644
index 4d3b0a7..0000000
--- a/src/tests/java/htsjdk/samtools/CRAMEdgeCasesTest.java
+++ /dev/null
@@ -1,158 +0,0 @@
-package htsjdk.samtools;
-
-import htsjdk.samtools.cram.CRAMException;
-import htsjdk.samtools.cram.ref.ReferenceSource;
-import htsjdk.samtools.reference.InMemoryReferenceSequenceFile;
-import htsjdk.samtools.seekablestream.SeekableStream;
-import htsjdk.samtools.util.Log;
-import org.testng.Assert;
-import org.testng.annotations.BeforeTest;
-import org.testng.annotations.Test;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Iterator;
-
-/**
- * A collection of CRAM test based on round trip comparison of SAMRecord before and after CRAM compression.
- */
-public class CRAMEdgeCasesTest {
-
-    @BeforeTest
-    public void beforeTest() {
-        Log.setGlobalLogLevel(Log.LogLevel.ERROR);
-    }
-
-    @Test
-    public void testUnsorted() throws IOException {
-        final SAMRecordSetBuilder builder = new SAMRecordSetBuilder(false, SAMFileHeader.SortOrder.unsorted);
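-        // Add the two fragments deliberately out of coordinate order to exercise the unsorted code path.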
-        builder.addFrag("1", 0, 2, false);
-        builder.addFrag("1", 0, 1, false);
-        final Collection<SAMRecord> records = builder.getRecords();
-
-        testRecords(records, records.iterator().next().getReadBases());
-    }
-
-    // Unit test for CRAMException:
-    // testing for a contig found in the reads but not in the reference
-    @Test(expectedExceptions = CRAMException.class)
-    public void testContigNotFoundInRef() throws IOException {
-        boolean sawException = false;
-        final File CRAMFile = new File("testdata/htsjdk/samtools/cram/CRAMException/testContigNotInRef.cram");
-        final File refFile = new File("testdata/htsjdk/samtools/cram/CRAMException/testContigNotInRef.fa");
-        final ReferenceSource refSource = new ReferenceSource(refFile);
-        final CRAMIterator iterator = new CRAMIterator(new FileInputStream(CRAMFile), refSource, ValidationStringency.STRICT);
-        while (iterator.hasNext()) {
-            iterator.next();
-        }
-    }
-
-    @Test
-    public void testBizilionTags() throws IOException {
-        final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
-        builder.addFrag("1", 0, 1, false);
-        SAMRecord record = builder.getRecords().iterator().next();
-        for (int i = 0; i < 1000; i++) {
-            char b1 = (char) ('A' + i / 26);
-            char b2 = (char) ('A' + i % 26);
-            String tag = new String(new char[]{b1, b2});
-            if ("RG".equals(tag)) {
-                continue;
-            }
-            record.setAttribute(tag, i);
-        }
-
-        record.setAlignmentStart(1);
-        testSingleRecord(record, record.getReadBases());
-    }
-
-    @Test
-    public void testNullsAndBeyondRef() throws IOException {
-        testSingleRecord("A".getBytes(), "!".getBytes(), "A".getBytes());
-        testSingleRecord("A".getBytes(), SAMRecord.NULL_QUALS, "A".getBytes());
-        testSingleRecord(SAMRecord.NULL_SEQUENCE, SAMRecord.NULL_QUALS, "A".getBytes());
-        testSingleRecord("AAA".getBytes(), "!!!".getBytes(), "A".getBytes());
-    }
-
-    private void testRecords(Collection<SAMRecord> records, byte[] ref) throws IOException {
-        ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        InMemoryReferenceSequenceFile refFile = new InMemoryReferenceSequenceFile();
-        refFile.add("chr1", ref);
-        ReferenceSource source = new ReferenceSource(refFile);
-        final SAMFileHeader header = records.iterator().next().getHeader();
-        CRAMFileWriter cramFileWriter = new CRAMFileWriter(baos, source, header, "whatever");
-
-        Iterator<SAMRecord> it = records.iterator();
-        while (it.hasNext()) {
-            SAMRecord record = it.next();
-            cramFileWriter.addAlignment(record);
-        }
-        cramFileWriter.close();
-
-        CRAMFileReader cramFileReader = new CRAMFileReader(new ByteArrayInputStream(baos.toByteArray()), (SeekableStream) null, source, ValidationStringency.SILENT);
-        final SAMRecordIterator iterator = cramFileReader.getIterator();
-        Assert.assertTrue(iterator.hasNext());
-
-        it = records.iterator();
-        while (it.hasNext()) {
-            SAMRecord record = it.next();
-            SAMRecord s2 = iterator.next();
-            Assert.assertNotNull(s2);
-            Assert.assertEquals(record.getFlags(), s2.getFlags());
-            Assert.assertEquals(record.getReadName(), s2.getReadName());
-            Assert.assertEquals(record.getReferenceName(), s2.getReferenceName());
-            Assert.assertEquals(record.getAlignmentStart(), s2.getAlignmentStart());
-            Assert.assertEquals(record.getReadBases(), s2.getReadBases());
-            Assert.assertEquals(record.getBaseQualities(), s2.getBaseQualities());
-        }
-        Assert.assertFalse(iterator.hasNext());
-    }
-
-    private void testSingleRecord(SAMRecord record, byte[] ref) throws IOException {
-        ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        InMemoryReferenceSequenceFile refFile = new InMemoryReferenceSequenceFile();
-        refFile.add("chr1", ref);
-        ReferenceSource source = new ReferenceSource(refFile);
-        CRAMFileWriter cramFileWriter = new CRAMFileWriter(baos, source, record.getHeader(), "whatever");
-        cramFileWriter.addAlignment(record);
-        cramFileWriter.close();
-
-        CRAMFileReader cramFileReader = new CRAMFileReader(new ByteArrayInputStream(baos.toByteArray()), (SeekableStream) null, source, ValidationStringency.SILENT);
-        final SAMRecordIterator iterator = cramFileReader.getIterator();
-        Assert.assertTrue(iterator.hasNext());
-        SAMRecord s2 = iterator.next();
-        Assert.assertNotNull(s2);
-        Assert.assertFalse(iterator.hasNext());
-
-        Assert.assertEquals(record.getFlags(), s2.getFlags());
-        Assert.assertEquals(record.getReadName(), s2.getReadName());
-        Assert.assertEquals(record.getReferenceName(), s2.getReferenceName());
-        Assert.assertEquals(record.getAlignmentStart(), s2.getAlignmentStart());
-        Assert.assertEquals(record.getReadBases(), s2.getReadBases());
-        Assert.assertEquals(record.getBaseQualities(), s2.getBaseQualities());
-    }
-
-    private void testSingleRecord(byte[] bases, byte[] scores, byte[] ref) throws IOException {
-        SAMFileHeader header = new SAMFileHeader();
-        header.addReadGroup(new SAMReadGroupRecord("1"));
-        header.addSequence(new SAMSequenceRecord("chr1", ref.length));
-        SAMRecord s = new SAMRecord(header);
-        s.setReadBases(bases);
-        s.setBaseQualities(scores);
-        s.setFlags(0);
-        s.setAlignmentStart(1);
-        s.setReferenceName("chr1");
-        s.setReadName("1");
-        if (bases == SAMRecord.NULL_SEQUENCE) {
-            s.setCigarString("10M");
-        } else {
-            s.setCigarString(s.getReadLength() + "M");
-        }
-
-        testSingleRecord(s, ref);
-    }
-}
diff --git a/src/tests/java/htsjdk/samtools/CRAMFileBAIIndexTest.java b/src/tests/java/htsjdk/samtools/CRAMFileBAIIndexTest.java
deleted file mode 100644
index 9108283..0000000
--- a/src/tests/java/htsjdk/samtools/CRAMFileBAIIndexTest.java
+++ /dev/null
@@ -1,294 +0,0 @@
-package htsjdk.samtools;
-
-import htsjdk.samtools.cram.build.ContainerParser;
-import htsjdk.samtools.cram.build.CramContainerIterator;
-import htsjdk.samtools.cram.ref.ReferenceSource;
-import htsjdk.samtools.cram.structure.AlignmentSpan;
-import htsjdk.samtools.cram.structure.Container;
-import htsjdk.samtools.reference.FakeReferenceSequenceFile;
-import htsjdk.samtools.seekablestream.ByteArraySeekableStream;
-import htsjdk.samtools.seekablestream.SeekableFileStream;
-import htsjdk.samtools.util.CloseableIterator;
-import htsjdk.samtools.util.CoordMath;
-import htsjdk.samtools.util.IOUtil;
-import htsjdk.samtools.util.Log;
-import org.testng.Assert;
-import org.testng.annotations.BeforeTest;
-import org.testng.annotations.Test;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.util.Map;
-import java.util.TreeSet;
-
-/**
- * A collection of tests for CRAM BAI index write/read that uses the BAMFileIndexTest/index_test.bam file as the source of the test data.
- * The test creates a BAI index of the CRAM file beforehand.
- * The scan* tests check that, for every record in the BAM file, the query returns the same record from the CRAM file.
- * Created by Vadim on 14/03/2015.
- */
-public class CRAMFileBAIIndexTest {
-    private final File BAM_FILE = new File("testdata/htsjdk/samtools/BAMFileIndexTest/index_test.bam");
-    private File cramFile;
-    private File indexFile;
-    private byte[] cramBytes;
-    private byte[] baiBytes;
-    private ReferenceSource source;
-    private int nofUnmappedReads = 279;
-    private int nofMappedReads = 9721;
-    private int nofReads = 10000;
-    private int nofReadsPerContainer = 1000;
-
-
-    // Mixes testing queryAlignmentStart with each CRAMFileReader constructor.
-    // TODO: separate into individual tests.
-    @Test
-    public void testConstructors () throws IOException {
-        CRAMFileReader reader = new CRAMFileReader(cramFile, indexFile, source, ValidationStringency.SILENT);
-        CloseableIterator<SAMRecord> iterator = reader.queryAlignmentStart("chrM", 1500);
-        Assert.assertTrue(iterator.hasNext());
-        SAMRecord record = iterator.next();
-
-        Assert.assertEquals(record.getReferenceName(), "chrM");
-        Assert.assertTrue(record.getAlignmentStart() >= 1500);
-        reader.close();
-
-        reader = new CRAMFileReader(new SeekableFileStream(cramFile), indexFile, source, ValidationStringency.SILENT);
-        iterator = reader.queryAlignmentStart("chrM", 1500);
-        Assert.assertTrue(iterator.hasNext());
-        record = iterator.next();
-
-        Assert.assertEquals(record.getReferenceName(), "chrM");
-        Assert.assertTrue(record.getAlignmentStart() >= 1500);
-        reader.close();
-
-        reader = new CRAMFileReader(new SeekableFileStream(cramFile), new SeekableFileStream(indexFile), source, ValidationStringency.SILENT);
-        iterator = reader.queryAlignmentStart("chrM", 1500);
-        Assert.assertTrue(iterator.hasNext());
-        record = iterator.next();
-
-        Assert.assertEquals(record.getReferenceName(), "chrM");
-        Assert.assertTrue(record.getAlignmentStart() >= 1500);
-        reader.close();
-
-        reader = new CRAMFileReader(new SeekableFileStream(cramFile), (File)null, source, ValidationStringency.SILENT);
-        try {
-            reader.queryAlignmentStart("chrM", 1500);
-            Assert.fail("Expecting query to fail when there is no index");
-        } catch (SAMException e) {
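-            // expected: querying without an index should throw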
-        }
-        reader.close();
-
-        reader = new CRAMFileReader(new SeekableFileStream(cramFile), (SeekableFileStream)null, source, ValidationStringency.SILENT);
-        try {
-            reader.queryAlignmentStart("chrM", 1500);
-            Assert.fail("Expecting query to fail when there is no index");
-        } catch (SAMException e) {
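-            // expected: querying without an index should throw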
-        }
-        reader.close();
-    }
-
-    // this test repeats the chrM:1500 query already covered by testConstructors above
-    @Test
-    public void test_chrM_1500_location() throws IOException {
-        CRAMFileReader reader = new CRAMFileReader(cramFile, indexFile, source);
-        reader.setValidationStringency(ValidationStringency.SILENT);
-        CloseableIterator<SAMRecord> iterator = reader.queryAlignmentStart("chrM", 1500);
-        Assert.assertTrue(iterator.hasNext());
-        SAMRecord record = iterator.next();
-
-        Assert.assertEquals(record.getReferenceName(), "chrM");
-        Assert.assertTrue(record.getAlignmentStart() >= 1500);
-    }
-
-    @Test
-    public void scanMappedReads() throws IOException {
-        SamReader samReader = SamReaderFactory.makeDefault().open(BAM_FILE);
-        SAMRecordIterator samRecordIterator = samReader.iterator();
-        CRAMFileReader reader = new CRAMFileReader(new ByteArraySeekableStream(cramBytes), new ByteArraySeekableStream(baiBytes), source, ValidationStringency.SILENT);
-        reader.setValidationStringency(ValidationStringency.SILENT);
-
-        int counter = 0;
-        while (samRecordIterator.hasNext()) {
-            SAMRecord samRecord = samRecordIterator.next();
-            if (samRecord.getReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX) break;
-            // test only the 1st and 2nd record in every 100 to speed the test up:
-            if (counter++ % 100 > 1) continue;
-            String s1 = samRecord.getSAMString();
-
-            CloseableIterator<SAMRecord> iterator = reader.queryAlignmentStart(samRecord.getReferenceName(), samRecord.getAlignmentStart());
-            Assert.assertTrue(iterator.hasNext(), counter + ": " + s1);
-            SAMRecord cramRecord = iterator.next();
-
-            String s2 = cramRecord.getSAMString();
-
-            Assert.assertEquals(samRecord.getReferenceName(), cramRecord.getReferenceName(), s1 + s2);
-            // default 'overlap' is true, so test records intersect the query:
-            Assert.assertTrue(CoordMath.overlaps(cramRecord.getAlignmentStart(), cramRecord.getAlignmentEnd(), samRecord.getAlignmentStart(), samRecord.getAlignmentEnd()), s1 + s2);
-        }
-        samRecordIterator.close();
-        reader.close();
-
-        Assert.assertEquals(counter, nofMappedReads);
-    }
-
-    @Test
-    public void testNoStringencyConstructor() throws IOException {
-        final File CRAMFile = new File("testdata/htsjdk/samtools/cram/auxf#values.3.0.cram");
-        final File refFile = new File("testdata/htsjdk/samtools/cram/auxf.fa");
-        ReferenceSource refSource = new ReferenceSource(refFile);
-
-        long start = 0;
-        long end = CRAMFile.length();
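-        // encode the byte-offset boundaries as virtual offsets (file offset shifted into the upper 48 bits)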
-        long[] boundaries = new long[] {start << 16, (end - 1) << 16};
-        final CRAMIterator iterator = new CRAMIterator(new SeekableFileStream(CRAMFile), refSource, boundaries);
-        long count = 0;
-        while (iterator.hasNext()) {
-            count++;
-            iterator.next();
-        }
-        Assert.assertEquals(count, 2);
-    }
-
-    @Test
-    public void testIteratorFromFileSpan_WholeFile() throws IOException {
-        CRAMFileReader reader = new CRAMFileReader(new ByteArraySeekableStream(cramBytes), new ByteArraySeekableStream(baiBytes), source, ValidationStringency.SILENT);
-        reader.setValidationStringency(ValidationStringency.SILENT);
-
-        final SAMFileSpan allContainers = reader.getFilePointerSpanningReads();
-        final CloseableIterator<SAMRecord> iterator = reader.getIterator(allContainers);
-        Assert.assertTrue(iterator.hasNext());
-        int counter = 0;
-        while (iterator.hasNext()) {
-            iterator.next();
-            counter++;
-        }
-        Assert.assertEquals(counter, nofReads);
-    }
-
-    @Test
-    public void testIteratorFromFileSpan_SecondContainer() throws IOException, IllegalAccessException {
-        CramContainerIterator it = new CramContainerIterator(new ByteArrayInputStream(cramBytes));
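-        // skip the first container and grab the second one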
-        it.hasNext();
-        it.next();
-        it.hasNext();
-        Container secondContainer = it.next();
-        Assert.assertNotNull(secondContainer);
-        final Map<Integer, AlignmentSpan> references = new ContainerParser(it.getCramHeader().getSamFileHeader()).getReferences(secondContainer, ValidationStringency.STRICT);
-        it.close();
-        int refId = new TreeSet<Integer>(references.keySet()).iterator().next();
-        final AlignmentSpan alignmentSpan = references.get(refId);
-
-        CRAMFileReader reader = new CRAMFileReader(new ByteArraySeekableStream(cramBytes), new ByteArraySeekableStream(baiBytes), source, ValidationStringency.SILENT);
-        reader.setValidationStringency(ValidationStringency.SILENT);
-
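-        // look up the BAI span covering the alignment range of the second container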
-        final BAMIndex index = reader.getIndex();
-        final SAMFileSpan spanOfSecondContainer = index.getSpanOverlapping(refId, alignmentSpan.getStart(), alignmentSpan.getStart()+ alignmentSpan.getSpan());
-        Assert.assertNotNull(spanOfSecondContainer);
-        Assert.assertFalse(spanOfSecondContainer.isEmpty());
-        Assert.assertTrue(spanOfSecondContainer instanceof BAMFileSpan);
-
-        final CloseableIterator<SAMRecord> iterator = reader.getIterator(spanOfSecondContainer);
-        Assert.assertTrue(iterator.hasNext());
-        int counter = 0;
-        boolean matchFound = false;
-        while (iterator.hasNext()) {
-            final SAMRecord record = iterator.next();
-            if (record.getReferenceIndex().intValue() == refId) {
-                boolean overlaps = CoordMath.overlaps(record.getAlignmentStart(), record.getAlignmentEnd(), alignmentSpan.getStart(), alignmentSpan.getStart()+ alignmentSpan.getSpan());
-                if (overlaps) matchFound = true;
-            }
-            counter++;
-        }
-        Assert.assertTrue(matchFound);
-        Assert.assertTrue(counter <= CRAMContainerStreamWriter.DEFAULT_RECORDS_PER_SLICE);
-    }
-
-    @Test
-    public void testQueryInterval() throws IOException {
-        CRAMFileReader reader = new CRAMFileReader(new ByteArraySeekableStream(cramBytes), new ByteArraySeekableStream(baiBytes), source, ValidationStringency.SILENT);
-        QueryInterval[] query = new QueryInterval[]{new QueryInterval(0, 1519, 1520), new QueryInterval(1, 470535, 470536)};
-        final CloseableIterator<SAMRecord> iterator = reader.query(query, false);
-        Assert.assertTrue(iterator.hasNext());
-        SAMRecord r1 = iterator.next();
-        Assert.assertEquals(r1.getReadName(), "3968040");
-
-        Assert.assertTrue(iterator.hasNext());
-        SAMRecord r2 = iterator.next();
-        Assert.assertEquals(r2.getReadName(), "140419");
-
-        Assert.assertFalse(iterator.hasNext());
-        iterator.close();
-        reader.close();
-    }
-
-    @Test
-    public void scanAllUnmappedReads() throws IOException {
-        SamReader samReader = SamReaderFactory.makeDefault().open(BAM_FILE);
-        CRAMFileReader reader = new CRAMFileReader(new ByteArraySeekableStream(cramBytes), new ByteArraySeekableStream(baiBytes), source, ValidationStringency.SILENT);
-        reader.setValidationStringency(ValidationStringency.SILENT);
-        int counter = 0;
-
-        SAMRecordIterator unmappedSamIterator = samReader.queryUnmapped();
-        CloseableIterator<SAMRecord> unmappedCramIterator = reader.queryUnmapped();
-        while (unmappedSamIterator.hasNext()) {
-            Assert.assertTrue(unmappedCramIterator.hasNext());
-            SAMRecord r1 = unmappedSamIterator.next();
-            SAMRecord r2 = unmappedCramIterator.next();
-            Assert.assertEquals(r1.getReadName(), r2.getReadName());
-            Assert.assertEquals(r1.getBaseQualityString(), r2.getBaseQualityString());
-
-            counter++;
-        }
-        Assert.assertFalse(unmappedCramIterator.hasNext());
-        Assert.assertEquals(counter, nofUnmappedReads);
-
-        reader.close();
-    }
-
-    @BeforeTest
-    public void prepare() throws IOException {
-        Log.setGlobalLogLevel(Log.LogLevel.ERROR);
-        source = new ReferenceSource(new FakeReferenceSequenceFile(SamReaderFactory.makeDefault().getFileHeader(BAM_FILE).getSequenceDictionary().getSequences()));
-        cramBytes = cramFromBAM(BAM_FILE, source);
-        cramFile = File.createTempFile(BAM_FILE.getName(), ".cram");
-        cramFile.deleteOnExit();
-        indexFile = new File(cramFile.getAbsolutePath() + ".bai");
-        indexFile.deleteOnExit();
-        FileOutputStream fos = new FileOutputStream(cramFile);
-        fos.write(cramBytes);
-        fos.close();
-
-        CRAMBAIIndexer.createIndex(new SeekableFileStream(cramFile), indexFile, null, ValidationStringency.STRICT);
-        baiBytes = readFile(indexFile);
-    }
-
-    private static byte[] readFile(File file) throws FileNotFoundException {
-        FileInputStream fis = new FileInputStream(file);
-        ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        IOUtil.copyStream(fis, baos);
-        return baos.toByteArray();
-    }
-
-    private byte[] cramFromBAM(File bamFile, ReferenceSource source) throws IOException {
-        ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        final SamReader reader = SamReaderFactory.makeDefault().open(bamFile);
-        final SAMRecordIterator iterator = reader.iterator();
-        // to reduce container granularity, temporarily override the default records per slice (hacky but effective):
-        int previousValue = CRAMContainerStreamWriter.DEFAULT_RECORDS_PER_SLICE;
-        CRAMContainerStreamWriter.DEFAULT_RECORDS_PER_SLICE = nofReadsPerContainer;
-        CRAMFileWriter writer = new CRAMFileWriter(baos, source, reader.getFileHeader(), bamFile.getName());
-        while (iterator.hasNext()) {
-            SAMRecord record = iterator.next();
-            writer.addAlignment(record);
-        }
-        writer.close();
-        CRAMContainerStreamWriter.DEFAULT_RECORDS_PER_SLICE = previousValue;
-        return baos.toByteArray();
-    }
-}
diff --git a/src/tests/java/htsjdk/samtools/CRAMFileReaderTest.java b/src/tests/java/htsjdk/samtools/CRAMFileReaderTest.java
deleted file mode 100644
index e9db7e8..0000000
--- a/src/tests/java/htsjdk/samtools/CRAMFileReaderTest.java
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2015 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools;
-
-import htsjdk.samtools.cram.ref.ReferenceSource;
-import htsjdk.samtools.reference.InMemoryReferenceSequenceFile;
-import htsjdk.samtools.seekablestream.SeekableFileStream;
-import htsjdk.samtools.util.Log;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import java.io.ByteArrayInputStream;
-import java.io.InputStream;
-import java.io.File;
-import java.io.IOException;
-import java.util.Arrays;
-
-/**
- * Additional tests for CRAMFileReader are in CRAMFileIndexTest
- */
-public class CRAMFileReaderTest {
-
-    private static final File TEST_DATA_DIR = new File("testdata/htsjdk/samtools");
-
-    @BeforeClass
-    public void initClass() {
-        Log.setGlobalLogLevel(Log.LogLevel.ERROR);
-    }
-
-    private ReferenceSource createReferenceSource() {
-        byte[] refBases = new byte[10 * 10];
-        Arrays.fill(refBases, (byte) 'A');
-        InMemoryReferenceSequenceFile rsf = new InMemoryReferenceSequenceFile();
-        rsf.add("chr1", refBases);
-        return new ReferenceSource(rsf);
-    }
-
-    // constructor 1: CRAMFileReader(final File cramFile, final InputStream inputStream)
-
-    @Test(description = "Test CRAMReader 1 reference required", expectedExceptions = IllegalStateException.class)
-    public void testCRAMReader1_ReferenceRequired() {
-        File file = new File(TEST_DATA_DIR, "cram_with_crai_index.cram");
-        InputStream bis = null;
-        // assumes that reference_fasta property is not set and the download service is not enabled
-        new CRAMFileReader(file, bis);
-    }
-
-    // constructor 2: CRAMFileReader(final File cramFile, final InputStream inputStream, final ReferenceSource referenceSource)
-
-    @Test(description = "Test CRAMReader 2 reference required", expectedExceptions = IllegalArgumentException.class)
-    public void testCRAMReader2ReferenceRequired() {
-        File file = new File(TEST_DATA_DIR, "cram_with_crai_index.cram");
-        InputStream bis =  null;
-        new CRAMFileReader(file, bis, null);
-    }
-
-    @Test(description = "Test CRAMReader 2 input required", expectedExceptions = IllegalArgumentException.class)
-    public void testCRAMReader2_InputRequired() {
-        File file = null;
-        InputStream bis =  null;
-        new CRAMFileReader(file, bis, createReferenceSource());
-    }
-
-    // constructor 3: CRAMFileReader(final File cramFile, final File indexFile, final ReferenceSource referenceSource)
-
-    @Test(description = "Test CRAMReader 3 reference required", expectedExceptions = IllegalArgumentException.class)
-    public void testCRAMReader3_RequiredReference() {
-        File inputFile = new File(TEST_DATA_DIR, "cram_with_crai_index.cram");
-        File indexFile = null;
-        ReferenceSource refSource = null;
-        new CRAMFileReader(inputFile, indexFile, refSource);
-    }
-
-    @Test(description = "Test CRAMReader 3 input required", expectedExceptions = IllegalArgumentException.class)
-    public void testCRAMReader3_InputRequired() {
-        File inputFile = null;
-        File indexFile = null;
-        ReferenceSource refSource = null;
-        new CRAMFileReader(inputFile, indexFile, refSource);
-    }
-
-    // constructor 4: CRAMFileReader(final File cramFile, final ReferenceSource referenceSource)
-
-    @Test(description = "Test CRAMReader 4 reference required", expectedExceptions = IllegalArgumentException.class)
-    public void testCRAMReader4_ReferenceRequired() {
-        File inputFile = new File(TEST_DATA_DIR, "cram_with_crai_index.cram");
-        ReferenceSource refSource = null;
-        new CRAMFileReader(inputFile, refSource);
-    }
-
-    @Test(description = "Test CRAMReader 4 input required", expectedExceptions = IllegalArgumentException.class)
-    public void testCRAMReader4_InputRequired() {
-        File inputFile = null;
-        new CRAMFileReader(inputFile, createReferenceSource());
-    }
-
-    // constructor 5: CRAMFileReader(final InputStream inputStream, final SeekableStream indexInputStream,
-    //          final ReferenceSource referenceSource, final ValidationStringency validationStringency)
-    @Test(description = "Test CRAMReader 5 reference required", expectedExceptions = IllegalArgumentException.class)
-    public void testCRAMReader5_ReferenceRequired() throws IOException {
-        InputStream bis = new ByteArrayInputStream(new byte[0]);
-        SeekableFileStream sfs = null;
-        ReferenceSource refSource = null;
-        new CRAMFileReader(bis, sfs, refSource, ValidationStringency.STRICT);
-    }
-
-    @Test(description = "Test CRAMReader 5 input required", expectedExceptions = IllegalArgumentException.class)
-    public void testCRAMReader5_InputRequired() throws IOException {
-        InputStream bis = null;
-        SeekableFileStream sfs = null;
-        new CRAMFileReader(bis, sfs, createReferenceSource(), ValidationStringency.STRICT);
-    }
-
-    // constructor 6: CRAMFileReader(final InputStream stream, final File indexFile, final ReferenceSource referenceSource,
-    //                final ValidationStringency validationStringency)
-    @Test(description = "Test CRAMReader 6 reference required", expectedExceptions = IllegalArgumentException.class)
-    public void testCRAMReader6_ReferenceRequired() throws IOException {
-        InputStream bis = new ByteArrayInputStream(new byte[0]);
-        File file = null;
-        ReferenceSource refSource = null;
-        new CRAMFileReader(bis, file, refSource, ValidationStringency.STRICT);
-    }
-
-    @Test(description = "Test CRAMReader 6 input required", expectedExceptions = IllegalArgumentException.class)
-    public void testCRAMReader6_InputRequired() throws IOException {
-        InputStream bis = null;
-        File file = null;
-        ReferenceSource refSource = null;
-        new CRAMFileReader(bis, file, createReferenceSource(), ValidationStringency.STRICT);
-    }
-
-    // constructor 7: CRAMFileReader(final File cramFile, final File indexFile, final ReferenceSource referenceSource,
-    //                final ValidationStringency validationStringency)
-    @Test(description = "Test CRAMReader 7 reference required", expectedExceptions = IllegalArgumentException.class)
-    public void testCRAMReader7_ReferenceRequired() throws IOException {
-        InputStream bis = new ByteArrayInputStream(new byte[0]);
-        File file = new File(TEST_DATA_DIR, "cram_with_crai_index.cram");
-        ReferenceSource refSource = null;
-        new CRAMFileReader(file, file, refSource, ValidationStringency.STRICT);
-    }
-
-}
diff --git a/src/tests/java/htsjdk/samtools/CRAMFileWriterTest.java b/src/tests/java/htsjdk/samtools/CRAMFileWriterTest.java
deleted file mode 100644
index e09f38c..0000000
--- a/src/tests/java/htsjdk/samtools/CRAMFileWriterTest.java
+++ /dev/null
@@ -1,282 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2010 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools;
-
-import htsjdk.samtools.cram.ref.ReferenceSource;
-import htsjdk.samtools.reference.InMemoryReferenceSequenceFile;
-import htsjdk.samtools.util.Log;
-import htsjdk.samtools.util.Log.LogLevel;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-
-public class CRAMFileWriterTest {
-
-    @BeforeClass
-    public void initClass() {
-        Log.setGlobalLogLevel(LogLevel.ERROR);
-    }
-
-    @Test(description = "Test for lossy CRAM compression invariants.")
-    public void lossyCramInvariantsTest() {
-        doTest(createRecords(1000));
-    }
-
-    @Test(description = "Tests a writing records with null SAMFileHeaders")
-    public void writeRecordsWithNullHeader() throws Exception {
-
-        final List<SAMRecord> samRecs = createRecords(50);
-        for (SAMRecord rec : samRecs) {
-            rec.setHeader(null);
-        }
-        doTest(samRecs);
-    }
-
-    @Test(description = "Tests a unmapped record with sequence and quality fields")
-    public void unmappedWithSequenceAndQualityField() throws Exception {
-        unmappedSequenceAndQualityFieldHelper(true);
-    }
-
-    @Test(description = "Tests a unmapped record with no sequence or quality fields")
-    public void unmappedWithNoSequenceAndQualityField() throws Exception {
-        unmappedSequenceAndQualityFieldHelper(false);
-    }
-
-    private void unmappedSequenceAndQualityFieldHelper(boolean unmappedHasBasesAndQualities) throws Exception {
-        List<SAMRecord> list = new ArrayList<SAMRecord>(2);
-        final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
-        if (builder.getHeader().getReadGroups().isEmpty()) {
-            throw new Exception("Read group expected in the header");
-        }
-
-        builder.setUnmappedHasBasesAndQualities(unmappedHasBasesAndQualities);
-
-        builder.addUnmappedFragment("test1");
-        builder.addUnmappedPair("test2");
-
-        list.addAll(builder.getRecords());
-
-        Collections.sort(list, new SAMRecordCoordinateComparator());
-
-        doTest(list);
-    }
-
-    private List<SAMRecord> createRecords(int count) {
-        List<SAMRecord> list = new ArrayList<SAMRecord>(count);
-        final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
-        if (builder.getHeader().getReadGroups().isEmpty()) {
-            throw new IllegalStateException("Read group expected in the header");
-        }
-
-        int posInRef = 1;
-        for (int i = 0; i < count / 2; i++) {
-            builder.addPair(Integer.toString(i), 0, posInRef += 1,
-                    posInRef += 3);
-        }
-        list.addAll(builder.getRecords());
-
-        Collections.sort(list, new SAMRecordCoordinateComparator());
-
-        return list;
-    }
-
-    private SAMFileHeader createSAMHeader(SAMFileHeader.SortOrder sortOrder) {
-        final SAMFileHeader header = new SAMFileHeader();
-        header.setSortOrder(sortOrder);
-        header.addSequence(new SAMSequenceRecord("chr1", 123));
-        SAMReadGroupRecord readGroupRecord = new SAMReadGroupRecord("1");
-        header.addReadGroup(readGroupRecord);
-        return header;
-    }
-
-    private ReferenceSource createReferenceSource() {
-        byte[] refBases = new byte[1024 * 1024];
-        Arrays.fill(refBases, (byte) 'A');
-        InMemoryReferenceSequenceFile rsf = new InMemoryReferenceSequenceFile();
-        rsf.add("chr1", refBases);
-        return new ReferenceSource(rsf);
-    }
-
-    private void writeRecordsToCRAM(CRAMFileWriter writer, List<SAMRecord> samRecords) {
-        for (SAMRecord record : samRecords) {
-            writer.addAlignment(record);
-        }
-        writer.close();
-    }
-
-    private void validateRecords(final List<SAMRecord> expectedRecords, ByteArrayInputStream is, ReferenceSource referenceSource) {
-        CRAMFileReader cReader = new CRAMFileReader(null, is, referenceSource);
-
-        SAMRecordIterator iterator2 = cReader.getIterator();
-        int index = 0;
-        while (iterator2.hasNext()) {
-            SAMRecord actualRecord = iterator2.next();
-            SAMRecord expectedRecord = expectedRecords.get(index++);
-
-            Assert.assertEquals(actualRecord.getReadName(), expectedRecord.getReadName());
-            Assert.assertEquals(actualRecord.getFlags(), expectedRecord.getFlags());
-            Assert.assertEquals(actualRecord.getAlignmentStart(), expectedRecord.getAlignmentStart());
-            Assert.assertEquals(actualRecord.getAlignmentEnd(), expectedRecord.getAlignmentEnd());
-            Assert.assertEquals(actualRecord.getReferenceName(), expectedRecord.getReferenceName());
-            Assert.assertEquals(actualRecord.getMateAlignmentStart(),
-                    expectedRecord.getMateAlignmentStart());
-            Assert.assertEquals(actualRecord.getMateReferenceName(),
-                    expectedRecord.getMateReferenceName());
-            Assert.assertEquals(actualRecord.getReadBases(), expectedRecord.getReadBases());
-            Assert.assertEquals(actualRecord.getBaseQualities(), expectedRecord.getBaseQualities());
-        }
-        cReader.close();
-    }
-
-    private void doTest(final List<SAMRecord> samRecords) {
-        final SAMFileHeader header = createSAMHeader(SAMFileHeader.SortOrder.coordinate);
-        final ReferenceSource refSource = createReferenceSource();
-        final ByteArrayOutputStream os = new ByteArrayOutputStream();
-
-        CRAMFileWriter writer = new CRAMFileWriter(os, refSource, header, null);
-        writeRecordsToCRAM(writer, samRecords);
-
-        validateRecords(samRecords, new ByteArrayInputStream(os.toByteArray()), refSource);
-    }
-
-    @Test(description = "Test CRAMWriter constructor with index stream")
-    public void testCRAMWriterWithIndex() {
-        final SAMFileHeader header = createSAMHeader(SAMFileHeader.SortOrder.coordinate);
-        final ReferenceSource refSource = createReferenceSource();
-        final ByteArrayOutputStream outStream = new ByteArrayOutputStream();
-        final ByteArrayOutputStream indexStream = new ByteArrayOutputStream();
-
-        final List<SAMRecord> samRecords = createRecords(100);
-        CRAMFileWriter writer = new CRAMFileWriter(outStream, indexStream, refSource, header, null);
-
-        writeRecordsToCRAM(writer, samRecords);
-        validateRecords(samRecords, new ByteArrayInputStream(outStream.toByteArray()), refSource);
-        Assert.assertTrue(indexStream.size() != 0);
-    }
-
-    @Test(description = "Test CRAMWriter constructor with presorted==false")
-    public void testCRAMWriterNotPresorted() {
-        final SAMFileHeader header = createSAMHeader(SAMFileHeader.SortOrder.coordinate);
-        final ReferenceSource refSource = createReferenceSource();
-        final ByteArrayOutputStream outStream = new ByteArrayOutputStream();
-        final ByteArrayOutputStream indexStream = new ByteArrayOutputStream();
-
-        CRAMFileWriter writer = new CRAMFileWriter(outStream, indexStream, false, refSource, header, null);
-
-        // force records to not be coordinate sorted to ensure we're relying on presorted=false
-        final List<SAMRecord> samRecords = createRecords(100);
-        Collections.sort(samRecords, new SAMRecordCoordinateComparator().reversed());
-
-        writeRecordsToCRAM(writer, samRecords);
-
-        // for validation, restore the sort order of the expected records so they match the order of the written records
-        Collections.sort(samRecords, new SAMRecordCoordinateComparator());
-        validateRecords(samRecords, new ByteArrayInputStream(outStream.toByteArray()), refSource);
-        Assert.assertTrue(indexStream.size() != 0);
-    }
-
-    @Test(description = "Test CRAMWriter constructor reference required 1", expectedExceptions = IllegalArgumentException.class)
-    public void testCRAMWriterConstructorRequiredReference_1() {
-        final SAMFileHeader header = createSAMHeader(SAMFileHeader.SortOrder.coordinate);
-        final ByteArrayOutputStream outStream = new ByteArrayOutputStream();
-        new CRAMFileWriter(outStream, null, header, null);
-    }
-
-    @Test(description = "Test CRAMWriter constructor reference required 2", expectedExceptions = IllegalArgumentException.class)
-    public void testCRAMWriterConstructorRequiredReference_2() {
-        final SAMFileHeader header = createSAMHeader(SAMFileHeader.SortOrder.coordinate);
-        final ByteArrayOutputStream outStream = new ByteArrayOutputStream();
-        new CRAMFileWriter(outStream, null, null, header, null);
-    }
-
-    @Test(description = "Test CRAMWriter constructor reference required 3", expectedExceptions = IllegalArgumentException.class)
-    public void testCRAMWriterConstructorRequiredReference_3() {
-        final SAMFileHeader header = createSAMHeader(SAMFileHeader.SortOrder.coordinate);
-        final ByteArrayOutputStream outStream = new ByteArrayOutputStream();
-        new CRAMFileWriter(outStream, null, true, null, header, null);
-    }
-
-    @Test
-    public void test_roundtrip_tlen_preserved() throws IOException {
-        SamReader reader = SamReaderFactory.make().open(new File("testdata/htsjdk/samtools/cram_tlen_reads.sorted.sam"));
-        ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        final ReferenceSource source = new ReferenceSource(new File("testdata/htsjdk/samtools/cram_tlen.fasta"));
-        CRAMFileWriter writer = new CRAMFileWriter(baos, source, reader.getFileHeader(), "test.cram");
-        SAMRecordIterator iterator = reader.iterator();
-        List<SAMRecord> records = new ArrayList<SAMRecord>();
-        while (iterator.hasNext()) {
-            final SAMRecord record = iterator.next();
-            writer.addAlignment(record);
-            records.add(record);
-        }
-        writer.close();
-
-        CRAMFileReader cramReader = new CRAMFileReader(new ByteArrayInputStream(baos.toByteArray()), (File) null, source, ValidationStringency.STRICT);
-        iterator = cramReader.getIterator();
-        int i = 0;
-        while (iterator.hasNext()) {
-            SAMRecord record1 = iterator.next();
-            SAMRecord record2 = records.get(i++);
-            Assert.assertEquals(record1.getInferredInsertSize(), record2.getInferredInsertSize(), record1.getReadName());
-        }
-        Assert.assertEquals(records.size(), i);
-    }
-
-    @Test
-    public void testCRAMQuerySort() throws IOException {
-        final File input = new File("testdata/htsjdk/samtools/cram_query_sorted.cram");
-        final File reference = new File("testdata/htsjdk/samtools/cram_query_sorted.fasta");
-        final File outputFile = File.createTempFile("tmp.", ".cram");
-
-        try (final SamReader reader = SamReaderFactory.makeDefault().referenceSequence(reference).open(input);
-             final SAMFileWriter writer = new SAMFileWriterFactory().makeWriter(reader.getFileHeader().clone(), false, outputFile, reference)) {
-            for (SAMRecord rec : reader) {
-                writer.addAlignment(rec);
-            }
-        }
-
-        try (final SamReader outReader = SamReaderFactory.makeDefault().referenceSequence(reference).open(outputFile)) {
-            String prevName = null;
-            for (final SAMRecord rec : outReader) {
-                if (prevName == null) {
-                    prevName = rec.getReadName();
-                    continue;
-                }
-                // test if the read names are sorted alphabetically:
-                Assert.assertTrue(rec.getReadName().compareTo(prevName) >= 0);
-            }
-        }
-
-    }
-
-}
diff --git a/src/tests/java/htsjdk/samtools/CRAMFileWriterWithIndexTest.java b/src/tests/java/htsjdk/samtools/CRAMFileWriterWithIndexTest.java
deleted file mode 100644
index 3e07076..0000000
--- a/src/tests/java/htsjdk/samtools/CRAMFileWriterWithIndexTest.java
+++ /dev/null
@@ -1,203 +0,0 @@
-package htsjdk.samtools;
-
-import htsjdk.samtools.cram.ref.ReferenceSource;
-import htsjdk.samtools.reference.InMemoryReferenceSequenceFile;
-import htsjdk.samtools.seekablestream.ByteArraySeekableStream;
-import htsjdk.samtools.seekablestream.SeekableStream;
-import htsjdk.samtools.util.CloseableIterator;
-import htsjdk.samtools.util.Log;
-import org.testng.Assert;
-import org.testng.annotations.BeforeTest;
-import org.testng.annotations.Test;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import java.util.Random;
-
-/**
- * Created by vadim on 23/03/2015.
- */
-public class CRAMFileWriterWithIndexTest {
-    private byte[] cramBytes;
-    private byte[] indexBytes;
-    private InMemoryReferenceSequenceFile rsf;
-    private ReferenceSource source;
-    private SAMFileHeader header;
-
-    @Test
-    public void test() throws IOException {
-        CRAMFileReader reader = new CRAMFileReader(new ByteArraySeekableStream(cramBytes), new ByteArraySeekableStream(indexBytes), source, ValidationStringency.SILENT);
-        for (SAMSequenceRecord sequenceRecord : reader.getFileHeader().getSequenceDictionary().getSequences()) {
-            final CloseableIterator<SAMRecord> iterator = reader.queryAlignmentStart(sequenceRecord.getSequenceName(), 1);
-            Assert.assertNotNull(iterator);
-            Assert.assertTrue(iterator.hasNext());
-            SAMRecord record = iterator.next();
-            Assert.assertEquals(record.getReferenceName(), sequenceRecord.getSequenceName());
-            Assert.assertEquals(record.getAlignmentStart(), 1);
-        }
-    }
-
-    private static class TabuRegionInputStream extends SeekableStream {
-        private SeekableStream delegate;
-        private List<Chunk> tabuChunks;
-
-        public TabuRegionInputStream(List<Chunk> tabuChunks, SeekableStream delegate) {
-            this.tabuChunks = tabuChunks;
-            this.delegate = delegate;
-        }
-
-        private boolean isTabu(long position) {
-
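-            // chunk boundaries are virtual offsets; shift right by 16 bits to recover the file byte offset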
-            for (Chunk chunk : tabuChunks) {
-                if ((chunk.getChunkStart() >> 16) < position && position < (chunk.getChunkEnd() >> 16)) return true;
-            }
-
-            return false;
-        }
-
-        @Override
-        public long length() {
-            return delegate.length();
-        }
-
-        @Override
-        public long position() throws IOException {
-            return delegate.position();
-        }
-
-        @Override
-        public void seek(long position) throws IOException {
-            if (isTabu(position)) throw new TabuError();
-            delegate.seek(position);
-        }
-
-        @Override
-        public int read() throws IOException {
-            if (isTabu(position())) throw new TabuError();
-            return delegate.read();
-        }
-
-        @Override
-        public int read(byte[] buffer, int offset, int length) throws IOException {
-            for (long pos = position(); pos < position() + length; pos++)
-                if (isTabu(pos)) throw new TabuError();
-            return delegate.read(buffer, offset, length);
-        }
-
-        @Override
-        public void close() throws IOException {
-            delegate.close();
-        }
-
-        @Override
-        public boolean eof() throws IOException {
-            return delegate.eof();
-        }
-
-        @Override
-        public String getSource() {
-            return delegate.getSource();
-        }
-    }
-
-    private static class TabuError extends RuntimeException {
-
-    }
-
-    /**
-     * This checks that the index is actually used rather than records simply being skipped. The approach is to forbid reading of the first
-     * container and then try to access reads from the first and the second containers. The first attempt should fail but the second should succeed.
-     *
-     * @throws IOException
-     */
-    @Test
-    public void testUnnecessaryIO() throws IOException {
-        BAMIndex index = new CachingBAMFileIndex(new ByteArraySeekableStream(indexBytes), header.getSequenceDictionary());
-        int refID = 0;
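-        // the forbidden (tabu) region spans from the first container of refID up to the first container of the next reference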
-        long start = index.getSpanOverlapping(refID, 1, Integer.MAX_VALUE).getFirstOffset();
-        long end = index.getSpanOverlapping(refID + 1, 1, Integer.MAX_VALUE).getFirstOffset();
-        TabuRegionInputStream tabuIS = new TabuRegionInputStream(Arrays.asList(new Chunk[]{new Chunk(start, end)}), new ByteArraySeekableStream(cramBytes));
-
-        CRAMFileReader reader = new CRAMFileReader(tabuIS, new ByteArraySeekableStream(indexBytes), source, ValidationStringency.SILENT);
-        try {
-            reader.queryAlignmentStart(header.getSequence(refID).getSequenceName(), 1);
-            // attempt to read 1st container must fail:
-            Assert.fail();
-        } catch (TabuError e) {
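-            // expected: the query had to touch the forbidden first container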
-
-        }
-
-        // reading after the 1st container should be ok:
-        refID = 2;
-        final CloseableIterator<SAMRecord> iterator = reader.queryAlignmentStart(header.getSequence(refID).getSequenceName(), 1);
-        Assert.assertNotNull(iterator);
-        Assert.assertTrue(iterator.hasNext());
-    }
-
-    @BeforeTest
-    public void beforeTest() throws Exception {
-        Log.setGlobalLogLevel(Log.LogLevel.ERROR);
-
-        header = new SAMFileHeader();
-        header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
-        SAMReadGroupRecord readGroupRecord = new SAMReadGroupRecord("1");
-
-        rsf = new InMemoryReferenceSequenceFile();
-        int nofSequencesInDictionary = 3;
-        int sequenceLength = 1024 * 1024;
-        for (int i = 0; i < nofSequencesInDictionary; i++)
-            addRandomSequence(header, sequenceLength, rsf);
-
-        source = new ReferenceSource(rsf);
-
-        final SAMRecordSetBuilder builder = new SAMRecordSetBuilder(false, SAMFileHeader.SortOrder.coordinate);
-        builder.setHeader(header);
-        builder.setReadGroup(readGroupRecord);
-        header.addReadGroup(readGroupRecord);
-
-        ByteArrayOutputStream os = new ByteArrayOutputStream();
-        ByteArrayOutputStream indexOS = new ByteArrayOutputStream();
-        CRAMFileWriter writer = new CRAMFileWriter(os, indexOS, source, header, null);
-
-
-        int readPairsPerSequence = CRAMContainerStreamWriter.DEFAULT_RECORDS_PER_SLICE;
-
-        for (SAMSequenceRecord sequenceRecord : header.getSequenceDictionary().getSequences()) {
-            int alignmentStart = 1;
-            for (int i = 0; i < readPairsPerSequence / 2; i++) {
-                builder.addPair(Integer.toString(i), sequenceRecord.getSequenceIndex(), alignmentStart, alignmentStart + 2);
-                alignmentStart++;
-            }
-
-        }
-
-        List<SAMRecord> list = new ArrayList<SAMRecord>(readPairsPerSequence);
-        list.addAll(builder.getRecords());
-        Collections.sort(list, new SAMRecordCoordinateComparator());
-
-        for (SAMRecord record : list)
-            writer.addAlignment(record);
-
-        list.clear();
-        writer.finish();
-        writer.close();
-        cramBytes = os.toByteArray();
-        indexBytes = indexOS.toByteArray();
-    }
-
-    private static void addRandomSequence(SAMFileHeader header, int length, InMemoryReferenceSequenceFile rsf) {
-        String name = String.valueOf(header.getSequenceDictionary().size() + 1);
-        header.addSequence(new SAMSequenceRecord(name, length));
-        byte[] refBases = new byte[length];
-        Random random = new Random();
-        byte[] alphabet = "ACGTN".getBytes();
-        for (int i = 0; i < refBases.length; i++)
-            refBases[i] = alphabet[random.nextInt(alphabet.length)];
-
-        rsf.add(name, refBases);
-    }
-}
diff --git a/src/tests/java/htsjdk/samtools/DownsamplingIteratorTests.java b/src/tests/java/htsjdk/samtools/DownsamplingIteratorTests.java
deleted file mode 100644
index d492f11..0000000
--- a/src/tests/java/htsjdk/samtools/DownsamplingIteratorTests.java
+++ /dev/null
@@ -1,82 +0,0 @@
-package htsjdk.samtools;
-
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.EnumMap;
-import java.util.List;
-import java.util.Random;
-
-import htsjdk.samtools.DownsamplingIteratorFactory.Strategy;
-
-/**
- * Tests for the downsampling iterator class.
- * @author Tim Fennell
- */
-public class DownsamplingIteratorTests {
-    final int NUM_TEMPLATES = 50000;
-    final EnumMap<Strategy, Double> ACCURACY = new EnumMap<Strategy,Double>(Strategy.class){{
-        put(Strategy.HighAccuracy, 0.001);
-        put(Strategy.Chained, 0.005);
-        put(Strategy.ConstantMemory, 0.01);
-    }};
-
-    @Test
-    public void testBasicFunction() {
-        final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
-        final Random r = new Random();
-        for (int i=0; i<NUM_TEMPLATES; ++i) {
-            builder.addPair("pair" + r.nextInt(), r.nextInt(24), r.nextInt(1000000), r.nextInt(1000000));
-        }
-        final Collection<SAMRecord> recs = builder.getRecords();
-
-        runTests("testBasicFunction", recs);
-    }
-
-    private void runTests(final String name, final Collection<SAMRecord> recs) {
-        for (final DownsamplingIteratorFactory.Strategy strategy : DownsamplingIteratorFactory.Strategy.values()) {
-            final double accuracy = ACCURACY.get(strategy);
-
-            for (final double p : new double[]{0, 0.01, 0.1, 0.5, 0.9, 1}) {
-                final DownsamplingIterator iterator = DownsamplingIteratorFactory.make(recs.iterator(), strategy, p, accuracy, 42);
-                final List<SAMRecord> out = new ArrayList<SAMRecord>();
-                while (iterator.hasNext()) out.add(iterator.next());
-
-                final String testcase = name + ": strategy=" + strategy.name() + ", p=" + p + ", accuracy=" + accuracy;
-
-                final double readFraction = iterator.getAcceptedFraction();
-                Assert.assertEquals(out.size(), iterator.getAcceptedCount(), "Mismatched sizes with " + testcase);
-                Assert.assertTrue(readFraction > p - accuracy && readFraction < p + accuracy, "Read fraction " + readFraction + " out of bounds in " + testcase);
-            }
-        }
-    }
-
-    @Test
-    public void testMixOfPairsAndFrags() {
-        final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
-        final Random r = new Random();
-        for (int i=0; i<NUM_TEMPLATES; ++i) {
-            builder.addFrag("frag" + r.nextInt(), r.nextInt(24), r.nextInt(1000000), false);
-            builder.addPair("pair" + r.nextInt(), r.nextInt(24), r.nextInt(1000000), r.nextInt(1000000));
-        }
-
-        final Collection<SAMRecord> recs = builder.getRecords();
-        runTests("testMixOfPairsAndFrags", recs);
-    }
-
-    @Test
-    public void testSecondaryAlignments() {
-        final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
-        final Random r = new Random();
-        for (int i=0; i<NUM_TEMPLATES; ++i) {
-            final int x = r.nextInt();
-            builder.addPair("pair" + x, r.nextInt(24), r.nextInt(1000000), r.nextInt(1000000));
-            builder.addPair("pair" + x, r.nextInt(24), r.nextInt(24), r.nextInt(1000000), r.nextInt(1000000), false, false, "50M", "50M", false, true, true, true, 20);
-        }
-
-        final Collection<SAMRecord> recs = builder.getRecords();
-        runTests("testSecondaryAlignments", recs);
-    }
-}
diff --git a/src/tests/java/htsjdk/samtools/SAMFileReaderTest.java b/src/tests/java/htsjdk/samtools/SAMFileReaderTest.java
deleted file mode 100644
index d0f9d5b..0000000
--- a/src/tests/java/htsjdk/samtools/SAMFileReaderTest.java
+++ /dev/null
@@ -1,170 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2010 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools;
-
-import htsjdk.samtools.cram.CRAMException;
-import htsjdk.samtools.util.CloseableIterator;
-import htsjdk.samtools.util.CloserUtil;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-
-public class SAMFileReaderTest {
-    private static final File TEST_DATA_DIR = new File("testdata/htsjdk/samtools");
-
-    @Test(dataProvider = "variousFormatReaderTestCases")
-    public void variousFormatReaderTest(final String inputFile) {
-        final File input = new File(TEST_DATA_DIR, inputFile);
-        final SamReader reader = SamReaderFactory.makeDefault().open(input);
-        for (final SAMRecord rec : reader) {
-        }
-        CloserUtil.close(reader);
-    }
-
-    @DataProvider(name = "variousFormatReaderTestCases")
-    public Object[][] variousFormatReaderTestCases() {
-        final Object[][] scenarios = new Object[][]{
-                {"block_compressed.sam.gz"},
-                {"uncompressed.sam"},
-                {"compressed.sam.gz"},
-                {"compressed.bam"},
-        };
-        return scenarios;
-    }
-
-    // tests for CRAM indexing
-
-    @Test(dataProvider = "SmallCRAMTest")
-    public void CRAMIndexTest(final String inputFile, final String referenceFile, QueryInterval queryInterval, String expectedReadName) {
-        final File input = new File(TEST_DATA_DIR, inputFile);
-        final File reference = new File(TEST_DATA_DIR, referenceFile);
-        final SamReader reader = SamReaderFactory.makeDefault().referenceSequence(reference).open(input);
-        Assert.assertTrue(reader.hasIndex());
-
-        final CloseableIterator<SAMRecord> iterator = reader.query(new QueryInterval[]{queryInterval}, false);
-        Assert.assertTrue(iterator.hasNext());
-        SAMRecord r1 = iterator.next();
-        Assert.assertEquals(r1.getReadName(), expectedReadName);
-
-        CloserUtil.close(reader);
-    }
-
-    @DataProvider(name = "SmallCRAMTest")
-    public Object[][] CRAMIndexTestData() {
-        final Object[][] testFiles = new Object[][]{
-                {"cram/test.cram", "cram/auxf.fa", new QueryInterval(0, 12, 13), "Jim"},
-                {"cram_with_bai_index.cram", "hg19mini.fasta", new QueryInterval(3, 700, 0), "k"},
-                {"cram_with_crai_index.cram", "hg19mini.fasta", new QueryInterval(2, 350, 0), "i"},
-        };
-        return testFiles;
-    }
-
-    @Test(dataProvider = "NoIndexCRAMTest")
-    public void CRAMNoIndexTest(final String inputFile, final String referenceFile) {
-        final File input = new File(TEST_DATA_DIR, inputFile);
-        final File reference = new File(TEST_DATA_DIR, referenceFile);
-        final SamReader reader = SamReaderFactory.makeDefault().referenceSequence(reference).open(input);
-        Assert.assertFalse(reader.hasIndex());
-        CloserUtil.close(reader);
-    }
-
-    @DataProvider(name = "NoIndexCRAMTest")
-    public Object[][] CRAMNoIndexTestData() {
-        final Object[][] testFiles = new Object[][]{
-                {"cram/test2.cram", "cram/auxf.fa"},
-        };
-        return testFiles;
-    }
-
-    // Tests for the SAMRecordFactory usage
-    class SAMRecordFactoryTester extends DefaultSAMRecordFactory {
-        int samRecordsCreated;
-        int bamRecordsCreated;
-
-        public SAMRecord createSAMRecord(final SAMFileHeader header) {
-            ++samRecordsCreated;
-            return super.createSAMRecord(header);
-        }
-
-        public BAMRecord createBAMRecord(final SAMFileHeader header, final int referenceSequenceIndex, final int alignmentStart, final short readNameLength, final short mappingQuality, final int indexingBin, final int cigarLen, final int flags, final int readLen, final int mateReferenceSequenceIndex, final int mateAlignmentStart, final int insertSize, final byte[] variableLengthBlock) {
-            ++bamRecordsCreated;
-            return super.createBAMRecord(header, referenceSequenceIndex, alignmentStart, readNameLength, mappingQuality, indexingBin, cigarLen, flags, readLen, mateReferenceSequenceIndex, mateAlignmentStart, insertSize, variableLengthBlock);
-        }
-    }
-
-    @Test(dataProvider = "variousFormatReaderTestCases")
-    public void samRecordFactoryTest(final String inputFile) {
-        final File input = new File(TEST_DATA_DIR, inputFile);
-        final SAMRecordFactoryTester factory = new SAMRecordFactoryTester();
-        final SamReader reader = SamReaderFactory.makeDefault().samRecordFactory(factory).open(input);
-
-        int i = 0;
-        for (final SAMRecord rec : reader) {
-            ++i;
-        }
-        CloserUtil.close(reader);
-
-        Assert.assertTrue(i > 0);
-        if (inputFile.endsWith(".sam") || inputFile.endsWith(".sam.gz")) Assert.assertEquals(factory.samRecordsCreated, i);
-        else if (inputFile.endsWith(".bam")) Assert.assertEquals(factory.bamRecordsCreated, i);
-    }
-
-    @Test(dataProvider = "cramTestCases", expectedExceptions=IllegalStateException.class)
-    public void testReferenceRequiredForCRAM(final String inputFile, final String ignoredReferenceFile) {
-        final File input = new File(TEST_DATA_DIR, inputFile);
-        final SamReader reader = SamReaderFactory.makeDefault().open(input);
-        for (final SAMRecord rec : reader) {
-        }
-        CloserUtil.close(reader);
-    }
-
-    @DataProvider(name = "cramTestCases")
-    public Object[][] cramTestPositiveCases() {
-        final Object[][] scenarios = new Object[][]{
-                {"cram_with_bai_index.cram", "hg19mini.fasta"},
-                {"cram_with_crai_index.cram", "hg19mini.fasta"},
-        };
-        return scenarios;
-    }
-
-    @Test(dataProvider = "cramTestCases")
-    public void testIterateCRAMWithIndex(final String inputFile, final String referenceFile) {
-        final File input = new File(TEST_DATA_DIR, inputFile);
-        final File reference = new File(TEST_DATA_DIR, referenceFile);
-        final SamReader reader = SamReaderFactory.makeDefault().referenceSequence(reference).open(input);
-        for (final SAMRecord rec : reader) {
-        }
-        CloserUtil.close(reader);
-    }
-
-    @Test
-    public void samRecordFactoryNullHeaderTest() {
-        final SAMRecordFactory factory = new DefaultSAMRecordFactory();
-        final SAMRecord samRec = factory.createSAMRecord(null);
-        Assert.assertTrue(samRec.getHeader() == null);
-    }
-
-}
diff --git a/src/tests/java/htsjdk/samtools/SAMFileWriterFactoryTest.java b/src/tests/java/htsjdk/samtools/SAMFileWriterFactoryTest.java
deleted file mode 100644
index 9f0447b..0000000
--- a/src/tests/java/htsjdk/samtools/SAMFileWriterFactoryTest.java
+++ /dev/null
@@ -1,317 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2010 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools;
-
-import htsjdk.samtools.cram.build.CramIO;
-import htsjdk.samtools.cram.ref.ReferenceSource;
-import htsjdk.samtools.util.IOUtil;
-import htsjdk.variant.variantcontext.writer.AsyncVariantContextWriter;
-import htsjdk.variant.variantcontext.writer.Options;
-import htsjdk.variant.variantcontext.writer.VariantContextWriter;
-import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.InputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-
-public class SAMFileWriterFactoryTest {
-
-    private static final File TEST_DATA_DIR = new File("testdata/htsjdk/samtools");
-
-    /** PIC-442: Confirm that writing to a special file does not cause an exception when writing additional files. */
-    @Test(groups={"unix"})
-    public void specialFileWriterTest() {
-        createSmallBam(new File("/dev/null"));
-    }
-
-    @Test()
-    public void ordinaryFileWriterTest() throws Exception {
-        final File outputFile = File.createTempFile("tmp.", BamFileIoUtils.BAM_FILE_EXTENSION);
-        outputFile.delete();
-        outputFile.deleteOnExit();
-        createSmallBam(outputFile);
-        final File indexFile = SamFiles.findIndex(outputFile);
-        indexFile.deleteOnExit();
-        final File md5File = new File(outputFile.getParent(), outputFile.getName() + ".md5");
-        md5File.deleteOnExit();
-        Assert.assertTrue(outputFile.length() > 0);
-        Assert.assertTrue(indexFile.length() > 0);
-        Assert.assertTrue(md5File.length() > 0);
-    }
-
-    @Test(description="create a BAM in memory,  should start with GZipInputStream.GZIP_MAGIC")
-    public void inMemoryBam()  throws Exception  {
-    	ByteArrayOutputStream os=new ByteArrayOutputStream();
-    	createSmallBamToOutputStream(os,true);
-    	os.flush();
-    	os.close();
-    	byte blob[]=os.toByteArray();
-        Assert.assertTrue(blob.length > 2);
-        int head = ((int) blob[0] & 0xff) | ((blob[1] << 8 ) & 0xff00 );
-        Assert.assertTrue(java.util.zip.GZIPInputStream.GZIP_MAGIC == head);
-    }
-
-    @Test(description="create a SAM in memory,  should start with '@HD'")
-    public void inMemorySam()  throws Exception  {
-    	ByteArrayOutputStream os=new ByteArrayOutputStream();
-    	createSmallBamToOutputStream(os,false);
-    	os.flush();
-    	os.close();
-    	String sam=new String(os.toByteArray());
-        Assert.assertFalse(sam.isEmpty());
-        Assert.assertTrue(sam.startsWith("@HD\t"),"SAM: bad prefix");
-    }
-
-    @Test(description="Read and then write SAM to verify header attribute ordering does not change depending on JVM version")
-    public void samRoundTrip()  throws Exception  {
-        final File input = new File(TEST_DATA_DIR, "roundtrip.sam");
-
-        final SamReader reader = SamReaderFactory.makeDefault().open(input);
-        final File outputFile = File.createTempFile("roundtrip-out", ".sam");
-        outputFile.delete();
-        outputFile.deleteOnExit();
-        FileOutputStream os = new FileOutputStream(outputFile);
-        final SAMFileWriterFactory factory = new SAMFileWriterFactory();
-        final SAMFileWriter writer = factory.makeSAMWriter(reader.getFileHeader(), false, os);
-        for (SAMRecord rec : reader) {
-            writer.addAlignment(rec);
-        }
-        writer.close();
-        os.close();
-
-        InputStream is = new FileInputStream(input);
-        String originalsam = IOUtil.readFully(is);
-        is.close();
-
-        is = new FileInputStream(outputFile);
-        String writtensam = IOUtil.readFully(is);
-        is.close();
-
-        Assert.assertEquals(writtensam, originalsam);
-    }
-
-    @Test(description="Write SAM records with null SAMFileHeader")
-    public void samNullHeaderRoundTrip()  throws Exception  {
-        final File input = new File(TEST_DATA_DIR, "roundtrip.sam");
-
-        final SamReader reader = SamReaderFactory.makeDefault().open(input);
-        final File outputFile = File.createTempFile("nullheader-out", ".sam");
-        outputFile.delete();
-        outputFile.deleteOnExit();
-        FileOutputStream os = new FileOutputStream(outputFile);
-        final SAMFileWriterFactory factory = new SAMFileWriterFactory();
-        final SAMFileWriter writer = factory.makeSAMWriter(reader.getFileHeader(), false, os);
-        for (SAMRecord rec : reader) {
-            rec.setHeader(null);
-            writer.addAlignment(rec);
-        }
-        writer.close();
-        os.close();
-
-        InputStream is = new FileInputStream(input);
-        String originalsam = IOUtil.readFully(is);
-        is.close();
-
-        is = new FileInputStream(outputFile);
-        String writtensam = IOUtil.readFully(is);
-        is.close();
-
-        Assert.assertEquals(writtensam, originalsam);
-    }
-
-    private void createSmallBam(final File outputFile) {
-        final SAMFileWriterFactory factory = new SAMFileWriterFactory();
-        factory.setCreateIndex(true);
-        factory.setCreateMd5File(true);
-        final SAMFileHeader header = new SAMFileHeader();
-        // index only created if coordinate sorted
-        header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
-        header.addSequence(new SAMSequenceRecord("chr1", 123));
-        final SAMFileWriter writer = factory.makeBAMWriter(header, false, outputFile);
-        fillSmallBam(writer);
-        writer.close();
-    }
-
-
-   private void createSmallBamToOutputStream(final OutputStream outputStream,boolean binary) {
-        final SAMFileWriterFactory factory = new SAMFileWriterFactory();
-        factory.setCreateIndex(false);
-        factory.setCreateMd5File(false);
-        final SAMFileHeader header = new SAMFileHeader();
-        // index only created if coordinate sorted
-        header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
-        header.addSequence(new SAMSequenceRecord("chr1", 123));
-        final SAMFileWriter writer = (binary?
-        			factory.makeBAMWriter(header, false, outputStream):
-        			factory.makeSAMWriter(header, false, outputStream)
-        			);
-        fillSmallBam(writer);
-        writer.close();
-    }
-   
-   private int fillSmallBam(SAMFileWriter writer) {
-       final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
-       builder.addUnmappedFragment("HiMom!");
-       int numRecs = builder.getRecords().size();
-       for (final SAMRecord rec: builder.getRecords()) {
-           writer.addAlignment(rec);
-       }
-       return numRecs;
-    }
-
-    private File prepareOutputFile(String extension) throws IOException {
-        final File outputFile = File.createTempFile("tmp.", extension);
-        outputFile.delete();
-        outputFile.deleteOnExit();
-        return outputFile;
-    }
-
-    // Create a writer factory that creates an index and md5 file and sets the header to coordinate sorted
-    private SAMFileWriterFactory createWriterFactoryWithOptions(SAMFileHeader header) {
-        final SAMFileWriterFactory factory = new SAMFileWriterFactory();
-        factory.setCreateIndex(true);
-        factory.setCreateMd5File(true);
-        // index only created if coordinate sorted
-        header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
-        header.addSequence(new SAMSequenceRecord("chr1", 123));
-        header.addReadGroup(new SAMReadGroupRecord("1"));
-        return factory;
-    }
-
-    private void verifyWriterOutput(File outputFile, ReferenceSource refSource, int nRecs, boolean verifySupplementalFiles) {
-        if (verifySupplementalFiles) {
-            final File indexFile = SamFiles.findIndex(outputFile);
-            indexFile.deleteOnExit();
-            final File md5File = new File(outputFile.getParent(), outputFile.getName() + ".md5");
-            md5File.deleteOnExit();
-            Assert.assertTrue(indexFile.length() > 0);
-            Assert.assertTrue(md5File.length() > 0);
-        }
-
-        SamReaderFactory factory =  SamReaderFactory.makeDefault().validationStringency(ValidationStringency.LENIENT);
-        if (refSource != null) {
-            factory.referenceSource(refSource);
-        }
-        SamReader reader = factory.open(outputFile);
-        SAMRecordIterator it = reader.iterator();
-        int count = 0;
-        for (; it.hasNext(); it.next()) {
-            count++;
-        }
-
-        Assert.assertTrue(count == nRecs);
-    }
-
-    @DataProvider(name="bamOrCramWriter")
-    public Object[][] bamOrCramWriter() {
-        return new Object[][] {
-                { BamFileIoUtils.BAM_FILE_EXTENSION, },
-                { CramIO.CRAM_FILE_EXTENSION }
-        };
-    }
-
-    @Test(dataProvider="bamOrCramWriter")
-    public void testMakeWriter(String extension) throws Exception {
-        final File outputFile = prepareOutputFile(extension);
-        final SAMFileHeader header = new SAMFileHeader();
-        final SAMFileWriterFactory factory = createWriterFactoryWithOptions(header);
-        final File referenceFile = new File(TEST_DATA_DIR, "hg19mini.fasta");
-
-        final SAMFileWriter samWriter = factory.makeWriter(header, false, outputFile, referenceFile);
-        int nRecs = fillSmallBam(samWriter);
-        samWriter.close();
-
-        verifyWriterOutput(outputFile, new ReferenceSource(referenceFile), nRecs, true);
-    }
-
-    @Test
-    public void testMakeCRAMWriterWithOptions() throws Exception {
-        final File outputFile = prepareOutputFile(CramIO.CRAM_FILE_EXTENSION);
-        final SAMFileHeader header = new SAMFileHeader();
-        final SAMFileWriterFactory factory = createWriterFactoryWithOptions(header);
-        final File referenceFile = new File(TEST_DATA_DIR, "hg19mini.fasta");
-
-        final SAMFileWriter samWriter = factory.makeCRAMWriter(header, false, outputFile, referenceFile);
-        int nRecs = fillSmallBam(samWriter);
-        samWriter.close();
-
-        verifyWriterOutput(outputFile, new ReferenceSource(referenceFile), nRecs, true);
-    }
-
-    @Test
-    public void testMakeCRAMWriterIgnoresOptions() throws Exception {
-        final File outputFile = prepareOutputFile(CramIO.CRAM_FILE_EXTENSION);
-        final SAMFileHeader header = new SAMFileHeader();
-        final SAMFileWriterFactory factory = createWriterFactoryWithOptions(header);
-        final File referenceFile = new File(TEST_DATA_DIR, "hg19mini.fasta");
-
-        // Note: does not honor factory settings for CREATE_MD5 or CREATE_INDEX.
-        final SAMFileWriter samWriter = factory.makeCRAMWriter(header, new FileOutputStream(outputFile), referenceFile);
-        int nRecs = fillSmallBam(samWriter);
-        samWriter.close();
-
-        verifyWriterOutput(outputFile, new ReferenceSource(referenceFile), nRecs, false);
-    }
-
-    @Test
-    public void testMakeCRAMWriterPresortedDefault() throws Exception {
-        final File outputFile = prepareOutputFile(CramIO.CRAM_FILE_EXTENSION);
-        final SAMFileHeader header = new SAMFileHeader();
-        final SAMFileWriterFactory factory = createWriterFactoryWithOptions(header);
-        final File referenceFile = new File(TEST_DATA_DIR, "hg19mini.fasta");
-
-        // Defaults to preSorted==true
-        final SAMFileWriter samWriter = factory.makeCRAMWriter(header, outputFile, referenceFile);
-        int nRecs = fillSmallBam(samWriter);
-        samWriter.close();
-
-        verifyWriterOutput(outputFile, new ReferenceSource(referenceFile), nRecs, true);
-    }
-
-    @Test
-    public void testAsync() throws IOException {
-        final SAMFileWriterFactory builder = new SAMFileWriterFactory();
-
-        final File outputFile = prepareOutputFile(BamFileIoUtils.BAM_FILE_EXTENSION);
-        final SAMFileHeader header = new SAMFileHeader();
-        final SAMFileWriterFactory factory = createWriterFactoryWithOptions(header);
-        final File referenceFile = new File(TEST_DATA_DIR, "hg19mini.fasta");
-
-        SAMFileWriter writer = builder.makeWriter(header, false, outputFile, referenceFile);
-        Assert.assertEquals(writer instanceof AsyncSAMFileWriter, Defaults.USE_ASYNC_IO_FOR_SAMTOOLS, "testAsync default");
-
-        writer = builder.setUseAsyncIo(true).makeWriter(header, false, outputFile, referenceFile);
-        Assert.assertTrue(writer instanceof AsyncSAMFileWriter, "testAsync option=set");
-
-        writer = builder.setUseAsyncIo(false).makeWriter(header, false, outputFile, referenceFile);
-        Assert.assertFalse(writer instanceof AsyncSAMFileWriter, "testAsync option=unset");
-    }
-}
diff --git a/src/tests/java/htsjdk/samtools/SAMIntegerTagTest.java b/src/tests/java/htsjdk/samtools/SAMIntegerTagTest.java
deleted file mode 100644
index 059900d..0000000
--- a/src/tests/java/htsjdk/samtools/SAMIntegerTagTest.java
+++ /dev/null
@@ -1,329 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools;
-
-import htsjdk.samtools.cram.ref.ReferenceSource;
-import htsjdk.samtools.util.BinaryCodec;
-import htsjdk.samtools.util.CloserUtil;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.IOException;
-import java.io.PrintStream;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Confirm that integer tag types are stored and retrieved properly.
- *
- * @author alecw at broadinstitute.org
- */
-public class SAMIntegerTagTest {
-    private static final File TEST_DATA_DIR = new File("testdata/htsjdk/samtools/SAMIntegerTagTest");
-
-    private static final String BYTE_TAG = "BY";
-    private static final String SHORT_TAG = "SH";
-    private static final String INTEGER_TAG = "IN";
-    private static final String UNSIGNED_INTEGER_TAG = "UI";
-    private static final String STRING_TAG = "ST";
-
-    private static final long TOO_LARGE_UNSIGNED_INT_VALUE = BinaryCodec.MAX_UINT + 1L;
-
-    enum FORMAT {SAM, BAM, CRAM}
-
-    @Test
-    public void testBAM() throws Exception {
-        final SAMRecord rec = writeAndReadSamRecord("bam");
-        Assert.assertTrue(rec.getAttribute(BYTE_TAG) instanceof Integer);
-        Assert.assertEquals(((Number) rec.getAttribute(BYTE_TAG)).intValue(), 1);
-        Assert.assertTrue(rec.getAttribute(SHORT_TAG) instanceof Integer);
-        Assert.assertEquals(((Number) rec.getAttribute(SHORT_TAG)).intValue(), 1);
-        Assert.assertTrue(rec.getAttribute(INTEGER_TAG) instanceof Integer);
-        Assert.assertEquals(((Number) rec.getAttribute(INTEGER_TAG)).intValue(), 1);
-    }
-
-    @Test
-    public void testSAM() throws Exception {
-        final SAMRecord rec = writeAndReadSamRecord("sam");
-        Assert.assertTrue(rec.getAttribute(BYTE_TAG) instanceof Integer);
-        Assert.assertEquals(((Number) rec.getAttribute(BYTE_TAG)).intValue(), 1);
-        Assert.assertTrue(rec.getAttribute(SHORT_TAG) instanceof Integer);
-        Assert.assertEquals(((Number) rec.getAttribute(SHORT_TAG)).intValue(), 1);
-        Assert.assertTrue(rec.getAttribute(INTEGER_TAG) instanceof Integer);
-        Assert.assertEquals(((Number) rec.getAttribute(INTEGER_TAG)).intValue(), 1);
-    }
-
-    @Test
-    public void testUnsignedIntegerSAM() throws Exception {
-        final SAMRecord rec = createSamRecord();
-        final long val = 1L + Integer.MAX_VALUE;
-        rec.setAttribute(UNSIGNED_INTEGER_TAG, val);
-        final Object roundTripValue = rec.getAttribute(UNSIGNED_INTEGER_TAG);
-        Assert.assertTrue(roundTripValue instanceof Long);
-        Assert.assertEquals(((Long)roundTripValue).longValue(), val);
-    }
-
-    @Test
-    public void testGetTypedAttributeMethods() throws Exception {
-        final SAMRecord rec = writeAndReadSamRecord("bam");
-        Assert.assertEquals(rec.getByteAttribute(INTEGER_TAG).intValue(), 1);
-        Assert.assertEquals(rec.getShortAttribute(INTEGER_TAG).intValue(), 1);
-        Assert.assertEquals(rec.getIntegerAttribute(INTEGER_TAG).intValue(), 1);
-    }
-
-    /**
-     * Should be an exception if a typed attribute call is made for the wrong type.
-     */
-    @Test(expectedExceptions = RuntimeException.class)
-    public void testGetTypedAttributeForWrongType() throws Exception {
-        final SAMRecord rec = createSamRecord();
-        rec.setAttribute(STRING_TAG, "Hello, World!");
-        writeAndReadSamRecord("bam", rec);
-        rec.getIntegerAttribute(STRING_TAG);
-        Assert.fail("Exception should have been thrown.");
-    }
-
-    /**
-     * Should be an exception if a typed attribute call is made for a value that cannot
-     * be coerced into the correct type.
-     * This test is a little lame because a RuntimeException could be thrown for some other reason.
-     */
-    @Test(expectedExceptions = RuntimeException.class)
-    public void testGetTypedAttributeOverflow() throws Exception {
-        final SAMRecord rec = createSamRecord();
-        rec.setAttribute(INTEGER_TAG, Integer.MAX_VALUE);
-        writeAndReadSamRecord("bam", rec);
-        rec.getShortAttribute(INTEGER_TAG);
-        Assert.fail("Exception should have been thrown.");
-    }
-
-    /**
-     * Should be an exception if a typed attribute call is made for a value that cannot
-     * be coerced into the correct type.
-     * This test is a little lame because a RuntimeException could be thrown for some other reason.
-     */
-    @Test(expectedExceptions = RuntimeException.class)
-    public void testGetTypedAttributeUnderflow() throws Exception {
-        final SAMRecord rec = createSamRecord();
-        rec.setAttribute(INTEGER_TAG, Integer.MIN_VALUE);
-        writeAndReadSamRecord("bam", rec);
-        rec.getShortAttribute(INTEGER_TAG);
-        Assert.fail("Exception should have been thrown.");
-    }
-
-    /**
-     * Create a SAMRecord with integer tags of various sizes, write to a file, and read it back.
-     *
-     * @param format "sam" or "bam".
-     * @return The record after having been read from the file.
-     */
-    private SAMRecord writeAndReadSamRecord(final String format) throws IOException {
-        SAMRecord rec = createSamRecord();
-        rec.setAttribute(BYTE_TAG, (byte) 1);
-        rec.setAttribute(SHORT_TAG, (short) 1);
-        rec.setAttribute(INTEGER_TAG, 1);
-        rec = writeAndReadSamRecord(format, rec);
-        return rec;
-    }
-
-    /**
-     * Write a SAMRecord to a file in the given format, and read it back.
-     *
-     * @param format "sam" or "bam".
-     * @param rec    The record to write.
-     * @return The same record, after having been written and read back.
-     */
-    private SAMRecord writeAndReadSamRecord(final String format, SAMRecord rec) throws IOException {
-        final File bamFile = File.createTempFile("htsjdk-writeAndReadSamRecord.", "." + format);
-        final SAMFileWriter bamWriter = new SAMFileWriterFactory().makeSAMOrBAMWriter(rec.getHeader(), false, bamFile);
-        bamWriter.addAlignment(rec);
-        bamWriter.close();
-        final SamReader reader = SamReaderFactory.makeDefault().open(bamFile);
-        rec = reader.iterator().next();
-        reader.close();
-        bamFile.delete();
-        return rec;
-    }
-
-    private SAMRecord createSamRecord() {
-        final SAMRecordSetBuilder builder = new SAMRecordSetBuilder(false, SAMFileHeader.SortOrder.unsorted);
-        builder.addFrag("readA", 20, 140, false);
-        return builder.iterator().next();
-    }
-
-    private static SamInputResource createSamForIntAttr(long value) {
-        ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        PrintStream ps = new PrintStream(baos);
-        ps.println("@HD\tVN:1.0");
-        ps.print("1\t4\t*\t0\t0\t*\t*\t0\t0\tA\t<\tUI:i:");
-        ps.println(value);
-        ps.close();
-
-        return new SamInputResource(new InputStreamInputResource(new ByteArrayInputStream(baos.toByteArray())));
-    }
-
-    @Test
-    public void testGoodSamStrict() throws IOException {
-        final SamReaderFactory factory = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.STRICT);
-
-        Assert.assertEquals(0, ((Number) factory.open(createSamForIntAttr(0)).iterator().next().getAttribute("UI")).intValue());
-        Assert.assertEquals(-1, ((Number) factory.open(createSamForIntAttr(-1)).iterator().next().getAttribute("UI")).intValue());
-        Assert.assertEquals(Integer.MIN_VALUE, ((Number) factory.open(createSamForIntAttr(Integer.MIN_VALUE)).iterator().next().getAttribute("UI")).intValue());
-        Assert.assertEquals(Integer.MAX_VALUE, ((Number) factory.open(createSamForIntAttr(Integer.MAX_VALUE)).iterator().next().getAttribute("UI")).intValue());
-        Assert.assertEquals(1L + (long) Integer.MAX_VALUE, ((Number) factory.open(createSamForIntAttr(1L + (long) Integer.MAX_VALUE)).iterator().next().getAttribute("UI")).longValue());
-        Assert.assertEquals(BinaryCodec.MAX_UINT, ((Number) factory.open(createSamForIntAttr(BinaryCodec.MAX_UINT)).iterator().next().getAttribute("UI")).longValue());
-    }
-
-    @Test(expectedExceptions = SAMException.class)
-    public void testBadSamStrict() throws IOException {
-        final SamReader reader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.STRICT).open(createSamForIntAttr(BinaryCodec.MAX_UINT + 1L));
-        reader.iterator().next();
-    }
-
-    @Test
-    public void testBadSamSilent() throws IOException {
-        final SamReader reader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT).open(createSamForIntAttr(BinaryCodec.MAX_UINT + 1L));
-        reader.iterator().next();
-    }
-
-    @DataProvider(name = "legalIntegerAttributesFiles")
-    public Object[][] getLegalIntegerAttributesFiles() {
-        return new Object[][] {
-                { new File(TEST_DATA_DIR, "variousAttributes.sam") },
-                { new File(TEST_DATA_DIR, "variousAttributes.bam") }
-        };
-    }
-
-    @Test(dataProvider = "legalIntegerAttributesFiles")
-    public void testLegalIntegerAttributesFilesStrict( final File inputFile ) {
-        final SamReader reader = SamReaderFactory.makeDefault()
-                .enable(SamReaderFactory.Option.EAGERLY_DECODE)
-                .validationStringency(ValidationStringency.STRICT)
-                .open(inputFile);
-
-        final SAMRecord rec = reader.iterator().next();
-        final Map<String, Number> expectedTags = new HashMap<String, Number>();
-        expectedTags.put("SB", -128);
-        expectedTags.put("UB", 129);
-        expectedTags.put("SS", 32767);
-        expectedTags.put("US", 65535);
-        expectedTags.put("SI", 2147483647);
-        expectedTags.put("I2", -2147483647);
-        expectedTags.put("UI", 4294967295L);
-        for (final Map.Entry<String, Number> entry : expectedTags.entrySet()) {
-            final Object value = rec.getAttribute(entry.getKey());
-            Assert.assertTrue(((Number) value).longValue() == entry.getValue().longValue());
-        }
-        CloserUtil.close(reader);
-    }
-
-    @DataProvider(name = "valid_set")
-    public static Object[][] valid_set() {
-        List<Object[]> params = new ArrayList<Object[]>();
-        for (FORMAT format:FORMAT.values()) {
-            for (ValidationStringency stringency:ValidationStringency.values()) {
-                params.add(new Object[]{0, format, stringency});
-                params.add(new Object[]{1, format, stringency});
-                params.add(new Object[]{-1, format, stringency});
-                params.add(new Object[]{Integer.MIN_VALUE, format, stringency});
-                params.add(new Object[]{Integer.MAX_VALUE, format, stringency});
-
-                params.add(new Object[]{1L, format, stringency});
-                params.add(new Object[]{-1L, format, stringency});
-                params.add(new Object[]{(long)Integer.MAX_VALUE+1L, format, stringency});
-                params.add(new Object[]{BinaryCodec.MAX_UINT, format, stringency});
-            }
-        }
-
-        return params.toArray(new Object[3][params.size()]);
-    }
-
-    @DataProvider(name = "invalid_set")
-    public static Object[][] invalid_set() {
-        List<Object[]> params = new ArrayList<Object[]>();
-        for (FORMAT format:FORMAT.values()) {
-            for (ValidationStringency stringency:ValidationStringency.values()) {
-                params.add(new Object[]{(long)Integer.MIN_VALUE -1L, format, stringency});
-                params.add(new Object[]{TOO_LARGE_UNSIGNED_INT_VALUE, format, stringency});
-            }
-        }
-
-        return params.toArray(new Object[3][params.size()]);
-    }
-
-    @Test(dataProvider = "valid_set")
-    public void testValidIntegerAttributeRoundtrip(final long value, final FORMAT format, ValidationStringency validationStringency) throws IOException {
-        testRoundtripIntegerAttribute(value, format, validationStringency);
-    }
-
-    @Test(dataProvider = "invalid_set", expectedExceptions = RuntimeException.class)
-    public void testInvalidIntegerAttributeRoundtrip(final long value, final FORMAT format, ValidationStringency validationStringency) throws IOException {
-        testRoundtripIntegerAttribute(value, format, validationStringency);
-    }
-
-    private void testRoundtripIntegerAttribute(final Number value, final FORMAT format, ValidationStringency validationStringency) throws IOException {
-        final SAMFileHeader header = new SAMFileHeader();
-        ByteArrayOutputStream baos = new ByteArrayOutputStream();
-
-        final SAMFileWriter w;
-        switch (format) {
-            case SAM:
-                w = new SAMFileWriterFactory().makeSAMWriter(header, false, baos);
-                break;
-            case BAM:
-                w = new SAMFileWriterFactory().makeBAMWriter(header, false, baos);
-                break;
-            case CRAM:
-                w = new SAMFileWriterFactory().makeCRAMWriter(header, baos, null);
-                break;
-            default:
-                throw new RuntimeException("Unknown format: " + format);
-        }
-
-        final SAMRecord record = new SAMRecord(header);
-        record.setAttribute("UI", value);
-        record.setReadName("1");
-        record.setReadUnmappedFlag(true);
-        record.setReadBases("A".getBytes());
-        record.setBaseQualityString("!");
-        Assert.assertEquals(value, record.getAttribute("UI"));
-
-        w.addAlignment(record);
-        w.close();
-
-        final SamReader reader = SamReaderFactory.make().validationStringency(validationStringency).referenceSource(new ReferenceSource((File)null)).
-                open(SamInputResource.of(new ByteArrayInputStream(baos.toByteArray())));
-        final SAMRecordIterator iterator = reader.iterator();
-        Assert.assertTrue(iterator.hasNext());
-        final SAMRecord record2 = iterator.next();
-        final Number returnedValue = (Number) record2.getAttribute("UI");
-        Assert.assertEquals(value.longValue(), returnedValue.longValue());
-    }
-}
diff --git a/src/tests/java/htsjdk/samtools/SAMRecordUnitTest.java b/src/tests/java/htsjdk/samtools/SAMRecordUnitTest.java
deleted file mode 100644
index dca9de6..0000000
--- a/src/tests/java/htsjdk/samtools/SAMRecordUnitTest.java
+++ /dev/null
@@ -1,963 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2014 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-
-package htsjdk.samtools;
-
-import htsjdk.samtools.util.BinaryCodec;
-import htsjdk.samtools.util.TestUtil;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.*;
-import java.util.Arrays;
-import java.util.List;
-
-public class SAMRecordUnitTest {
-
-    @DataProvider(name = "serializationTestData")
-    public Object[][] getSerializationTestData() {
-        return new Object[][] {
-                { new File("testdata/htsjdk/samtools/serialization_test.sam") },
-                { new File("testdata/htsjdk/samtools/serialization_test.bam") }
-        };
-    }
-
-    @Test(dataProvider = "serializationTestData")
-    public void testSAMRecordSerialization( final File inputFile ) throws Exception {
-        final SamReader reader = SamReaderFactory.makeDefault().open(inputFile);
-        final SAMRecord initialSAMRecord = reader.iterator().next();
-        reader.close();
-
-        final SAMRecord deserializedSAMRecord = TestUtil.serializeAndDeserialize(initialSAMRecord);
-
-        Assert.assertEquals(deserializedSAMRecord, initialSAMRecord, "Deserialized SAMRecord not equal to original SAMRecord");
-    }
-
-    @DataProvider
-    public Object [][] offsetAtReferenceData() {
-        return new Object[][]{
-                {"3S9M",   7, 10, false},
-                {"3S9M",   0,  0, false},
-                {"3S9M",  -1,  0, false},
-                {"3S9M",  13,  0, false},
-                {"4M1D6M", 4,  4, false},
-                {"4M1D6M", 4,  4, true},
-                {"4M1D6M", 5,  0, false},
-                {"4M1D6M", 5,  4, true},
-                {"4M1I6M", 5,  6, false},
-                {"4M1I6M", 11, 0, false},
-        };
-    }
-
-    @Test(dataProvider = "offsetAtReferenceData")
-    public void testOffsetAtReference(String cigar, int posInReference, int expectedPosInRead, boolean returnLastBaseIfDeleted) {
-
-            SAMRecord sam = new SAMRecordSetBuilder().addFrag("test", 0, 1, false, false, cigar, null, 2);
-            Assert.assertEquals(SAMRecord.getReadPositionAtReferencePosition(sam, posInReference, returnLastBaseIfDeleted), expectedPosInRead);
-    }
-
-    @DataProvider
-    public Object [][] referenceAtReadData() {
-        return new Object[][]{
-                {"3S9M", 7, 10},
-                {"3S9M", 0, 0},
-                {"3S9M", 0, 13},
-                {"4M1D6M", 4, 4},
-                {"4M1D6M", 6, 5},
-                {"4M1I6M", 0, 5},
-                {"4M1I6M", 5, 6},
-        };
-    }
-
-    @Test(dataProvider = "referenceAtReadData")
-    public void testOffsetAtRead(String cigar, int expectedReferencePos, int posInRead) {
-
-            SAMRecord sam = new SAMRecordSetBuilder().addFrag("test", 0, 1, false, false, cigar, null, 2);
-            Assert.assertEquals(sam.getReferencePositionAtReadPosition(posInRead), expectedReferencePos);
-    }
-
-    @DataProvider(name = "deepCopyTestData")
-    public Object [][] deepCopyTestData() {
-        return new Object[][]{
-                { new SAMRecordSetBuilder().addFrag("test", 0, 1, false, false, "3S9M", null, 2) },
-                { new SAMRecordSetBuilder().addFrag("test", 0, 1, false, false, "4M1I6M", null, 2) }
-        };
-    }
-
-    @Test(dataProvider = "deepCopyTestData")
-    public void testDeepCopyBasic(final SAMRecord sam) {
-        testDeepCopy(sam);
-    }
-
-    @Test(dataProvider = "deepCopyTestData")
-    public void testDeepCopyCigar(SAMRecord sam) {
-        sam.setCigar(sam.getCigar());
-        final SAMRecord deepCopy = sam.deepCopy();
-        Assert.assertTrue(sam.equals(deepCopy));
-    }
-
-    @Test(dataProvider = "deepCopyTestData")
-    public void testDeepCopyGetCigarString(SAMRecord sam) {
-        sam.setCigarString(sam.getCigarString());
-        final SAMRecord deepCopy = sam.deepCopy();
-        Assert.assertTrue(sam.equals(deepCopy));
-    }
-
-    @Test(dataProvider = "deepCopyTestData")
-    public void testDeepCopyGetCigar(final SAMRecord sam)
-    {
-        testDeepCopy(sam);
-        sam.setCigarString(sam.getCigarString());
-        sam.getCigar(); // force cigar elements to be resolved for equals
-        testDeepCopy(sam);
-    }
-
-    @Test(dataProvider = "deepCopyTestData")
-    public void testDeepCopyMutate(final SAMRecord sam) {
-        final byte[] initialBaseQualityCopy = Arrays.copyOf(sam.getBaseQualities(), sam.getBaseQualities().length);
-        final int initialStart = sam.getAlignmentStart();
-
-        final SAMRecord deepCopy = testDeepCopy(sam);
-        Assert.assertTrue(Arrays.equals(sam.getBaseQualities(), deepCopy.getBaseQualities()));
-        Assert.assertTrue(sam.getAlignmentStart() == deepCopy.getAlignmentStart());
-
-        // mutate copy and make sure original remains unchanged
-        final byte[] copyBaseQuals = deepCopy.getBaseQualities();
-        for (int i = 0; i < copyBaseQuals.length; i++) {
-            copyBaseQuals[i]++;
-        }
-        deepCopy.setBaseQualities(copyBaseQuals);
-        deepCopy.setAlignmentStart(initialStart + 1);
-        Assert.assertTrue(Arrays.equals(sam.getBaseQualities(), initialBaseQualityCopy));
-        Assert.assertTrue(sam.getAlignmentStart() == initialStart);
-    }
-
-    @Test(dataProvider = "deepCopyTestData")
-    public void testDeepByteAttributes( final SAMRecord sam ) throws Exception {
-        SAMRecord deepCopy = testDeepCopy(sam);
-
-        final byte bytes[] = { -2, -1, 0, 1, 2 };
-        sam.setAttribute("BY", bytes);
-        deepCopy = sam.deepCopy();
-        Assert.assertEquals(sam, deepCopy);
-
-        // validate reference inequality and content equality
-        final byte samBytes[] = sam.getByteArrayAttribute("BY");
-        final byte copyBytes[] = deepCopy.getByteArrayAttribute("BY");
-        Assert.assertFalse(copyBytes == samBytes);
-        Assert.assertTrue(Arrays.equals(copyBytes, samBytes));
-
-        // validate mutation independence
-        final byte testByte = -1;
-        Assert.assertTrue(samBytes[2] != testByte);  // ensure initial test condition
-        Assert.assertTrue(copyBytes[2] != testByte); // ensure initial test condition
-        samBytes[2] = testByte;                      // mutate original
-        Assert.assertTrue(samBytes[2] == testByte);
-        Assert.assertTrue(copyBytes[2] != testByte);
-        sam.setAttribute("BY", samBytes);
-        Assert.assertTrue(sam.getByteArrayAttribute("BY")[2] != deepCopy.getByteArrayAttribute("BY")[2]);
-
-        // now unsigned...
-        sam.setUnsignedArrayAttribute("BY", bytes);
-        deepCopy = sam.deepCopy();
-        Assert.assertEquals(sam, deepCopy);
-        final byte samUBytes[] = sam.getUnsignedByteArrayAttribute("BY");
-        final byte copyUBytes[] = deepCopy.getUnsignedByteArrayAttribute("BY");
-        Assert.assertFalse(copyUBytes == bytes);
-        Assert.assertTrue(Arrays.equals(copyUBytes, samUBytes));
-
-        // validate mutation independence
-        final byte uByte = 1;
-        Assert.assertTrue(samUBytes[2] != uByte); //  ensure initial test condition
-        Assert.assertTrue(copyUBytes[2] != uByte); //  ensure initial test condition
-        samUBytes[2] = uByte;  // mutate original
-        Assert.assertTrue(samUBytes[2] == uByte);
-        Assert.assertTrue(copyUBytes[2] != uByte);
-        sam.setUnsignedArrayAttribute("BY", samBytes);
-        Assert.assertTrue(sam.getUnsignedByteArrayAttribute("BY")[2] != deepCopy.getUnsignedByteArrayAttribute("BY")[2]);
-    }
-
-    @Test(dataProvider = "deepCopyTestData")
-    public void testDeepShortAttributes( final SAMRecord sam ) throws Exception {
-        SAMRecord deepCopy = testDeepCopy(sam);
-
-        final short shorts[] = { -20, -10, 0, 10, 20 };
-        sam.setAttribute("SH", shorts);
-        deepCopy = sam.deepCopy();
-        Assert.assertEquals(sam, deepCopy);
-
-        // validate reference inequality, content equality
-        final short samShorts[] = sam.getSignedShortArrayAttribute("SH");
-        final short copyShorts[] = deepCopy.getSignedShortArrayAttribute("SH");
-        Assert.assertFalse(copyShorts == samShorts);
-        Assert.assertTrue(Arrays.equals(copyShorts, samShorts));
-
-        // validate mutation independence
-        final short testShort = -1;
-        Assert.assertTrue(samShorts[2] != testShort); //  ensure initial test condition
-        Assert.assertTrue(copyShorts[2] != testShort); //  ensure initial test condition
-        samShorts[2] = testShort;  // mutate original
-        Assert.assertTrue(samShorts[2] == testShort);
-        Assert.assertTrue(copyShorts[2] != testShort);
-        sam.setAttribute("SH", samShorts);
-        Assert.assertTrue(sam.getSignedShortArrayAttribute("SH")[2] != deepCopy.getSignedShortArrayAttribute("SH")[2]);
-
-        // now unsigned...
-        sam.setUnsignedArrayAttribute("SH", shorts);
-        deepCopy = sam.deepCopy();
-        Assert.assertEquals(sam, deepCopy);
-
-        final short samUShorts[] = sam.getUnsignedShortArrayAttribute("SH");
-        final short copyUShorts[] = deepCopy.getUnsignedShortArrayAttribute("SH");
-        Assert.assertFalse(copyUShorts == shorts);
-        Assert.assertTrue(Arrays.equals(copyUShorts, samUShorts));
-
-        // validate mutation independence
-        final byte uShort = 1;
-        Assert.assertTrue(samUShorts[2] != uShort); //  ensure initial test condition
-        Assert.assertTrue(copyUShorts[2] != uShort); //  ensure initial test condition
-        samUShorts[2] = uShort;  // mutate original
-        Assert.assertTrue(samUShorts[2] == uShort);
-        Assert.assertTrue(copyUShorts[2] != uShort);
-        sam.setUnsignedArrayAttribute("SH", samShorts);
-        Assert.assertTrue(sam.getUnsignedShortArrayAttribute("SH")[2] != deepCopy.getUnsignedShortArrayAttribute("SH")[2]);
-    }
-
-    @Test(dataProvider = "deepCopyTestData")
-    public void testDeepIntAttributes( final SAMRecord sam ) throws Exception {
-        SAMRecord deepCopy = testDeepCopy(sam);
-
-        final int ints[] = { -200, -100, 0, 100, 200 };
-        sam.setAttribute("IN", ints);
-        deepCopy = sam.deepCopy();
-        Assert.assertEquals(sam, deepCopy);
-
-        // validate reference inequality and content equality
-        final  int samInts[] = sam.getSignedIntArrayAttribute("IN");
-        final  int copyInts[] = deepCopy.getSignedIntArrayAttribute("IN");
-        Assert.assertFalse(copyInts == ints);
-        Assert.assertTrue(Arrays.equals(copyInts, samInts));
-
-        // validate mutation independence
-        final short testInt = -1;
-        Assert.assertTrue(samInts[2] != testInt); //  ensure initial test condition
-        Assert.assertTrue(copyInts[2] != testInt); //  ensure initial test condition
-        samInts[2] = testInt;  // mutate original
-        Assert.assertTrue(samInts[2] == testInt);
-        Assert.assertTrue(copyInts[2] != testInt);
-        sam.setAttribute("IN", samInts);
-        Assert.assertTrue(sam.getSignedIntArrayAttribute("IN")[2] != deepCopy.getSignedIntArrayAttribute("IN")[2]);
-
-        // now unsigned...
-        sam.setUnsignedArrayAttribute("IN", ints);
-        deepCopy = sam.deepCopy();
-        Assert.assertEquals(sam, deepCopy);
-
-        final int samUInts[] = sam.getUnsignedIntArrayAttribute("IN");
-        final int copyUInts[] = deepCopy.getUnsignedIntArrayAttribute("IN");
-        Assert.assertFalse(copyUInts == ints);
-        Assert.assertTrue(Arrays.equals(copyUInts, samUInts));
-
-        // validate mutation independence
-        byte uInt = 1;
-        Assert.assertTrue(samUInts[2] != uInt); //  ensure initial test condition
-        Assert.assertTrue(copyUInts[2] != uInt); //  ensure initial test condition
-        samInts[2] = uInt;  // mutate original
-        Assert.assertTrue(samUInts[2] == uInt);
-        Assert.assertTrue(copyUInts[2] != uInt);
-        sam.setUnsignedArrayAttribute("IN", samInts);
-        Assert.assertTrue(sam.getUnsignedIntArrayAttribute("IN")[2] != deepCopy.getUnsignedIntArrayAttribute("IN")[2]);
-    }
-
-    @Test(dataProvider = "deepCopyTestData")
-    public void testDeepFloatAttributes( final SAMRecord sam ) throws Exception {
-        SAMRecord deepCopy = testDeepCopy(sam);
-
-        final float floats[] = { -2.4f, -1.2f, 0, 2.3f, 4.6f };
-        sam.setAttribute("FL", floats);
-        deepCopy = sam.deepCopy();
-        Assert.assertEquals(sam, deepCopy);
-
-        // validate reference inequality and content equality
-        final float samFloats[] = sam.getFloatArrayAttribute("FL");
-        final float copyFloats[] = deepCopy.getFloatArrayAttribute("FL");
-        Assert.assertFalse(copyFloats == floats);
-        Assert.assertFalse(copyFloats == samFloats);
-        Assert.assertTrue(Arrays.equals(copyFloats, samFloats));
-
-        // validate mutation independence
-        final float testFloat = -1.0f;
-        Assert.assertTrue(samFloats[2] != testFloat); //  ensure initial test condition
-        Assert.assertTrue(copyFloats[2] != testFloat); //  ensure initial test condition
-        samFloats[2] = testFloat;  // mutate original
-        Assert.assertTrue(samFloats[2] == testFloat);
-        Assert.assertTrue(copyFloats[2] != testFloat);
-        sam.setAttribute("FL", samFloats);
-        Assert.assertTrue(sam.getFloatArrayAttribute("FL")[2] != deepCopy.getFloatArrayAttribute("FL")[2]);
-    }
-
-    private SAMRecord testDeepCopy(SAMRecord sam) {
-        final SAMRecord deepCopy = sam.deepCopy();
-        Assert.assertTrue(sam.equals(deepCopy));
-        return deepCopy;
-    }
-
-    @Test
-    public void test_getUnsignedIntegerAttribute_valid() {
-        final String stringTag = "UI";
-        final short binaryTag = SAMTagUtil.getSingleton().makeBinaryTag(stringTag);
-        SAMFileHeader header = new SAMFileHeader();
-        SAMRecord record = new SAMRecord(header);
-        Assert.assertNull(record.getUnsignedIntegerAttribute(stringTag));
-        Assert.assertNull(record.getUnsignedIntegerAttribute(binaryTag));
-
-        record.setAttribute("UI", (long) 0L);
-        Assert.assertEquals(new Long(0L), record.getUnsignedIntegerAttribute(stringTag));
-        Assert.assertEquals(new Long(0L), record.getUnsignedIntegerAttribute(binaryTag));
-
-        record.setAttribute("UI", BinaryCodec.MAX_UINT);
-        Assert.assertEquals(new Long(BinaryCodec.MAX_UINT), record.getUnsignedIntegerAttribute(stringTag));
-        Assert.assertEquals(new Long(BinaryCodec.MAX_UINT), record.getUnsignedIntegerAttribute(binaryTag));
-
-        final SAMBinaryTagAndValue tv_zero = new SAMBinaryTagAndValue(binaryTag, 0L);
-        record = new SAMRecord(header){
-            {
-                setAttributes(tv_zero);
-            }
-        };
-        Assert.assertEquals(new Long(0L), record.getUnsignedIntegerAttribute(stringTag));
-        Assert.assertEquals(new Long(0L), record.getUnsignedIntegerAttribute(binaryTag));
-
-        final SAMBinaryTagAndValue tv_max = new SAMBinaryTagAndValue(binaryTag, BinaryCodec.MAX_UINT);
-        record = new SAMRecord(header){
-            {
-                setAttributes(tv_max);
-            }
-        };
-        Assert.assertEquals(new Long(BinaryCodec.MAX_UINT), record.getUnsignedIntegerAttribute(stringTag));
-        Assert.assertEquals(new Long(BinaryCodec.MAX_UINT), record.getUnsignedIntegerAttribute(binaryTag));
-    }
-
-    /**
-     * This is an alternative to test_getUnsignedIntegerAttribute_valid().
-     * This is required for testing an invalid (out-of-range) unsigned integer value.
-     */
-    @Test
-    public void test_getUnsignedIntegerAttribute_valid_alternative() {
-        final short tag = SAMTagUtil.getSingleton().makeBinaryTag("UI");
-        SAMFileHeader header = new SAMFileHeader();
-        SAMRecord record;
-
-        record = new SAMRecord(header);
-        record.setAttribute("UI", 0L);
-        Assert.assertEquals(new Long(0L), record.getUnsignedIntegerAttribute(tag));
-
-        record = new SAMRecord(header);
-        record.setAttribute("UI", BinaryCodec.MAX_UINT);
-        Assert.assertEquals(new Long(BinaryCodec.MAX_UINT), record.getUnsignedIntegerAttribute("UI"));
-    }
-
-    @Test(expectedExceptions = SAMException.class)
-    public void test_getUnsignedIntegerAttribute_negative() {
-        SAMFileHeader header = new SAMFileHeader();
-        SAMRecord record = new SAMRecord(header);
-        record.setAttribute("UI", -1L);
-        record.getUnsignedIntegerAttribute("UI");
-    }
-
-    @Test(expectedExceptions = IllegalArgumentException.class)
-    public void test_setUnsignedIntegerAttributeTooLarge() {
-        SAMFileHeader header = new SAMFileHeader();
-        SAMRecord record = new SAMRecord(header);
-        record.setAttribute("UI", BinaryCodec.MAX_UINT + 1);
-    }
-
-    // NOTE: SAMRecord.asAllowedAttribute is deprecated, as it has been moved into
-    // SAMBinaryTagAndValue, but we'll leave this test here until the code is removed.
-    @Test
-    public void test_isAllowedAttributeDataType() {
-        Assert.assertTrue(SAMRecord.isAllowedAttributeValue(new Byte((byte) 0)));
-        Assert.assertTrue(SAMRecord.isAllowedAttributeValue(new Short((short) 0)));
-        Assert.assertTrue(SAMRecord.isAllowedAttributeValue(new Integer(0)));
-        Assert.assertTrue(SAMRecord.isAllowedAttributeValue("a string"));
-        Assert.assertTrue(SAMRecord.isAllowedAttributeValue(new Character('C')));
-        Assert.assertTrue(SAMRecord.isAllowedAttributeValue(new Float(0.1F)));
-        Assert.assertTrue(SAMRecord.isAllowedAttributeValue(new byte[]{0}));
-        Assert.assertTrue(SAMRecord.isAllowedAttributeValue(new short[]{0}));
-        Assert.assertTrue(SAMRecord.isAllowedAttributeValue(new int[]{0}));
-        Assert.assertTrue(SAMRecord.isAllowedAttributeValue(new float[]{0.1F}));
-
-        // unsigned integers:
-        Assert.assertTrue(SAMRecord.isAllowedAttributeValue(new Long(0)));
-        Assert.assertTrue(SAMRecord.isAllowedAttributeValue(new Long(BinaryCodec.MAX_UINT)));
-        Assert.assertTrue(SAMRecord.isAllowedAttributeValue(new Long(-1L)));
-        Assert.assertFalse(SAMRecord.isAllowedAttributeValue(new Long(BinaryCodec.MAX_UINT + 1L)));
-        Assert.assertFalse(SAMRecord.isAllowedAttributeValue(new Long(Integer.MIN_VALUE - 1L)));
-    }
-
-    @Test(expectedExceptions = IllegalArgumentException.class)
-    public void test_setAttribute_unsigned_int_negative() {
-        SAMFileHeader header = new SAMFileHeader();
-        SAMRecord record = null;
-        record = new SAMRecord(header);
-        Assert.assertNull(record.getUnsignedIntegerAttribute("UI"));
-        record.setAttribute("UI", (long) Integer.MIN_VALUE - 1L);
-    }
-
-    @Test(expectedExceptions = IllegalArgumentException.class)
-    public void test_setAttribute_unsigned_int_tooLarge() {
-        SAMFileHeader header = new SAMFileHeader();
-        SAMRecord record = new SAMRecord(header);
-        Assert.assertNull(record.getUnsignedIntegerAttribute("UI"));
-        record.setAttribute("UI", (long) BinaryCodec.MAX_UINT + 1L);
-    }
-
-    @Test
-    public void test_setAttribute_null_removes_tag() {
-        final short tag = SAMTagUtil.getSingleton().makeBinaryTag("UI");
-        SAMFileHeader header = new SAMFileHeader();
-        SAMRecord record = new SAMRecord(header);
-        Assert.assertNull(record.getUnsignedIntegerAttribute(tag));
-
-        record.setAttribute(tag, BinaryCodec.MAX_UINT);
-        Assert.assertEquals(new Long(BinaryCodec.MAX_UINT), record.getUnsignedIntegerAttribute(tag));
-
-        record.setAttribute(tag, null);
-        Assert.assertNull(record.getUnsignedIntegerAttribute(tag));
-    }
-
-    private SAMRecord createTestRecordHelper() {
-        return new SAMRecordSetBuilder().addFrag("test", 0, 1, false, false, "3S9M", null, 2);
-    }
-
-    @Test
-    public void testReferenceName() {
-        SAMRecord sam = createTestRecordHelper();
-
-        // NO_ALIGNMENT_NAME
-        sam.setReferenceName(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME);
-        Assert.assertTrue(sam.getReferenceName().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME));
-        Assert.assertTrue(sam.getReferenceIndex().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX));
-
-        // valid reference name
-        sam = createTestRecordHelper();
-        sam.setReferenceName("chr4");
-        Assert.assertTrue(sam.getReferenceName().equals("chr4"));
-        Assert.assertTrue(sam.getReferenceIndex().equals(3));
-
-        // invalid reference name sets name but leaves ref index invalid
-        sam = createTestRecordHelper();
-        sam.setReferenceName("unresolvableName");
-        Assert.assertTrue(sam.getReferenceName().equals("unresolvableName"));
-        Assert.assertTrue(sam.getReferenceIndex().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX));
-    }
-
-    @Test
-    public void testReferenceIndex() {
-        // NO_ALIGNMENT_REFERENCE
-        SAMRecord sam = createTestRecordHelper();
-        sam.setReferenceIndex(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
-        Assert.assertTrue(sam.getReferenceIndex().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX));
-        Assert.assertTrue(sam.getReferenceName().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME));
-
-        // valid reference
-        sam = createTestRecordHelper();
-        sam.setReferenceIndex(3);
-        Assert.assertTrue(sam.getReferenceIndex().equals(3));
-        Assert.assertTrue(sam.getReferenceName().equals("chr4"));
-    }
-
-    @Test(expectedExceptions=IllegalArgumentException.class)
-    public void testInvalidReferenceIndex() {
-        // unresolvable reference
-        final SAMRecord sam = createTestRecordHelper();
-        sam.setReferenceIndex(9999);
-    }
-
-    @Test
-    public void testMateReferenceName() {
-        // NO_ALIGNMENT_NAME
-        SAMRecord sam = createTestRecordHelper();
-        sam.setMateReferenceName(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME);
-        Assert.assertTrue(sam.getMateReferenceName().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME));
-        Assert.assertTrue(sam.getMateReferenceIndex().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX));
-
-        // valid reference
-        sam = createTestRecordHelper();
-        sam.setMateReferenceName("chr4");
-        Assert.assertTrue(sam.getMateReferenceName().equals("chr4"));
-        Assert.assertTrue(sam.getMateReferenceIndex().equals(3));
-
-        // unresolvable reference
-        sam = createTestRecordHelper();
-        sam.setMateReferenceName("unresolvableName");
-        Assert.assertTrue(sam.getMateReferenceName().equals("unresolvableName"));
-        Assert.assertTrue(sam.getMateReferenceIndex().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX));
-    }
-
-    @Test
-    public void testMateReferenceIndex() {
-        // NO_ALIGNMENT_REFERENCE
-        SAMRecord sam = createTestRecordHelper();
-        sam.setMateReferenceIndex(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
-        Assert.assertTrue(sam.getMateReferenceIndex().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX));
-        Assert.assertTrue(sam.getMateReferenceName().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME));
-
-        // valid reference
-        sam = createTestRecordHelper();
-        sam.setMateReferenceIndex(3);
-        Assert.assertTrue(sam.getMateReferenceIndex().equals(3));
-        Assert.assertTrue(sam.getMateReferenceName().equals("chr4"));
-    }
-
-    @Test(expectedExceptions=IllegalArgumentException.class)
-    public void testInvalidMateReferenceIndex() {
-        // unresolvable reference
-        final SAMRecord sam = createTestRecordHelper();
-        sam.setMateReferenceIndex(9999);
-    }
-
-    @Test
-    public void testRecordValidation() {
-        final SAMRecord sam = createTestRecordHelper();
-        List<SAMValidationError> validationErrors = sam.isValid(false);
-        Assert.assertTrue(validationErrors == null);
-    }
-
-    @Test
-    public void testInvalidAlignmentStartValidation() {
-        final SAMRecord sam = createTestRecordHelper();
-        sam.setAlignmentStart(0);
-        List<SAMValidationError> validationErrors = sam.isValid(false);
-        Assert.assertTrue(validationErrors != null && validationErrors.size() == 1);
-    }
-
-    // ----------------- NULL header tests ---------------------
-
-    @Test
-    public void testNullHeaderReferenceName() {
-        final SAMRecord sam = createTestRecordHelper();
-        final SAMFileHeader samHeader = sam.getHeader();
-        Assert.assertTrue(null != samHeader);
-        final String originalRefName = sam.getReferenceName();
-
-        // setting header to null retains the previously assigned ref name
-        sam.setHeader(null);
-        Assert.assertTrue(originalRefName.equals(sam.getReferenceName()));
-
-        // null header allows reference name to be set to NO_ALIGNMENT_REFERENCE_NAME
-        sam.setReferenceName(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME);
-        Assert.assertTrue(sam.getReferenceName().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME));
-        Assert.assertTrue(sam.getReferenceIndex().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX));
-
-        // null header allows reference name to be reset to a valid name
-        sam.setReferenceName(originalRefName);
-        Assert.assertTrue(sam.getReferenceName().equals(originalRefName));
-    }
-
-    @Test
-    public void testNullHeaderReferenceIndex() {
-        SAMRecord sam = createTestRecordHelper();
-        final SAMFileHeader samHeader = sam.getHeader();
-        int originalRefIndex = sam.getReferenceIndex();
-        Assert.assertTrue(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX != originalRefIndex);
-
-        // setting header to null resets the reference index to null
-        sam.setHeader(null);
-        Assert.assertTrue(null == sam.mReferenceIndex);
-        // restoring the header restores the reference index to the original value
-        sam.setHeader(samHeader);
-        Assert.assertTrue(sam.getReferenceIndex().equals(originalRefIndex));
-
-        // setting the header to null allows setting the reference index to NO_ALIGNMENT_REFERENCE_INDEX
-        sam.setHeader(null);
-        sam.setReferenceIndex(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
-        Assert.assertTrue(sam.getReferenceIndex().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX));
-        Assert.assertTrue(sam.getReferenceName().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME));
-
-        // force the internal SAMRecord reference index value to (null) initial state
-        sam = new SAMRecord(null);
-        Assert.assertTrue(null == sam.mReferenceIndex);
-        Assert.assertTrue(sam.getReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
-
-        // an unresolvable reference name doesn't throw
-        final String unresolvableRefName = "unresolvable";
-        sam.setReferenceName(unresolvableRefName);
-        // now force the SAMRecord to try to resolve the unresolvable name
-        sam.setHeader(samHeader);
-        Assert.assertTrue(null == sam.mReferenceIndex);
-        Assert.assertTrue(sam.getReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
-    }
-
-    @Test(expectedExceptions=IllegalStateException.class)
-    public void testNullHeaderSetReferenceIndex() {
-        final SAMRecord sam = createTestRecordHelper();
-        sam.setHeader(null);
-        // setReferenceIndex with null header throws
-        sam.setReferenceIndex(3);
-    }
-
-    @Test(expectedExceptions=IllegalStateException.class)
-    public void testNullHeaderGetReferenceIndex() {
-        final SAMRecord sam = createTestRecordHelper();
-        sam.setHeader(null);
-        // getReferenceIndex with null header throws
-        sam.getReferenceIndex();
-    }
-
-    @Test(expectedExceptions=IllegalStateException.class)
-    public void testNullHeaderForceIndexResolutionFailure() {
-        // force the internal SAMRecord reference index value to null initial state
-        final SAMRecord sam = new SAMRecord(null);
-        sam.setReferenceName("unresolvable");
-        sam.getReferenceIndex();
-    }
-
-    @Test
-    public void testNullHeaderMateReferenceName() {
-        final SAMRecord sam = createTestRecordHelper();
-        final SAMFileHeader samHeader = sam.getHeader();
-        Assert.assertTrue(null != samHeader);
-        final String originalMateRefName = sam.getMateReferenceName();
-
-        // setting header to null retains the previously assigned mate ref name
-        sam.setHeader(null);
-        Assert.assertTrue(originalMateRefName.equals(sam.getMateReferenceName()));
-
-        // null header allows mate reference name to be set to NO_ALIGNMENT_REFERENCE_NAME
-        sam.setMateReferenceName(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME);
-        Assert.assertTrue(sam.getMateReferenceName().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME));
-        Assert.assertTrue(sam.getMateReferenceIndex().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX));
-
-        // null header allows the mate reference name to be reset to a valid name
-        sam.setMateReferenceName(originalMateRefName);
-        Assert.assertTrue(sam.getMateReferenceName().equals(originalMateRefName));
-    }
-
-    @Test
-    public void testNullHeaderMateReferenceIndex() {
-        SAMRecord sam = createTestRecordHelper();
-        final SAMFileHeader samHeader = sam.getHeader();
-        sam.setMateReferenceName("chr1");
-        int originalMateRefIndex = sam.getMateReferenceIndex();
-        Assert.assertTrue(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX != originalMateRefIndex);
-
-        // setting header to null resets the mate reference index to null
-        sam.setHeader(null);
-        Assert.assertTrue(null == sam.mMateReferenceIndex);
-        // restoring the header restores the mate reference index to the original value
-        sam.setHeader(samHeader);
-        Assert.assertTrue(sam.getMateReferenceIndex().equals(originalMateRefIndex));
-
-        // setting the header to null allows setting the mate reference index to NO_ALIGNMENT_REFERENCE_INDEX
-        sam.setHeader(null);
-        sam.setMateReferenceIndex(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
-        Assert.assertTrue(sam.getMateReferenceIndex().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX));
-        Assert.assertTrue(sam.getMateReferenceName().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME));
-
-        // force the internal SAMRecord mate reference index value to (null) initial state
-        sam = new SAMRecord(null);
-        Assert.assertTrue(null == sam.mMateReferenceIndex);
-        Assert.assertTrue(sam.getMateReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
-
-        // an unresolvable mate reference name doesn't throw
-        final String unresolvableRefName = "unresolvable";
-        sam.setMateReferenceName(unresolvableRefName);
-        // now force the SAMRecord to try to resolve the unresolvable mate reference name
-        sam.setHeader(samHeader);
-        Assert.assertTrue(null == sam.mMateReferenceIndex);
-        Assert.assertTrue(sam.getMateReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
-    }
-
-    @Test(expectedExceptions=IllegalStateException.class)
-    public void testNullHeaderSetMateReferenceIndex() {
-        final SAMRecord sam = createTestRecordHelper();
-        sam.setHeader(null);
-        sam.setMateReferenceIndex(3);
-    }
-
-    @Test(expectedExceptions=IllegalStateException.class)
-    public void testNullHeaderGetMateReferenceIndex() {
-        final SAMRecord sam = createTestRecordHelper();
-        sam.setMateReferenceName("chr1");
-        sam.setHeader(null);
-        // getMateReferenceIndex with null header throws
-        sam.getMateReferenceIndex();
-    }
-
-    @Test(expectedExceptions=IllegalStateException.class)
-    public void testNullHeaderForceMateIndexResolutionFailure() {
-        // force the internal SAMRecord reference index value to null initial state
-        final SAMRecord sam = new SAMRecord(null);
-        sam.setMateReferenceName("unresolvable");
-        sam.getMateReferenceIndex();
-    }
-
-    @Test
-    public void testNullHeaderGetReadGroup() {
-        final SAMRecord sam = createTestRecordHelper();
-        Assert.assertTrue(null != sam.getHeader());
-
-        Assert.assertTrue(null != sam.getReadGroup() && sam.getReadGroup().getId().equals("1"));
-        sam.setHeader(null);
-        Assert.assertNull(sam.getReadGroup());
-    }
-
-    @Test(dataProvider = "serializationTestData")
-    public void testNullHeaderSerialization(final File inputFile) throws Exception {
-        final SamReader reader = SamReaderFactory.makeDefault().open(inputFile);
-        final SAMRecord initialSAMRecord = reader.iterator().next();
-        reader.close();
-
-        initialSAMRecord.setHeader(null);
-        final SAMRecord deserializedSAMRecord = TestUtil.serializeAndDeserialize(initialSAMRecord);
-        Assert.assertEquals(deserializedSAMRecord, initialSAMRecord, "Deserialized SAMRecord not equal to original SAMRecord");
-    }
-
-
-    @Test
-    public void testValidateNonsenseCigar(){
-        // Create nonsense record
-        SAMRecord rec = createTestRecordHelper();
-        rec.setCigarString("nonsense");
-
-        // The default validationStringency of a SAMRecord is SILENT; set it to STRICT for this test.
-        rec.setValidationStringency(ValidationStringency.STRICT);
-        // Validate record
-        List<SAMValidationError> err = rec.validateCigar(-1);
-
-        Assert.assertNotNull(err);
-        Assert.assertEquals(err.size(), 1);
-        Assert.assertEquals(err.get(0).getType(), SAMValidationError.Type.INVALID_CIGAR);
-    }
-
-    @Test
-    public void testNullHeaderRecordValidation() {
-        final SAMRecord sam = createTestRecordHelper();
-        sam.setHeader(null);
-        List<SAMValidationError> validationErrors = sam.isValid(false);
-        Assert.assertTrue(validationErrors == null);
-    }
-
-    @Test
-    public void testNullHeaderDeepCopy() {
-        SAMRecord sam = createTestRecordHelper();
-        sam.setHeader(null);
-        final SAMRecord deepCopy = sam.deepCopy();
-
-        Assert.assertTrue(sam.equals(deepCopy));
-    }
-
-    private void testNullHeaderCigar(SAMRecord rec) {
-        Cigar origCigar = rec.getCigar();
-        Assert.assertNotNull(origCigar);
-        String originalCigarString = rec.getCigarString();
-
-        // set the cigar to null and then reset the cigar string in order to force getCigar to decode it
-        rec.setCigar(null);
-        Assert.assertNull(rec.getCigar());
-        rec.setCigarString(originalCigarString);
-        rec.setValidationStringency(ValidationStringency.STRICT);
-        rec.setHeader(null);
-        Assert.assertTrue(rec.getValidationStringency() == ValidationStringency.STRICT);
-
-        // force getCigar to decode the cigar string and verify that SAMRecord doesn't try to validate it against the (null) header
-        Cigar cig = rec.getCigar();
-        Assert.assertNotNull(cig);
-        String cigString = TextCigarCodec.encode(cig);
-        Assert.assertEquals(cigString, originalCigarString);
-    }
-
-    @Test
-    public void testNullHeadGetCigarSAM() {
-        SAMRecord sam = createTestRecordHelper();
-        testNullHeaderCigar(sam);
-    }
-
-    @Test
-    public void testNullHeadGetCigarBAM() {
-        SAMRecord sam = createTestRecordHelper();
-        SAMRecordFactory factory = new DefaultSAMRecordFactory();
-        BAMRecord bamRec = factory.createBAMRecord(
-                sam.getHeader(),
-                sam.getReferenceIndex(),
-                sam.getAlignmentStart(),
-                (short) sam.getReadNameLength(),
-                (short) sam.getMappingQuality(),
-                0,
-                sam.getCigarLength(),
-                sam.getFlags(),
-                sam.getReadLength(),
-                sam.getMateReferenceIndex(),
-                sam.getMateAlignmentStart(),
-                0, null);
-
-        bamRec.setCigarString(sam.getCigarString());
-
-        testNullHeaderCigar(bamRec);
-    }
-
-    @Test
-    public void testSetHeaderStrictValid() {
-        SAMRecord sam = createTestRecordHelper();
-        final SAMFileHeader samHeader = sam.getHeader();
-        Integer originalRefIndex = sam.getReferenceIndex();
-        Assert.assertTrue(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX != originalRefIndex);
-
-        // force re-resolution of the reference name
-        sam.setHeaderStrict(samHeader);
-        Assert.assertEquals(sam.getReferenceIndex(), originalRefIndex);
-    }
-
-    @Test
-    public void testSetHeaderStrictValidHeaderless() {
-        SAMRecord sam = createTestRecordHelper();
-        final SAMFileHeader samHeader = sam.getHeader();
-        Integer originalRefIndex = sam.getReferenceIndex();
-        Assert.assertTrue(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX != originalRefIndex);
-
-        sam.setHeader(null);
-        // force re-resolution of the reference name
-        sam.setHeaderStrict(samHeader);
-        Assert.assertEquals(sam.getReferenceIndex(), originalRefIndex);
-    }
-
-    @Test
-    public void testSetHeaderStrictValidNewHeader() {
-        final SAMRecord sam = createTestRecordHelper();
-        final String origSequenceName = sam.getContig();
-
-        final SAMFileHeader origSamHeader = sam.getHeader();
-        final int origSequenceLength = origSamHeader.getSequence(origSequenceName).getSequenceLength();
-        final SAMFileHeader newHeader = new SAMFileHeader();
-        newHeader.addSequence(new SAMSequenceRecord(origSequenceName, origSequenceLength));
-
-        // force re-resolution of the reference name against the new header
-        sam.setHeaderStrict(newHeader);
-        Assert.assertEquals(sam.getReferenceIndex(), new Integer(0));
-    }
-
-    @Test(expectedExceptions=IllegalArgumentException.class)
-    public void testSetHeaderStrictInvalidReference() {
-        SAMRecord sam = createTestRecordHelper();
-        final SAMFileHeader samHeader = sam.getHeader();
-
-        sam.setReferenceName("unresolvable");
-        Assert.assertEquals(new Integer(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX), sam.getReferenceIndex());
-
-        // throw on force re-resolution of the unresolvable reference name
-        sam.setHeaderStrict(samHeader);
-    }
-
-    @Test(expectedExceptions=IllegalArgumentException.class)
-    public void testSetHeaderStrictInvalidMateReference() {
-        SAMRecord sam = createTestRecordHelper();
-        final SAMFileHeader samHeader = sam.getHeader();
-
-        sam.setMateReferenceName("unresolvable");
-        Assert.assertEquals(new Integer(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX), sam.getMateReferenceIndex());
-
-        // throw on force re-resolution of the unresolvable mate reference name
-        sam.setHeaderStrict(samHeader);
-    }
-
-    @Test
-    public void testSetHeaderStrictNull() {
-        SAMRecord sam = createTestRecordHelper();
-        Assert.assertNotNull(sam.getHeader());
-        sam.setHeaderStrict(null);
-        Assert.assertNull(sam.getHeader());
-        Assert.assertNull(sam.mReferenceIndex);
-    }
-
-    // resolveIndexFromName
-
-    @Test
-    public void testResolveIndexResolvable() {
-        final SAMRecord sam = createTestRecordHelper();
-        final SAMFileHeader samHeader = sam.getHeader();
-        final String contigName = sam.getContig();
-        Assert.assertEquals(SAMRecord.resolveIndexFromName(contigName, samHeader, true), new Integer(samHeader.getSequenceIndex(contigName)));
-    }
-
-    @Test(expectedExceptions=IllegalStateException.class)
-    public void testResolveIndexUnresolvableNullHeader() {
-        SAMRecord.resolveIndexFromName("unresolvable", null, false);
-    }
-
-    @Test(expectedExceptions=IllegalArgumentException.class)
-    public void testResolveIndexUnresolvableStrict() {
-        final SAMFileHeader samHeader = new SAMFileHeader();
-        SAMRecord.resolveIndexFromName("unresolvable", samHeader, true);
-    }
-
-    @Test
-    public void testResolveIndexUnresolvableNotStrict() {
-        final SAMFileHeader samHeader = new SAMFileHeader();
-        Assert.assertEquals(SAMRecord.resolveIndexFromName("unresolvable", samHeader, false), null);
-    }
-
-    @Test
-    public void testResolveIndexNoAlignment() {
-        final SAMFileHeader samHeader = new SAMFileHeader();
-        Assert.assertEquals(SAMRecord.resolveIndexFromName(
-                SAMRecord.NO_ALIGNMENT_REFERENCE_NAME, samHeader, true), new Integer(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX));
-    }
-
-    @Test(expectedExceptions=IllegalStateException.class)
-    public void testResolveIndexNullHeader() {
-        SAMRecord.resolveIndexFromName("unresolvable", null, true);
-    }
-
-    // resolveNameFromIndex
-
-    @Test
-    public void testResolveNameResolvable() {
-        final SAMRecord sam = createTestRecordHelper();
-        final SAMFileHeader samHeader = sam.getHeader();
-        final String contigName = sam.getContig();
-        final Integer contigIndex = samHeader.getSequenceIndex(contigName);
-        Assert.assertEquals(SAMRecord.resolveNameFromIndex(contigIndex, samHeader), contigName);
-    }
-
-    @Test(expectedExceptions=IllegalArgumentException.class)
-    public void testResolveNameUnresolvable() {
-        final SAMFileHeader samHeader = new SAMFileHeader();
-        SAMRecord.resolveNameFromIndex(99, samHeader);
-    }
-
-    @Test
-    public void testResolveNameNoAlignment() {
-        final SAMFileHeader samHeader = new SAMFileHeader();
-        Assert.assertEquals(SAMRecord.resolveNameFromIndex(
-                SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX, samHeader), SAMRecord.NO_ALIGNMENT_REFERENCE_NAME);
-    }
-
-    @Test(expectedExceptions=IllegalStateException.class)
-    public void testResolveNameNullHeader() {
-        SAMRecord.resolveNameFromIndex(1, null);
-    }
-
-}
\ No newline at end of file
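
The SAMRecord tests deleted above all revolve around one behaviour: a record resolves reference names to reference indices through its header's sequence dictionary, and falls back to NO_ALIGNMENT_REFERENCE_INDEX (or throws, when no header is attached) when it cannot. A minimal Java sketch of that pattern; the contig name "chrTest" and the class name ReferenceResolutionSketch are illustrative, not part of htsjdk:

    import htsjdk.samtools.SAMFileHeader;
    import htsjdk.samtools.SAMRecord;
    import htsjdk.samtools.SAMSequenceRecord;

    public class ReferenceResolutionSketch {
        public static void main(final String[] args) {
            // Header whose sequence dictionary contains a single contig.
            final SAMFileHeader header = new SAMFileHeader();
            header.addSequence(new SAMSequenceRecord("chrTest", 1000));

            // A record created with this header resolves names to indices via that dictionary.
            final SAMRecord rec = new SAMRecord(header);
            rec.setReferenceName("chrTest");
            System.out.println(rec.getReferenceIndex());   // 0

            // An unresolvable name keeps the name but leaves the index at NO_ALIGNMENT_REFERENCE_INDEX.
            rec.setReferenceName("unresolvableName");
            System.out.println(rec.getReferenceIndex().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX)); // true
        }
    }
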
diff --git a/src/tests/java/htsjdk/samtools/SAMTextWriterTest.java b/src/tests/java/htsjdk/samtools/SAMTextWriterTest.java
deleted file mode 100644
index 46ce5be..0000000
--- a/src/tests/java/htsjdk/samtools/SAMTextWriterTest.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools;
-
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-
-public class SAMTextWriterTest {
-
-    private SAMRecordSetBuilder getSAMReader(final boolean sortForMe, final SAMFileHeader.SortOrder sortOrder) {
-        final SAMRecordSetBuilder ret = new SAMRecordSetBuilder(sortForMe, sortOrder);
-        ret.addPair("readB", 20, 200, 300);
-        ret.addPair("readA", 20, 100, 150);
-        ret.addFrag("readC", 20, 140, true);
-        ret.addFrag("readD", 20, 140, false);
-        return ret;
-    }
-
-    @Test
-    public void testBasic() throws Exception {
-        doTest(getSAMReader(true, SAMFileHeader.SortOrder.coordinate));
-    }
-
-    @Test
-    public void testNullHeader() throws Exception {
-        final SAMRecordSetBuilder recordSetBuilder = getSAMReader(true, SAMFileHeader.SortOrder.coordinate);
-        for (final SAMRecord rec : recordSetBuilder.getRecords()) {
-            rec.setHeader(null);
-        }
-        doTest(recordSetBuilder);
-    }
-
-    private void doTest(final SAMRecordSetBuilder recordSetBuilder) throws Exception{
-        SamReader inputSAM = recordSetBuilder.getSamReader();
-        final File samFile = File.createTempFile("tmp.", ".sam");
-        samFile.deleteOnExit();
-        final Map<String, Object> tagMap = new HashMap<String, Object>();
-        tagMap.put("XC", new Character('q'));
-        tagMap.put("XI", 12345);
-        tagMap.put("XF", 1.2345f);
-        tagMap.put("XS", "Hi,Mom!");
-        for (final Map.Entry<String, Object> entry : tagMap.entrySet()) {
-            inputSAM.getFileHeader().setAttribute(entry.getKey(), entry.getValue().toString());
-        }
-        final SAMFileWriter samWriter = new SAMFileWriterFactory().makeSAMWriter(inputSAM.getFileHeader(), false, samFile);
-        for (final SAMRecord samRecord : inputSAM) {
-            samWriter.addAlignment(samRecord);
-        }
-        samWriter.close();
-
-        // Read it back in and confirm that it matches the input
-        inputSAM = recordSetBuilder.getSamReader();
-        // Stuff in the attributes again since this has been created again.
-        for (final Map.Entry<String, Object> entry : tagMap.entrySet()) {
-            inputSAM.getFileHeader().setAttribute(entry.getKey(), entry.getValue().toString());
-        }
-
-        final SamReader newSAM = SamReaderFactory.makeDefault().open(samFile);
-        Assert.assertEquals(newSAM.getFileHeader(), inputSAM.getFileHeader());
-        final Iterator<SAMRecord> inputIt = inputSAM.iterator();
-        final Iterator<SAMRecord> newSAMIt = newSAM.iterator();
-        while (inputIt.hasNext()) {
-            Assert.assertTrue(newSAMIt.hasNext());
-            final SAMRecord inputSAMRecord = inputIt.next();
-            final SAMRecord newSAMRecord = newSAMIt.next();
-
-            // Force reference index attributes to be populated
-            inputSAMRecord.getReferenceIndex();
-            newSAMRecord.getReferenceIndex();
-            inputSAMRecord.getMateReferenceIndex();
-            newSAMRecord.getMateReferenceIndex();
-
-            // Force these to be equal
-            newSAMRecord.setIndexingBin(inputSAMRecord.getIndexingBin());
-
-            Assert.assertEquals(newSAMRecord, inputSAMRecord);
-        }
-        Assert.assertFalse(newSAMIt.hasNext());
-        inputSAM.close();
-    }
-}
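
The SAMTextWriterTest removed above is essentially a SAM text round trip: build a few records, write them with a SAMFileWriterFactory, then read the file back with a SamReaderFactory and compare. A rough sketch of that round trip; read names, coordinates and the class name SamTextRoundTripSketch are illustrative only:

    import htsjdk.samtools.SAMFileHeader;
    import htsjdk.samtools.SAMFileWriter;
    import htsjdk.samtools.SAMFileWriterFactory;
    import htsjdk.samtools.SAMRecord;
    import htsjdk.samtools.SAMRecordSetBuilder;
    import htsjdk.samtools.SamReader;
    import htsjdk.samtools.SamReaderFactory;

    import java.io.File;

    public class SamTextRoundTripSketch {
        public static void main(final String[] args) throws Exception {
            // Build a small, coordinate-sorted record set, as the deleted test does.
            final SAMRecordSetBuilder builder = new SAMRecordSetBuilder(true, SAMFileHeader.SortOrder.coordinate);
            builder.addPair("readA", 20, 100, 150);
            builder.addFrag("readC", 20, 140, true);

            final File samFile = File.createTempFile("roundtrip.", ".sam");
            samFile.deleteOnExit();

            // Write the records out as SAM text.
            final SamReader input = builder.getSamReader();
            final SAMFileWriter writer = new SAMFileWriterFactory().makeSAMWriter(input.getFileHeader(), false, samFile);
            for (final SAMRecord rec : input) {
                writer.addAlignment(rec);
            }
            writer.close();
            input.close();

            // Read the file back and count the records.
            final SamReader readBack = SamReaderFactory.makeDefault().open(samFile);
            int n = 0;
            for (final SAMRecord ignored : readBack) {
                n++;
            }
            readBack.close();
            System.out.println(n + " records read back");
        }
    }
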
diff --git a/src/tests/java/htsjdk/samtools/SamFileHeaderMergerTest.java b/src/tests/java/htsjdk/samtools/SamFileHeaderMergerTest.java
deleted file mode 100644
index b241ec3..0000000
--- a/src/tests/java/htsjdk/samtools/SamFileHeaderMergerTest.java
+++ /dev/null
@@ -1,265 +0,0 @@
-/**
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- **/
-
-
-package htsjdk.samtools;
-
-import htsjdk.samtools.util.CloserUtil;
-import htsjdk.samtools.util.IOUtil;
-import htsjdk.samtools.util.SequenceUtil;
-import htsjdk.samtools.util.StringUtil;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.BufferedReader;
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-
-import static org.testng.Assert.assertEquals;
-
-
-/**
- * @author aaron
- * @version 1.0
- * @date May 20, 2009
- * <p/>
- * Class SamFileHeaderMergerTest
- * <p/>
- * Tests the ability of the SamFileHeaderMerger class to merge sequence dictionaries.
- */
-public class SamFileHeaderMergerTest {
-
-    private static File TEST_DATA_DIR = new File("testdata/htsjdk/samtools");
-
-    /** tests that if we've set the merging to false, we get a SAMException for bam's with different dictionaries. */
-    @Test(expectedExceptions = SequenceUtil.SequenceListsDifferException.class)
-    public void testMergedException() {
-        final File[] INPUT = {new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/Chromosome1to10.bam"),
-                new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/Chromosome5to9.bam")};
-        final List<SAMFileHeader> headers = new ArrayList<SAMFileHeader>();
-        for (final File inFile : INPUT) {
-            IOUtil.assertFileIsReadable(inFile);
-            headers.add(SamReaderFactory.makeDefault().getFileHeader(inFile));
-        }
-        new SamFileHeaderMerger(SAMFileHeader.SortOrder.unsorted, headers, false);
-    }
-
-    /** Tests that we can successfully merge two files with different sequence dictionaries. */
-    @Test
-    public void testMerging() {
-        final File[] INPUT = {new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/Chromosome1to10.bam"),
-                new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/Chromosome5to9.bam")};
-        final List<SamReader> readers = new ArrayList<SamReader>();
-        final List<SAMFileHeader> headers = new ArrayList<SAMFileHeader>();
-        for (final File inFile : INPUT) {
-            IOUtil.assertFileIsReadable(inFile);
-            // We are now checking for zero-length reads, so suppress complaint about that.
-            final SamReader in = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT).open(inFile);
-
-            readers.add(in);
-            headers.add(in.getFileHeader());
-        }
-        final MergingSamRecordIterator iterator;
-        final SamFileHeaderMerger headerMerger = new SamFileHeaderMerger(SAMFileHeader.SortOrder.unsorted, headers, true);
-        iterator = new MergingSamRecordIterator(headerMerger, readers, false);
-        headerMerger.getMergedHeader();
-
-        // count the total reads, and record read counts for each sequence
-        Map<Integer, Integer> seqCounts = new HashMap<Integer, Integer>();
-        int totalCount = 0;
-
-        while (iterator.hasNext()) {
-            SAMRecord r = iterator.next();
-            if (seqCounts.containsKey(r.getReferenceIndex())) {
-                seqCounts.put(r.getReferenceIndex(), seqCounts.get(r.getReferenceIndex()) + 1);
-            } else {
-                seqCounts.put(r.getReferenceIndex(), 1);
-            }
-            ++totalCount;
-        }
-        assertEquals(totalCount, 1500);
-        for (Integer i : seqCounts.keySet()) {
-            if (i < 4 || i > 8) {
-                // sequences outside 5 - 9 (indices 4 - 8) appear in only one input, so expect 100 reads
-                assertEquals(seqCounts.get(i).intValue(), 100);
-            } else {
-                // sequences 5 - 9 appear in both inputs, so expect 200 reads
-                assertEquals(seqCounts.get(i).intValue(), 200);
-            }
-        }
-        CloserUtil.close(readers);
-    }
-
-    private static final String sq1 = "@SQ\tSN:chr1\tLN:1000\n";
-    private static final String sq2 = "@SQ\tSN:chr2\tLN:1000\n";
-    private static final String sq3 = "@SQ\tSN:chr3\tLN:1000\n";
-    private static final String sq4 = "@SQ\tSN:chr4\tLN:1000\n";
-    private static final String sq5 = "@SQ\tSN:chr5\tLN:1000\n";
-
-    @Test
-    public void testSequenceDictionaryMerge() {
-        final String sd1 = sq1 + sq2 + sq5;
-        final String sd2 = sq2 + sq3 + sq4;
-        SamReader reader1 = SamReaderFactory.makeDefault().open(SamInputResource.of(new ByteArrayInputStream(StringUtil.stringToBytes(sd1))));
-        SamReader reader2 = SamReaderFactory.makeDefault().open(SamInputResource.of(new ByteArrayInputStream(StringUtil.stringToBytes(sd2))));
-        final List<SAMFileHeader> inputHeaders = Arrays.asList(reader1.getFileHeader(), reader2.getFileHeader());
-        SamFileHeaderMerger merger = new SamFileHeaderMerger(SAMFileHeader.SortOrder.coordinate, inputHeaders, true);
-        final SAMFileHeader mergedHeader = merger.getMergedHeader();
-        for (final SAMFileHeader inputHeader : inputHeaders) {
-            int prevTargetIndex = -1;
-            for (final SAMSequenceRecord sequenceRecord : inputHeader.getSequenceDictionary().getSequences()) {
-                final int targetIndex = mergedHeader.getSequenceIndex(sequenceRecord.getSequenceName());
-                Assert.assertNotSame(targetIndex, -1);
-                Assert.assertTrue(prevTargetIndex < targetIndex);
-                prevTargetIndex = targetIndex;
-            }
-        }
-        CloserUtil.close(reader1);
-        CloserUtil.close(reader2);
-    }
-
-    @Test(dataProvider = "data")
-    public void testProgramGroupAndReadGroupMerge(final File[] inputFiles, final File expectedOutputFile) throws IOException {
-
-        BufferedReader reader = new BufferedReader(new FileReader(expectedOutputFile));
-
-        String line;
-        String expected_output = "";
-        while ((line = reader.readLine()) != null) {
-            expected_output += line + "\n";
-        }
-
-        final List<SamReader> readers = new ArrayList<SamReader>();
-        final List<SAMFileHeader> headers = new ArrayList<SAMFileHeader>();
-        for (final File inFile : inputFiles) {
-            IOUtil.assertFileIsReadable(inFile);
-
-            // We are now checking for zero-length reads, so suppress complaint about that.
-            final SamReader in = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT).open(inFile);
-            readers.add(in);
-            headers.add(in.getFileHeader());
-        }
-        final MergingSamRecordIterator iterator;
-
-        final SamFileHeaderMerger headerMerger = new SamFileHeaderMerger(SAMFileHeader.SortOrder.coordinate, headers, true);
-        iterator = new MergingSamRecordIterator(headerMerger, readers, false);
-
-        ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        SAMFileWriter writer = new SAMFileWriterFactory().makeSAMWriter(headerMerger.getMergedHeader(), true, baos);
-        while (iterator.hasNext()) {
-            writer.addAlignment(iterator.next());
-        }
-        writer.close();
-
-        String actual_output = StringUtil.bytesToString(baos.toByteArray());
-
-        List<String> actual = Arrays.asList(actual_output.split("\\n"));
-        List<String> expected = Arrays.asList(expected_output.split("\\n"));
-        for (int i = 0; i < expected.size(); i++) {
-            if (expected.get(i).startsWith("@")) {
-                Assert.assertTrue(headersEquivalent(actual.get(i), expected.get(i)));
-            } else {
-                // split on runs of whitespace ("\\s+"); "\\s*" would split between every character
-                List<String> expectedSamParts = Arrays.asList(expected.get(i).split("\\s+"));
-                List<String> actualSamParts = Arrays.asList(actual.get(i).split("\\s+"));
-                for (String exp : expectedSamParts) {
-                    Assert.assertTrue(actualSamParts.contains(exp));
-                }
-                for (String act : actualSamParts) {
-                    Assert.assertTrue(expectedSamParts.contains(act));
-                }
-            }
-        }
-        CloserUtil.close(readers);
-    }
-
-    private static final boolean headersEquivalent(String a, String b) {
-        if (a.length() != b.length()) return false;
-        List<String> remaining = new LinkedList<String>(Arrays.asList(a.split("\\t")));
-        for (final String item : b.split("\\t")) {
-            if (!remaining.remove(item)) return false;
-        }
-        return remaining.isEmpty(); 
-    }
-
-    @DataProvider(name = "data")
-    private Object[][] getProgramGroupAndReadGroupMergeData() {
-
-        return new Object[][]{
-                {
-
-                        new File[]{
-                                new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/case1/chr11sub_file1.sam"),
-                                new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/case1/chr11sub_file2.sam")},
-                        new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/case1/expected_output.sam")
-                }, {
-                new File[]{
-                        new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/case2/chr11sub_file1.sam"),
-                        new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/case2/chr11sub_file2.sam"),
-                        new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/case2/chr11sub_file3.sam"),
-                        new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/case2/chr11sub_file4.sam")},
-                new File(TEST_DATA_DIR, "SamFileHeaderMergerTest/case2/expected_output.sam")
-        }
-        };
-    }
-
-    @Test(expectedExceptions = {SAMException.class})
-    public void testUnmergeableSequenceDictionary() {
-        final String sd1 = sq1 + sq2 + sq5;
-        final String sd2 = sq2 + sq3 + sq4 + sq1;
-        final SamReader reader1 = SamReaderFactory.makeDefault().open(SamInputResource.of(new ByteArrayInputStream(StringUtil.stringToBytes(sd1))));
-        final SamReader reader2 = SamReaderFactory.makeDefault().open(SamInputResource.of(new ByteArrayInputStream(StringUtil.stringToBytes(sd2))));
-        final List<SAMFileHeader> inputHeaders = Arrays.asList(reader1.getFileHeader(), reader2.getFileHeader());
-        new SamFileHeaderMerger(SAMFileHeader.SortOrder.coordinate, inputHeaders, true);
-        CloserUtil.close(reader1);
-        CloserUtil.close(reader2);
-    }
-
-    @DataProvider(name = "fourDigitBase36StrPositiveData")
-    public Object[][] positiveFourDigitBase36StrData() {
-        return new Object[][]{
-                {0, "0"},
-                {15, "F"},
-                {36, "10"},
-                {1200000, "PPXC"},
-                {36 * 36 * 36 * 36 - 2, "ZZZY"},
-                {36 * 36 * 36 * 36 - 1, "ZZZZ"},
-        };
-    }
-
-    @Test(dataProvider = "fourDigitBase36StrPositiveData")
-    public void fourDigitBase36StrPositiveTest(final int toConvert, final String expectedValue) {
-        final SamFileHeaderMerger headerMerger = new SamFileHeaderMerger(SAMFileHeader.SortOrder.coordinate, new ArrayList<SAMFileHeader>(), true);
-        Assert.assertEquals(expectedValue, headerMerger.positiveFourDigitBase36Str(toConvert));
-    }
-}
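
SamFileHeaderMergerTest above drives the header-merging path: collect the input headers, merge them with SamFileHeaderMerger, then walk the combined stream with a MergingSamRecordIterator. A condensed sketch of that usage; a.bam and b.bam are placeholder inputs assumed to have mergeable sequence dictionaries:

    import htsjdk.samtools.MergingSamRecordIterator;
    import htsjdk.samtools.SAMFileHeader;
    import htsjdk.samtools.SamFileHeaderMerger;
    import htsjdk.samtools.SamReader;
    import htsjdk.samtools.SamReaderFactory;
    import htsjdk.samtools.util.CloserUtil;

    import java.io.File;
    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class HeaderMergeSketch {
        public static void main(final String[] args) {
            final List<File> inputs = Arrays.asList(new File("a.bam"), new File("b.bam"));

            final List<SamReader> readers = new ArrayList<SamReader>();
            final List<SAMFileHeader> headers = new ArrayList<SAMFileHeader>();
            for (final File f : inputs) {
                final SamReader reader = SamReaderFactory.makeDefault().open(f);
                readers.add(reader);
                headers.add(reader.getFileHeader());
            }

            // The final argument asks the merger to reconcile differing sequence dictionaries.
            final SamFileHeaderMerger merger =
                    new SamFileHeaderMerger(SAMFileHeader.SortOrder.unsorted, headers, true);
            final MergingSamRecordIterator iterator = new MergingSamRecordIterator(merger, readers, false);

            int count = 0;
            while (iterator.hasNext()) {
                iterator.next();
                count++;
            }
            System.out.println(count + " merged records");
            CloserUtil.close(readers);
        }
    }
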
diff --git a/src/tests/java/htsjdk/samtools/SamFilesTest.java b/src/tests/java/htsjdk/samtools/SamFilesTest.java
deleted file mode 100644
index a7e2fa7..0000000
--- a/src/tests/java/htsjdk/samtools/SamFilesTest.java
+++ /dev/null
@@ -1,81 +0,0 @@
-package htsjdk.samtools;
-
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * Test valid combinations of bam/cram vs bai/crai files.
- * Created by vadim on 10/08/2015.
- */
-public class SamFilesTest {
-    private static final String TEST_DATA = "testdata/htsjdk/samtools/BAMFileIndexTest/";
-    private static final File BAM_FILE = new File(TEST_DATA + "index_test.bam");
-
-    @DataProvider(name = "FindIndexParams")
-    public static Object[][] paramsFindIndexForSuffixes() {
-        return new Object[][]{
-                // no index available sanity checks:
-                {".tmp", null, null},
-                {".bam", null, null},
-                {".cram", null, null},
-
-                // legit cases for BAM files:
-                {".bam", ".bai", ".bai"},
-                {".bam", ".bam.bai", ".bam.bai"},
-
-                // legit cases for CRAM files:
-                {".cram", ".cram.bai", ".cram.bai"},
-                {".cram", ".cram.crai", ".cram.crai"},
-
-                // special prohibited cases:
-                {".bam", ".crai", null},
-                {".tmp", ".crai", null},
-        };
-    }
-
-    @Test(dataProvider = "FindIndexParams")
-    public void testFindIndexForSuffixes(final String dataFileSuffix, final String indexFileSuffix, final String expectIndexSuffix) throws IOException {
-        final File dataFile = File.createTempFile("test", dataFileSuffix);
-        dataFile.deleteOnExit();
-        Assert.assertNull(SamFiles.findIndex(dataFile));
-
-        File indexFile = null;
-        if (indexFileSuffix != null) {
-            indexFile = new File(dataFile.getAbsolutePath().replaceFirst("\\.\\S+$", indexFileSuffix));
-            indexFile.createNewFile();
-            indexFile.deleteOnExit();
-        }
-
-        final File foundIndexFile = SamFiles.findIndex(dataFile);
-        if (expectIndexSuffix == null) {
-            Assert.assertNull(foundIndexFile);
-            return;
-        }
-
-        Assert.assertNotNull(foundIndexFile);
-        Assert.assertTrue(foundIndexFile.getName().endsWith(expectIndexSuffix));
-    }
-
-    @DataProvider(name = "filesAndIndicies")
-    public Object[][] getFilesAndIndicies() throws IOException {
-
-        final File REAL_INDEX_FILE = new File(BAM_FILE + ".bai"); //test regular file
-        final File SYMLINKED_BAM_WITH_SYMLINKED_INDEX = new File(TEST_DATA, "symlink_with_index.bam");
-
-        return new Object[][]{
-                {BAM_FILE, REAL_INDEX_FILE},
-                {SYMLINKED_BAM_WITH_SYMLINKED_INDEX, new File(SYMLINKED_BAM_WITH_SYMLINKED_INDEX + ".bai")},
-                {new File(TEST_DATA, "symlink_without_linked_index.bam"), REAL_INDEX_FILE.getCanonicalFile()},
-                {new File(TEST_DATA, "FileThatDoesntExist"), null}
-        };
-    }
-
-    @Test(dataProvider = "filesAndIndicies")
-    public void testIndexSymlinking(File bam, File expected_index) {
-        Assert.assertEquals(SamFiles.findIndex(bam), expected_index);
-    }
-}
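
SamFilesTest above exercises SamFiles.findIndex, which looks for a companion .bai/.crai next to a BAM or CRAM file and returns null when none can be found. A tiny sketch; reads.bam is a placeholder path:

    import htsjdk.samtools.SamFiles;

    import java.io.File;

    public class FindIndexSketch {
        public static void main(final String[] args) {
            // Returns the neighbouring index file (e.g. reads.bai or reads.bam.bai), or null if none exists.
            final File index = SamFiles.findIndex(new File("reads.bam"));
            System.out.println(index == null ? "no index found" : index.getPath());
        }
    }
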
diff --git a/src/tests/java/htsjdk/samtools/SamIndexesTest.java b/src/tests/java/htsjdk/samtools/SamIndexesTest.java
deleted file mode 100644
index 7e0bc85..0000000
--- a/src/tests/java/htsjdk/samtools/SamIndexesTest.java
+++ /dev/null
@@ -1,192 +0,0 @@
-package htsjdk.samtools;
-
-import htsjdk.samtools.cram.CRAIEntry;
-import htsjdk.samtools.cram.CRAIIndex;
-import htsjdk.samtools.seekablestream.SeekableFileStream;
-import htsjdk.samtools.seekablestream.SeekableMemoryStream;
-import htsjdk.samtools.seekablestream.SeekableStream;
-import htsjdk.samtools.util.IOUtil;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.zip.GZIPOutputStream;
-
-public class SamIndexesTest {
-
-    @Test
-    public void testEmptyBai() throws IOException {
-        final File baiFile = File.createTempFile("test", ".bai");
-        baiFile.deleteOnExit();
-        final FileOutputStream fos = new FileOutputStream(baiFile);
-        fos.write(SamIndexes.BAI.magic);
-        fos.close();
-
-
-        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        baos.write(SamIndexes.BAI.magic);
-        baos.close();
-
-        final InputStream inputStream = SamIndexes.asBaiStreamOrNull(new ByteArrayInputStream(baos.toByteArray()), null);
-        for (final byte b : SamIndexes.BAI.magic) {
-            Assert.assertEquals(inputStream.read(), 0xFF & b);
-        }
-    }
-
-    @Test(expectedExceptions = NullPointerException.class)
-    public void testCraiRequiresDictionary() throws IOException {
-        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        final GZIPOutputStream gos = new GZIPOutputStream(baos);
-        gos.close();
-
-        SamIndexes.asBaiStreamOrNull(new ByteArrayInputStream(baos.toByteArray()), null);
-    }
-
-    @Test
-    public void testEmptyCraiReadAsBai() throws IOException {
-        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        final GZIPOutputStream gos = new GZIPOutputStream(baos);
-        gos.close();
-
-        final SAMSequenceDictionary dictionary = new SAMSequenceDictionary();
-        dictionary.addSequence(new SAMSequenceRecord("1", 100));
-        final InputStream inputStream = SamIndexes.asBaiStreamOrNull(new ByteArrayInputStream(baos.toByteArray()), dictionary);
-        for (final byte b : SamIndexes.BAI.magic) {
-            Assert.assertEquals(inputStream.read(), 0xFF & b);
-        }
-    }
-
-    @Test
-    public void testCraiInMemory() throws IOException {
-        final List<CRAIEntry> index = new ArrayList<CRAIEntry>();
-        final CRAIEntry entry = new CRAIEntry();
-        entry.sequenceId = 0;
-        entry.alignmentStart = 1;
-        entry.alignmentSpan = 2;
-        entry.sliceOffset = 3;
-        entry.sliceSize = 4;
-        entry.containerStartOffset = 5;
-        index.add(entry);
-
-        ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        final GZIPOutputStream gos = new GZIPOutputStream(baos);
-        CRAIIndex.writeIndex(gos, index);
-        gos.close();
-
-        final SAMSequenceDictionary dictionary = new SAMSequenceDictionary();
-        dictionary.addSequence(new SAMSequenceRecord("1", 100));
-
-        final InputStream baiStream = SamIndexes.asBaiStreamOrNull(new ByteArrayInputStream(baos.toByteArray()), dictionary);
-        Assert.assertNotNull(baiStream);
-
-        baos = new ByteArrayOutputStream();
-        IOUtil.copyStream(baiStream, baos);
-        final CachingBAMFileIndex bamIndex = new CachingBAMFileIndex(new SeekableMemoryStream(baos.toByteArray(), null), dictionary);
-        final BAMFileSpan span = bamIndex.getSpanOverlapping(entry.sequenceId, entry.alignmentStart, entry.alignmentStart);
-        Assert.assertNotNull(span);
-        final long[] coordinateArray = span.toCoordinateArray();
-        Assert.assertEquals(coordinateArray.length, 2);
-        Assert.assertEquals(coordinateArray[0] >> 16, entry.containerStartOffset);
-        Assert.assertEquals(coordinateArray[1] & 0xFFFF, 1);
-    }
-
-    @Test
-    public void testCraiFromFile() throws IOException {
-        final List<CRAIEntry> index = new ArrayList<CRAIEntry>();
-        final CRAIEntry entry = new CRAIEntry();
-        entry.sequenceId = 0;
-        entry.alignmentStart = 1;
-        entry.alignmentSpan = 2;
-        entry.sliceOffset = 3;
-        entry.sliceSize = 4;
-        entry.containerStartOffset = 5;
-        index.add(entry);
-
-        final File file = File.createTempFile("test", ".crai");
-        file.deleteOnExit();
-        final FileOutputStream fos = new FileOutputStream(file);
-        final GZIPOutputStream gos = new GZIPOutputStream(fos);
-        CRAIIndex.writeIndex(gos, index);
-        gos.close();
-
-        final SAMSequenceDictionary dictionary = new SAMSequenceDictionary();
-        dictionary.addSequence(new SAMSequenceRecord("1", 100));
-
-        final SeekableStream baiStream = SamIndexes.asBaiSeekableStreamOrNull(new SeekableFileStream(file), dictionary);
-        Assert.assertNotNull(baiStream);
-
-        final CachingBAMFileIndex bamIndex = new CachingBAMFileIndex(baiStream, dictionary);
-        final BAMFileSpan span = bamIndex.getSpanOverlapping(entry.sequenceId, entry.alignmentStart, entry.alignmentStart);
-        Assert.assertNotNull(span);
-        final long[] coordinateArray = span.toCoordinateArray();
-        Assert.assertEquals(coordinateArray.length, 2);
-        Assert.assertEquals(coordinateArray[0] >> 16, entry.containerStartOffset);
-        Assert.assertEquals(coordinateArray[1] & 0xFFFF, 1);
-    }
-
-    @Test(expectedExceptions = NullPointerException.class)
-    public void testOpenIndexFileAsBaiOrNull_NPE() throws IOException {
-        final SAMSequenceDictionary dictionary = new SAMSequenceDictionary();
-        dictionary.addSequence(new SAMSequenceRecord("1", 100));
-        Assert.assertNull(SamIndexes.openIndexFileAsBaiOrNull(null, dictionary));
-    }
-
-    @Test
-    public void testOpenIndexFileAsBaiOrNull_ReturnsNull() throws IOException {
-        final SAMSequenceDictionary dictionary = new SAMSequenceDictionary();
-        dictionary.addSequence(new SAMSequenceRecord("1", 100));
-        File file = File.createTempFile("test", ".notbai");
-        file.deleteOnExit();
-        Assert.assertNull(SamIndexes.openIndexFileAsBaiOrNull(file, dictionary));
-        file.delete();
-
-        file = File.createTempFile("test", ".notcrai");
-        file.deleteOnExit();
-        Assert.assertNull(SamIndexes.openIndexFileAsBaiOrNull(file, dictionary));
-        file.delete();
-    }
-
-    @Test
-    public void testOpenIndexUrlAsBaiOrNull() throws IOException {
-        final SAMSequenceDictionary dictionary = new SAMSequenceDictionary();
-        dictionary.addSequence(new SAMSequenceRecord("1", 100));
-
-        final List<CRAIEntry> index = new ArrayList<CRAIEntry>();
-        final CRAIEntry entry = new CRAIEntry();
-        entry.sequenceId = 0;
-        entry.alignmentStart = 1;
-        entry.alignmentSpan = 2;
-        entry.sliceOffset = 3;
-        entry.sliceSize = 4;
-        entry.containerStartOffset = 5;
-        index.add(entry);
-
-
-        final File file = File.createTempFile("test", ".crai");
-        file.deleteOnExit();
-        final FileOutputStream fos = new FileOutputStream(file);
-        final GZIPOutputStream gos = new GZIPOutputStream(fos);
-        CRAIIndex.writeIndex(gos, index);
-        gos.close();
-
-        final InputStream baiStream = SamIndexes.openIndexUrlAsBaiOrNull(file.toURI().toURL(), dictionary);
-        Assert.assertNotNull(baiStream);
-
-        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        IOUtil.copyStream(baiStream, baos);
-        final CachingBAMFileIndex bamIndex = new CachingBAMFileIndex(new SeekableMemoryStream(baos.toByteArray(), null), dictionary);
-        final BAMFileSpan span = bamIndex.getSpanOverlapping(entry.sequenceId, entry.alignmentStart, entry.alignmentStart);
-        Assert.assertNotNull(span);
-        final long[] coordinateArray = span.toCoordinateArray();
-        Assert.assertEquals(coordinateArray.length, 2);
-        Assert.assertEquals(coordinateArray[0] >> 16, entry.containerStartOffset);
-        Assert.assertEquals(coordinateArray[1] & 0xFFFF, 1);
-    }
-}
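
SamIndexesTest above shows a gzip-compressed .crai index being consumed as if it were a .bai once a sequence dictionary is supplied. A sketch of that conversion; because the deleted test lives in the htsjdk.samtools package itself, the sketch assumes the same package (or that SamIndexes is publicly visible in your htsjdk version), and reads.crai plus the single-contig dictionary are placeholders:

    package htsjdk.samtools; // assumption: same package as the deleted test

    import java.io.FileInputStream;
    import java.io.InputStream;

    public class CraiAsBaiSketch {
        public static void main(final String[] args) throws Exception {
            // The dictionary is needed to translate CRAI entries into BAI bins.
            final SAMSequenceDictionary dictionary = new SAMSequenceDictionary();
            dictionary.addSequence(new SAMSequenceRecord("1", 100));

            // Placeholder path to a gzip-compressed CRAM index.
            final InputStream crai = new FileInputStream("reads.crai");
            final InputStream asBai = SamIndexes.asBaiStreamOrNull(crai, dictionary);
            System.out.println(asBai == null ? "not a usable index" : "readable as a BAI stream");
            crai.close();
        }
    }
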
diff --git a/src/tests/java/htsjdk/samtools/SamReaderFactoryTest.java b/src/tests/java/htsjdk/samtools/SamReaderFactoryTest.java
deleted file mode 100644
index fc3d37b..0000000
--- a/src/tests/java/htsjdk/samtools/SamReaderFactoryTest.java
+++ /dev/null
@@ -1,293 +0,0 @@
-package htsjdk.samtools;
-
-import htsjdk.samtools.seekablestream.SeekableHTTPStream;
-import htsjdk.samtools.util.Iterables;
-import htsjdk.samtools.util.Log;
-import htsjdk.samtools.util.RuntimeIOException;
-import htsjdk.samtools.util.StopWatch;
-
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-public class SamReaderFactoryTest {
-    private static final File TEST_DATA_DIR = new File("testdata/htsjdk/samtools");
-
-    private static final Log LOG = Log.getInstance(SamReaderFactoryTest.class);
-
-    @Test(dataProvider = "variousFormatReaderTestCases")
-    public void variousFormatReaderTest(final String inputFile) throws IOException {
-        final File input = new File(TEST_DATA_DIR, inputFile);
-        final SamReader reader = SamReaderFactory.makeDefault().open(input);
-        for (final SAMRecord ignored : reader) {
-        }
-        reader.close();
-    }
-
-    private int countRecordsInQueryInterval(final SamReader reader, final QueryInterval query) {
-        final SAMRecordIterator iter = reader.queryOverlapping(new QueryInterval[] { query });
-        int count = 0;
-        while (iter.hasNext()) {
-            iter.next();
-            count++;
-        }
-        iter.close();
-        return count;
-    }
-
-    // See https://github.com/samtools/htsjdk/issues/76
-    @Test(dataProvider = "queryIntervalIssue76TestCases")
-    public void queryIntervalIssue76(final String sequenceName, final int start, final int end, final int expectedCount) throws IOException {
-        final File input = new File(TEST_DATA_DIR, "issue76.bam");
-        final SamReader reader = SamReaderFactory.makeDefault().open(input);
-        final QueryInterval interval = new QueryInterval(reader.getFileHeader().getSequence(sequenceName).getSequenceIndex(), start, end);
-        Assert.assertEquals(countRecordsInQueryInterval(reader, interval), expectedCount);
-        reader.close();
-    }
-
-    @DataProvider(name = "queryIntervalIssue76TestCases")
-    public Object[][] queryIntervalIssue76TestCases() {
-        return new Object[][]{
-                {"1", 11966, 11966, 2},
-                {"1", 11966, 11967, 2},
-                {"1", 11967, 11967, 1}
-        };
-    }
-
-    @DataProvider(name = "variousFormatReaderTestCases")
-    public Object[][] variousFormatReaderTestCases() {
-        return new Object[][]{
-                {"block_compressed.sam.gz"},
-                {"uncompressed.sam"},
-                {"compressed.sam.gz"},
-                {"compressed.bam"},
-        };
-    }
-
-    // Tests for the SAMRecordFactory usage
-    class SAMRecordFactoryTester extends DefaultSAMRecordFactory {
-        int samRecordsCreated;
-        int bamRecordsCreated;
-
-        public SAMRecord createSAMRecord(final SAMFileHeader header) {
-            ++samRecordsCreated;
-            return super.createSAMRecord(header);
-        }
-
-        public BAMRecord createBAMRecord(final SAMFileHeader header, final int referenceSequenceIndex, final int alignmentStart, final short readNameLength, final short mappingQuality, final int indexingBin, final int cigarLen, final int flags, final int readLen, final int mateReferenceSequenceIndex, final int mateAlignmentStart, final int insertSize, final byte[] variableLengthBlock) {
-            ++bamRecordsCreated;
-            return super.createBAMRecord(header, referenceSequenceIndex, alignmentStart, readNameLength, mappingQuality, indexingBin, cigarLen, flags, readLen, mateReferenceSequenceIndex, mateAlignmentStart, insertSize, variableLengthBlock);
-        }
-    }
-
-    @Test(dataProvider = "variousFormatReaderTestCases")
-    public void samRecordFactoryTest(final String inputFile) throws IOException {
-        final File input = new File(TEST_DATA_DIR, inputFile);
-
-        final SAMRecordFactoryTester recordFactory = new SAMRecordFactoryTester();
-        final SamReaderFactory readerFactory = SamReaderFactory.makeDefault().samRecordFactory(recordFactory);
-        final SamReader reader = readerFactory.open(input);
-
-        int i = 0;
-        for (final SAMRecord ignored : reader) {
-            ++i;
-        }
-        reader.close();
-
-        Assert.assertTrue(i > 0);
-        if (inputFile.endsWith(".sam") || inputFile.endsWith(".sam.gz")) Assert.assertEquals(recordFactory.samRecordsCreated, i);
-        else if (inputFile.endsWith(".bam")) Assert.assertEquals(recordFactory.bamRecordsCreated, i);
-    }
-
-    @Test(expectedExceptions=IllegalStateException.class)
-    public void samRecordFactoryNullHeaderBAMTest() {
-        final SAMRecordFactory recordFactory = new DefaultSAMRecordFactory();
-        recordFactory.createBAMRecord(
-                null, // null header
-                0,
-                0,
-                (short) 0,
-                (short) 0,
-                0,
-                0,
-                0,
-                0,
-                0,
-                0,
-                0,
-                null);
-    }
-
-
-    /**
-     * Unit tests for asserting all permutations of data and index sources read the same records and header.
-     */
-    final File localBam = new File("testdata/htsjdk/samtools/BAMFileIndexTest/index_test.bam");
-    final File localBamIndex = new File("testdata/htsjdk/samtools/BAMFileIndexTest/index_test.bam.bai");
-
-    final URL bamUrl, bamIndexUrl;
-
-    {
-        try {
-            bamUrl = new URL("http://www.broadinstitute.org/~picard/testdata/index_test.bam");
-            bamIndexUrl = new URL("http://www.broadinstitute.org/~picard/testdata/index_test.bam.bai");
-        } catch (final MalformedURLException e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    @DataProvider
-    public Object[][] composeAllPermutationsOfSamInputResource() {
-        final List<SamInputResource> sources = new ArrayList<SamInputResource>();
-        for (final InputResource.Type dataType : InputResource.Type.values()) {
-            if (dataType.equals(InputResource.Type.SRA_ACCESSION))
-                continue;
-
-            sources.add(new SamInputResource(composeInputResourceForType(dataType, false)));
-            for (final InputResource.Type indexType : InputResource.Type.values()) {
-                if (indexType.equals(InputResource.Type.SRA_ACCESSION))
-                    continue;
-
-                sources.add(new SamInputResource(
-                        composeInputResourceForType(dataType, false),
-                        composeInputResourceForType(indexType, true)
-                ));
-            }
-        }
-        final Object[][] data = new Object[sources.size()][];
-        for (final SamInputResource source : sources) {
-            data[sources.indexOf(source)] = new Object[]{source};
-        }
-
-        return data;
-    }
-
-    private InputResource composeInputResourceForType(final InputResource.Type type, final boolean forIndex) {
-        final File f = forIndex ? localBamIndex : localBam;
-        final URL url = forIndex ? bamIndexUrl : bamUrl;
-        switch (type) {
-            case FILE:
-                return new FileInputResource(f);
-            case URL:
-                return new UrlInputResource(url);
-            case SEEKABLE_STREAM:
-                return new SeekableStreamInputResource(new SeekableHTTPStream(url));
-            case INPUT_STREAM:
-                try {
-                    return new InputStreamInputResource(new FileInputStream(f));
-                } catch (final FileNotFoundException e) {
-                    throw new RuntimeIOException(e);
-                }
-            default:
-                throw new IllegalStateException();
-        }
-    }
-
-    final Set<SAMFileHeader> observedHeaders = new HashSet<SAMFileHeader>();
-    final Set<List<SAMRecord>> observedRecordOrdering = new HashSet<List<SAMRecord>>();
-
-    @Test(dataProvider = "composeAllPermutationsOfSamInputResource")
-    public void exhaustInputResourcePermutation(final SamInputResource resource) throws IOException {
-        final SamReader reader = SamReaderFactory.makeDefault().open(resource);
-        LOG.info(String.format("Reading from %s ...", resource));
-        final List<SAMRecord> slurped = Iterables.slurp(reader);
-        final SAMFileHeader fileHeader = reader.getFileHeader();
-        reader.hasIndex();
-        reader.indexing().hasBrowseableIndex();
-        reader.close();
-        
-        /* Ensure all tests have read the same records in the same order or, if this is the first test, set it as the template. */
-        observedHeaders.add(fileHeader);
-        observedRecordOrdering.add(slurped);
-        Assert.assertEquals(observedHeaders.size(), 1, "read different headers than other testcases");
-        Assert.assertEquals(observedRecordOrdering.size(), 1, "read different records than other testcases");
-    }
-
-
-    final Set<List<SAMRecord>> observedRecordOrdering1 = new HashSet<List<SAMRecord>>();
-    final Set<List<SAMRecord>> observedRecordOrdering3 = new HashSet<List<SAMRecord>>();
-    final Set<List<SAMRecord>> observedRecordOrdering20 = new HashSet<List<SAMRecord>>();
-
-    @Test(dataProvider = "composeAllPermutationsOfSamInputResource")
-    public void queryInputResourcePermutation(final SamInputResource resource) throws IOException {
-        final SamReader reader = SamReaderFactory.makeDefault().open(resource);
-        LOG.info(String.format("Query from %s ...", resource));
-        if (reader.hasIndex()) {
-            final StopWatch stopWatch = new StopWatch();
-            stopWatch.start();
-            final SAMRecordIterator q1 = reader.query("chr1", 500000, 100000000, true);
-            observedRecordOrdering1.add(Iterables.slurp(q1));
-            q1.close();
-            final SAMRecordIterator q20 = reader.query("chr20", 1, 1000000, true);
-            observedRecordOrdering20.add(Iterables.slurp(q20));
-            q20.close();
-            final SAMRecordIterator q3 = reader.query("chr3", 1, 10000000, true);
-            observedRecordOrdering3.add(Iterables.slurp(q3));
-            q3.close();
-            stopWatch.stop();
-            LOG.info(String.format("Finished queries in %sms", stopWatch.getElapsedTime()));
-
-            Assert.assertEquals(observedRecordOrdering1.size(), 1, "read different records for chromosome 1");
-            Assert.assertEquals(observedRecordOrdering20.size(), 1, "read different records for chromosome 20");
-            Assert.assertEquals(observedRecordOrdering3.size(), 1, "read different records for chromosome 3");
-        } else if (resource.indexMaybe() != null) {
-            LOG.warn("Resource has an index source, but is not indexed: " + resource);
-        } else {
-            LOG.info("Skipping query operation: no index.");
-        }
-        reader.close();
-    }
-    
-    @Test
-    public void customReaderFactoryTest() throws IOException {
-        try {
-          CustomReaderFactory.setInstance(new CustomReaderFactory(
-              "https://www.googleapis.com/genomics/v1beta/reads/," +
-              "htsjdk.samtools.SamReaderFactoryTest$TestReaderFactory"));
-          final SamReader reader = SamReaderFactory.makeDefault().open(
-              SamInputResource.of(
-              "https://www.googleapis.com/genomics/v1beta/reads/?uncompressed.sam"));
-          int i = 0;
-          for (@SuppressWarnings("unused") final SAMRecord ignored : reader) {
-              ++i;
-          }
-          reader.close();
-  
-          Assert.assertTrue(i > 0);
-        } finally {
-          CustomReaderFactory.resetToDefaultInstance();
-        }
-    }
-    
-    public static class TestReaderFactory implements CustomReaderFactory.ICustomReaderFactory {
-      @Override
-      public SamReader open(URL url) {
-        final File file = new File(TEST_DATA_DIR, url.getQuery());
-        LOG.info("Opening customr reader for " + file.toString());
-        return SamReaderFactory.makeDefault().open(file);
-      }
-    }
-    
-    @Test
-    public void inputResourceFromStringTest() throws IOException {
-      Assert.assertEquals(SamInputResource.of("http://test.url").data().type(),
-          InputResource.Type.URL);
-      Assert.assertEquals(SamInputResource.of("https://test.url").data().type(),
-          InputResource.Type.URL);
-      Assert.assertEquals(SamInputResource.of("ftp://test.url").data().type(),
-          InputResource.Type.URL);
-      Assert.assertEquals(SamInputResource.of("/a/b/c").data().type(),
-          InputResource.Type.FILE);
-    }
-}
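
For reference, a minimal sketch of the reader-construction pattern the permutation test above exercises; the paths are the same local test files named in the deleted code and stand in for any BAM/index pair:

    // Sketch only: data source plus explicit index, resolved by SamReaderFactory.
    final File bam = new File("testdata/htsjdk/samtools/BAMFileIndexTest/index_test.bam");
    final File bai = new File("testdata/htsjdk/samtools/BAMFileIndexTest/index_test.bam.bai");
    try (final SamReader reader = SamReaderFactory.makeDefault()
            .open(SamInputResource.of(bam).index(bai))) {
        // Header and record ordering should not depend on how the sources were supplied.
        System.out.println(reader.getFileHeader().getSortOrder());
    }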
diff --git a/src/tests/java/htsjdk/samtools/SamReaderSortTest.java b/src/tests/java/htsjdk/samtools/SamReaderSortTest.java
deleted file mode 100755
index 00d44d6..0000000
--- a/src/tests/java/htsjdk/samtools/SamReaderSortTest.java
+++ /dev/null
@@ -1,102 +0,0 @@
-package htsjdk.samtools;
-
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-
-/**
- * Tests for the implementation of SAMRecordIterator in SAMFileReader
- *
- * @author ktibbett at broadinstitute.org
- */
-public class SamReaderSortTest {
-
-    public static final String COORDINATE_SORTED_FILE = "testdata/htsjdk/samtools/coordinate_sorted.sam";
-    public static final String QUERYNAME_SORTED_FILE = "testdata/htsjdk/samtools/queryname_sorted.sam";
-    public static final String QUERYNAME_SORTED_NO_HEADER_SORT = "testdata/htsjdk/samtools/unsorted.sam";
-
-    @Test(expectedExceptions = IllegalStateException.class)
-    public void testSortsDisagree() throws Exception {
-        SAMRecordIterator it = SamReaderFactory.makeDefault().open(new File(COORDINATE_SORTED_FILE)).iterator();
-        try {
-            it.assertSorted(SAMFileHeader.SortOrder.queryname);
-            while (it.hasNext()) {
-                it.next();
-            }
-            Assert.fail("Queryname assertion should have failed on coordinate sorted file but didn't");
-        } finally {
-            it.close();
-        }
-    }
-
-    @Test(dataProvider = "validSorts")
-    public void testSortAssertionValid(String file, SAMFileHeader.SortOrder order) {
-        SAMRecordIterator it = SamReaderFactory.makeDefault().open(new File(file)).iterator();
-        try {
-            it.assertSorted(order);
-            while (it.hasNext()) {
-                it.next();
-            }
-        } finally {
-            it.close();
-        }
-    }
-
-    @DataProvider(name = "validSorts")
-    public Object[][] getValidSorts() {
-        return new Object[][]{
-                {COORDINATE_SORTED_FILE, SAMFileHeader.SortOrder.coordinate},
-                {QUERYNAME_SORTED_FILE, SAMFileHeader.SortOrder.queryname},
-                {QUERYNAME_SORTED_NO_HEADER_SORT, SAMFileHeader.SortOrder.queryname},
-                {COORDINATE_SORTED_FILE, SAMFileHeader.SortOrder.unsorted}
-        };
-    }
-
-
-    @Test(dataProvider = "invalidSorts", expectedExceptions = IllegalStateException.class)
-    public void testSortAssertionFails(String file, SAMFileHeader.SortOrder order) throws Exception {
-        SAMRecordIterator it = SamReaderFactory.makeDefault().open(new File(file)).iterator();
-        try {
-            it.assertSorted(order);
-            while (it.hasNext()) {
-                it.next();
-            }
-            Assert.fail("Iterated successfully over " + file + " with invalid sort assertion: " + order.name());
-        } finally {
-            it.close();
-        }
-    }
-
-    @DataProvider(name = "invalidSorts")
-    public Object[][] getInvalidSorts() {
-        return new Object[][]{
-                {QUERYNAME_SORTED_NO_HEADER_SORT, SAMFileHeader.SortOrder.coordinate}
-        };
-    }
-}
diff --git a/src/tests/java/htsjdk/samtools/SamSpecIntTest.java b/src/tests/java/htsjdk/samtools/SamSpecIntTest.java
deleted file mode 100644
index 9be6d6c..0000000
--- a/src/tests/java/htsjdk/samtools/SamSpecIntTest.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2014 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-
-package htsjdk.samtools;
-
-import htsjdk.samtools.util.CloserUtil;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-public class SamSpecIntTest {
-    private static final File SAM_INPUT = new File("testdata/htsjdk/samtools/inttest.sam");
-    private static final File BAM_INPUT = new File("testdata/htsjdk/samtools/inttest.bam");
-
-    @Test
-    public void testSamIntegers() throws IOException {
-        final List<String> errorMessages = new ArrayList<String>();
-        final SamReader samReader = SamReaderFactory.makeDefault().open(SAM_INPUT);
-        final File bamOutput = File.createTempFile("test", ".bam");
-        final File samOutput = File.createTempFile("test", ".sam");
-        final SAMFileWriter samWriter = new SAMFileWriterFactory().makeWriter(samReader.getFileHeader(), true, samOutput, null);
-        final SAMFileWriter bamWriter = new SAMFileWriterFactory().makeWriter(samReader.getFileHeader(), true, bamOutput, null);
-
-        final SAMRecordIterator iterator = samReader.iterator();
-        while (iterator.hasNext()) {
-            try {
-                final SAMRecord rec = iterator.next();
-                samWriter.addAlignment(rec);
-                bamWriter.addAlignment(rec);
-            } catch (final Throwable e) {
-                System.out.println(e.getMessage());
-                errorMessages.add(e.getMessage());
-            }
-        }
-
-        CloserUtil.close(samReader);
-        samWriter.close();
-        bamWriter.close();
-        Assert.assertEquals(errorMessages.size(), 0);
-        bamOutput.deleteOnExit();
-        samOutput.deleteOnExit();
-    }
-
-    @Test
-    public void testBamIntegers() throws IOException {
-        final List<String> errorMessages = new ArrayList<String>();
-        final SamReader bamReader = SamReaderFactory.makeDefault().open(BAM_INPUT);
-        final File bamOutput = File.createTempFile("test", ".bam");
-        final File samOutput = File.createTempFile("test", ".sam");
-        final SAMFileWriter samWriter = new SAMFileWriterFactory().makeWriter(bamReader.getFileHeader(), true, samOutput, null);
-        final SAMFileWriter bamWriter = new SAMFileWriterFactory().makeWriter(bamReader.getFileHeader(), true, bamOutput, null);
-        final SAMRecordIterator iterator = bamReader.iterator();
-        while (iterator.hasNext()) {
-            try {
-                final SAMRecord rec = iterator.next();
-                samWriter.addAlignment(rec);
-                bamWriter.addAlignment(rec);
-            } catch (final Throwable e) {
-                System.out.println(e.getMessage());
-                errorMessages.add(e.getMessage());
-            }
-        }
-
-        CloserUtil.close(bamReader);
-        samWriter.close();
-        bamWriter.close();
-        Assert.assertEquals(errorMessages.size(), 0);
-        bamOutput.deleteOnExit();
-        samOutput.deleteOnExit();
-    }
-
-}
diff --git a/src/tests/java/htsjdk/samtools/SamStreamsTest.java b/src/tests/java/htsjdk/samtools/SamStreamsTest.java
deleted file mode 100644
index 834538e..0000000
--- a/src/tests/java/htsjdk/samtools/SamStreamsTest.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2016 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-
-package htsjdk.samtools;
-
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.*;
-
-public class SamStreamsTest {
-
-    private static final File TEST_DATA_DIR = new File("testdata/htsjdk/samtools");
-
-    @Test(dataProvider = "makeData")
-    public void testDataFormat(final String inputFile, final boolean isGzippedSAMFile, final boolean isBAMFile, final boolean isCRAMFile) throws Exception {
-        final File input = new File(TEST_DATA_DIR, inputFile);
-        try(final InputStream fis = new BufferedInputStream(new FileInputStream(input))) { //must be buffered or the isGzippedSAMFile will blow up
-            Assert.assertEquals(SamStreams.isGzippedSAMFile(fis), isGzippedSAMFile, "isGzippedSAMFile:" + inputFile);
-            Assert.assertEquals(SamStreams.isBAMFile(fis), isBAMFile,   "isBAMFile:" + inputFile);
-            Assert.assertEquals(SamStreams.isCRAMFile(fis), isCRAMFile, "isCRAMFile:" + inputFile);
-        }
-    }
-
-    @DataProvider(name = "makeData")
-    public Object[][] makeData() {
-        final Object[][] scenarios = new Object[][]{
-                //isGzippedSAMFile isBAMFile isCRAMFile
-                {"block_compressed.sam.gz", true,  false, false},
-                {"uncompressed.sam",        false, false, false},
-                {"compressed.sam.gz",       true,  false, false},
-                {"compressed.bam",          true,  true,  false}, //this is slightly weird (responding true to isGzippedSAMFile)
-                {"cram_query_sorted.cram",  false, false, true},
-        };
-        return scenarios;
-    }
-
-}
diff --git a/src/tests/java/htsjdk/samtools/SequenceNameTruncationAndValidationTest.java b/src/tests/java/htsjdk/samtools/SequenceNameTruncationAndValidationTest.java
deleted file mode 100644
index 0227126..0000000
--- a/src/tests/java/htsjdk/samtools/SequenceNameTruncationAndValidationTest.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools;
-
-import htsjdk.samtools.util.CloserUtil;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-
-/**
- * Test new functionality that truncates sequence names at first whitespace in order to deal
- * with older BAMs that had spaces in sequence names.
- *
- * @author alecw at broadinstitute.org
- */
-public class SequenceNameTruncationAndValidationTest {
-    private static File TEST_DATA_DIR = new File("testdata/htsjdk/samtools");
-
-    @Test(expectedExceptions = {SAMException.class}, dataProvider = "badSequenceNames")
-    public void testSequenceRecordThrowsWhenInvalid(final String sequenceName) {
-        new SAMSequenceRecord(sequenceName, 123);
-        Assert.fail("Should not reach here.");
-    }
-
-    @DataProvider(name = "badSequenceNames")
-    public Object[][] badSequenceNames() {
-        return new Object[][]{
-                {" "},
-                {"\t"},
-                {"\n"},
-                {"="},
-                {"Hi, Mom!"}
-        };
-    }
-
-    @Test(dataProvider = "goodSequenceNames")
-    public void testSequenceRecordPositiveTest(final String sequenceName) {
-        new SAMSequenceRecord(sequenceName, 123);
-    }
-
-    @DataProvider(name = "goodSequenceNames")
-    public Object[][] goodSequenceNames() {
-        return new Object[][]{
-                {"Hi, at Mom!"}
-        };
-    }
-
-    @Test(dataProvider = "samFilesWithSpaceInSequenceName")
-    public void testSamSequenceTruncation(final String filename) {
-        final SamReader reader = SamReaderFactory.makeDefault().open(new File(TEST_DATA_DIR, filename));
-        for (final SAMSequenceRecord sequence : reader.getFileHeader().getSequenceDictionary().getSequences()) {
-            Assert.assertFalse(sequence.getSequenceName().contains(" "), sequence.getSequenceName());
-        }
-        for (final SAMRecord rec : reader) {
-            Assert.assertFalse(rec.getReferenceName().contains(" "));
-        }
-        CloserUtil.close(reader);
-    }
-
-    @DataProvider(name = "samFilesWithSpaceInSequenceName")
-    public Object[][] samFilesWithSpaceInSequenceName() {
-        return new Object[][]{
-                {"sequenceWithSpace.sam"},
-                {"sequenceWithSpace.bam"}
-        };
-    }
-
-    @Test(expectedExceptions = {SAMFormatException.class})
-    public void testBadRname() {
-        final SamReader reader = SamReaderFactory.makeDefault().open(new File(TEST_DATA_DIR, "readWithBadRname.sam"));
-        for (final SAMRecord rec : reader) {
-        }
-        Assert.fail("Should not reach here.");
-    }
-}
diff --git a/src/tests/java/htsjdk/samtools/ValidateSamFileTest.java b/src/tests/java/htsjdk/samtools/ValidateSamFileTest.java
deleted file mode 100644
index 5204a1e..0000000
--- a/src/tests/java/htsjdk/samtools/ValidateSamFileTest.java
+++ /dev/null
@@ -1,471 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-
-package htsjdk.samtools;
-
-import htsjdk.samtools.BamIndexValidator.IndexValidationStringency;
-import htsjdk.samtools.metrics.MetricBase;
-import htsjdk.samtools.metrics.MetricsFile;
-import htsjdk.samtools.reference.ReferenceSequence;
-import htsjdk.samtools.reference.ReferenceSequenceFile;
-import htsjdk.samtools.util.CloserUtil;
-import htsjdk.samtools.util.Histogram;
-import htsjdk.samtools.util.StringUtil;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.ByteArrayInputStream;
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.LineNumberReader;
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.util.Arrays;
-import java.util.Iterator;
-
-/**
- * Tests almost all error conditions detected by the sam file validator. The
- * conditions not tested are proactively prevented by sam generation code.
- *
- * @author Doug Voet
- */
-public class ValidateSamFileTest {
-    private static final File TEST_DATA_DIR = new File("testdata/htsjdk/samtools/ValidateSamFileTest");
-
-    @Test
-    public void testValidSamFile() throws Exception {
-        final SamReader samReader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT).open(new File(TEST_DATA_DIR, "valid.sam"));
-        final Histogram<String> results = executeValidation(samReader, null, IndexValidationStringency.EXHAUSTIVE);
-        Assert.assertTrue(results.isEmpty());
-    }
-
-    @Test
-    public void testSamFileVersion1pt5() throws Exception {
-        final SamReader samReader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT).open(new File(TEST_DATA_DIR, "test_samfile_version_1pt5.bam"));
-        final Histogram<String> results = executeValidation(samReader, null, IndexValidationStringency.EXHAUSTIVE);
-        Assert.assertTrue(results.isEmpty());
-    }
-
-    @Test
-    public void testSortOrder() throws IOException {
-        Histogram<String> results = executeValidation(SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT)
-                .open(new File(TEST_DATA_DIR, "invalid_coord_sort_order.sam")), null, IndexValidationStringency.EXHAUSTIVE);
-        Assert.assertEquals(results.get(SAMValidationError.Type.RECORD_OUT_OF_ORDER.getHistogramString()).getValue(), 1.0);
-        results = executeValidation(SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT)
-                .open(new File(TEST_DATA_DIR, "invalid_queryname_sort_order.sam")), null, IndexValidationStringency.EXHAUSTIVE);
-        Assert.assertEquals(results.get(SAMValidationError.Type.RECORD_OUT_OF_ORDER.getHistogramString()).getValue(), 5.0);
-    }
-
-    @Test
-    public void testVerbose() throws IOException {
-        final SAMRecordSetBuilder samBuilder = new SAMRecordSetBuilder();
-
-        for (int i = 0; i < 20; i++) {
-            samBuilder.addFrag(String.valueOf(i), 1, i, false);
-        }
-        for (final SAMRecord record : samBuilder) {
-            record.setProperPairFlag(true);
-        }
-
-        final StringWriter results = new StringWriter();
-        final SamFileValidator validator = new SamFileValidator(new PrintWriter(results), 8000);
-        validator.setVerbose(true, 10);
-        validator.validateSamFileVerbose(samBuilder.getSamReader(), null);
-
-        final int lineCount = results.toString().split("\n").length;
-        Assert.assertEquals(lineCount, 11);
-    }
-
-    @Test
-    public void testUnpairedRecords() throws IOException {
-        final SAMRecordSetBuilder samBuilder = new SAMRecordSetBuilder();
-
-        for (int i = 0; i < 6; i++) {
-            samBuilder.addFrag(String.valueOf(i), i, i, false);
-        }
-        final Iterator<SAMRecord> records = samBuilder.iterator();
-        records.next().setProperPairFlag(true);
-        records.next().setMateUnmappedFlag(true);
-        records.next().setMateNegativeStrandFlag(true);
-        records.next().setFirstOfPairFlag(true);
-        records.next().setSecondOfPairFlag(true);
-        records.next().setMateReferenceIndex(1);
-
-        final Histogram<String> results = executeValidation(samBuilder.getSamReader(), null, IndexValidationStringency.EXHAUSTIVE);
-
-        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_FLAG_PROPER_PAIR.getHistogramString()).getValue(), 1.0);
-        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_FLAG_MATE_UNMAPPED.getHistogramString()).getValue(), 1.0);
-        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_FLAG_MATE_NEG_STRAND.getHistogramString()).getValue(), 1.0);
-        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_FLAG_FIRST_OF_PAIR.getHistogramString()).getValue(), 1.0);
-        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_FLAG_SECOND_OF_PAIR.getHistogramString()).getValue(), 1.0);
-        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_MATE_REF_INDEX.getHistogramString()).getValue(), 1.0);
-    }
-
-    @Test
-    public void testPairedRecords() throws IOException {
-        final SAMRecordSetBuilder samBuilder = new SAMRecordSetBuilder();
-
-        for (int i = 0; i < 5; i++) {
-            samBuilder.addPair(String.valueOf(i), i, i, i + 100);
-        }
-        final Iterator<SAMRecord> records = samBuilder.iterator();
-        records.next().setMateReferenceName("*");
-        records.next().setMateAlignmentStart(Integer.MAX_VALUE);
-        records.next().setMateAlignmentStart(records.next().getAlignmentStart() + 1);
-        records.next().setMateNegativeStrandFlag(!records.next().getReadNegativeStrandFlag());
-        records.next().setMateReferenceIndex(records.next().getReferenceIndex() + 1);
-        records.next().setMateUnmappedFlag(!records.next().getReadUnmappedFlag());
-
-
-        final Histogram<String> results = executeValidation(samBuilder.getSamReader(), null, IndexValidationStringency.EXHAUSTIVE);
-
-        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_ALIGNMENT_START.getHistogramString()).getValue(), 3.0);
-        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_FLAG_MATE_UNMAPPED.getHistogramString()).getValue(), 1.0);
-        Assert.assertEquals(results.get(SAMValidationError.Type.MISMATCH_FLAG_MATE_NEG_STRAND.getHistogramString()).getValue(), 1.0);
-        Assert.assertEquals(results.get(SAMValidationError.Type.MISMATCH_FLAG_MATE_UNMAPPED.getHistogramString()).getValue(), 1.0);
-        Assert.assertEquals(results.get(SAMValidationError.Type.MISMATCH_MATE_ALIGNMENT_START.getHistogramString()).getValue(), 2.0);
-        Assert.assertEquals(results.get(SAMValidationError.Type.MISMATCH_MATE_REF_INDEX.getHistogramString()).getValue(), 2.0);
-    }
-
-    @Test(dataProvider = "missingMateTestCases")
-    public void testMissingMate(final SAMFileHeader.SortOrder sortOrder) throws IOException {
-        final SAMRecordSetBuilder samBuilder = new SAMRecordSetBuilder(true, sortOrder);
-
-        samBuilder.addPair(String.valueOf(1), 1, 1, 101);
-        final Iterator<SAMRecord> records = samBuilder.iterator();
-        records.next();
-        records.remove();
-        final Histogram<String> results = executeValidation(samBuilder.getSamReader(), null, IndexValidationStringency.EXHAUSTIVE);
-
-        Assert.assertEquals(results.get(SAMValidationError.Type.MATE_NOT_FOUND.getHistogramString()).getValue(), 1.0);
-    }
-
-    @DataProvider(name = "missingMateTestCases")
-    public Object[][] missingMateTestCases() {
-        return new Object[][]{
-                {SAMFileHeader.SortOrder.coordinate},
-                {SAMFileHeader.SortOrder.queryname},
-                {SAMFileHeader.SortOrder.unsorted},
-        };
-    }
-
-    @Test
-    public void testUnmappedRecords() throws IOException {
-        final SAMRecordSetBuilder samBuilder = new SAMRecordSetBuilder();
-
-        for (int i = 0; i < 4; i++) {
-            samBuilder.addUnmappedFragment(String.valueOf(i));
-        }
-        final Iterator<SAMRecord> records = samBuilder.iterator();
-        records.next().setReadNegativeStrandFlag(true);
-        records.next().setNotPrimaryAlignmentFlag(true);
-        records.next().setMappingQuality(10);
-        records.next().setCigarString("36M");
-
-        final Histogram<String> results = executeValidation(samBuilder.getSamReader(), null, IndexValidationStringency.EXHAUSTIVE);
-
-        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_FLAG_NOT_PRIM_ALIGNMENT.getHistogramString()).getValue(), 1.0);
-        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_MAPPING_QUALITY.getHistogramString()).getValue(), 1.0);
-    }
-
-    @Test
-    public void testMappedRecords() throws IOException {
-        final SAMRecordSetBuilder samBuilder = new SAMRecordSetBuilder();
-
-        for (int i = 0; i < 2; i++) {
-            samBuilder.addFrag(String.valueOf(i), i, i, false);
-        }
-        final Iterator<SAMRecord> records = samBuilder.iterator();
-        records.next().setCigarString("25M3S25M");
-        records.next().setReferenceName("*");
-
-        final Histogram<String> results = executeValidation(samBuilder.getSamReader(), null, IndexValidationStringency.EXHAUSTIVE);
-
-        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_CIGAR.getHistogramString()).getValue(), 1.0);
-        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_FLAG_READ_UNMAPPED.getHistogramString()).getValue(), 1.0);
-        Assert.assertEquals(results.get(SAMValidationError.Type.MISSING_TAG_NM.getHistogramString()).getValue(), 1.0);
-    }
-
-    @Test
-    public void testNmFlagValidation() throws IOException {
-        final SAMRecordSetBuilder samBuilder = new SAMRecordSetBuilder();
-
-        for (int i = 0; i < 3; i++) {
-            samBuilder.addFrag(String.valueOf(i), i, i + 1, false);
-        }
-        final Iterator<SAMRecord> records = samBuilder.iterator();
-        records.next().setAttribute(ReservedTagConstants.NM, 4);
-
-        // PIC-215: Confirm correct NM value when there is an insertion and a deletion.
-        final SAMRecord recordWithInsert = records.next();
-        final byte[] sequence = recordWithInsert.getReadBases();
-        Arrays.fill(sequence, (byte) 'A');
-        recordWithInsert.setReadBases(sequence);
-        recordWithInsert.setCigarString("1D" + Integer.toString(sequence.length - 1) + "M1I");
-        recordWithInsert.setAttribute(ReservedTagConstants.NM, 2);
-
-        final Histogram<String> results = executeValidation(samBuilder.getSamReader(), new ReferenceSequenceFile() {
-            private int index = 0;
-
-            public SAMSequenceDictionary getSequenceDictionary() {
-                return null;
-            }
-
-            public ReferenceSequence nextSequence() {
-                final byte[] bases = new byte[10000];
-                Arrays.fill(bases, (byte) 'A');
-                return new ReferenceSequence("foo", index++, bases);
-            }
-
-            public void reset() {
-                this.index = 0;
-            }
-
-            public boolean isIndexed() { return false; }
-
-            public ReferenceSequence getSequence(final String contig) {
-                throw new UnsupportedOperationException();
-            }
-
-            public ReferenceSequence getSubsequenceAt(final String contig, final long start, final long stop) {
-                throw new UnsupportedOperationException();
-            }
-
-            @Override
-            public void close() throws IOException {
-                //no-op
-            }
-        }, IndexValidationStringency.EXHAUSTIVE);
-
-        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_TAG_NM.getHistogramString()).getValue(), 1.0);
-        Assert.assertEquals(results.get(SAMValidationError.Type.MISSING_TAG_NM.getHistogramString()).getValue(), 1.0);
-    }
-
-    @Test(dataProvider = "testMateCigarScenarios")
-    public void testMateCigarScenarios(final String scenario, final String inputFile, final SAMValidationError.Type expectedError)
-            throws Exception {
-        final SamReader reader = SamReaderFactory.makeDefault().open(new File(TEST_DATA_DIR, inputFile));
-        final Histogram<String> results = executeValidation(reader, null, IndexValidationStringency.EXHAUSTIVE);
-        Assert.assertNotNull(results.get(expectedError.getHistogramString()));
-        Assert.assertEquals(results.get(expectedError.getHistogramString()).getValue(), 1.0);
-    }
-
-
-    @DataProvider(name = "testMateCigarScenarios")
-    public Object[][] testMateCigarScenarios() {
-        return new Object[][]{
-                {"invalid mate cigar", "invalid_mate_cigar_string.sam", SAMValidationError.Type.MISMATCH_MATE_CIGAR_STRING},
-                {"inappropriate mate cigar", "inappropriate_mate_cigar_string.sam", SAMValidationError.Type.MATE_CIGAR_STRING_INVALID_PRESENCE}
-        };
-    }
-
-    @Test(dataProvider = "testTruncatedScenarios")
-    public void testTruncated(final String scenario, final String inputFile, final SAMValidationError.Type expectedError)
-            throws Exception {
-        final SamReader reader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT).open(new File(TEST_DATA_DIR, inputFile));
-        final Histogram<String> results = executeValidation(reader, null, IndexValidationStringency.EXHAUSTIVE);
-        Assert.assertNotNull(results.get(expectedError.getHistogramString()));
-        Assert.assertEquals(results.get(expectedError.getHistogramString()).getValue(), 1.0);
-    }
-
-    @DataProvider(name = "testTruncatedScenarios")
-    public Object[][] testTruncatedScenarios() {
-        return new Object[][]{
-                {"truncated bam", "truncated.bam", SAMValidationError.Type.TRUNCATED_FILE},
-                {"truncated quals", "truncated_quals.sam", SAMValidationError.Type.MISMATCH_READ_LENGTH_AND_QUALS_LENGTH},
-                // TODO: Because validation is turned off when parsing, this error is not detectable currently by validator.
-                //{"truncated tag", "truncated_tag.sam", SAMValidationError.Type.TRUNCATED_FILE},
-                // TODO: Currently, this is not considered an error.  Should it be?
-                //{"hanging tab", "hanging_tab.sam", SAMValidationError.Type.TRUNCATED_FILE},
-        };
-    }
-
-    @Test(expectedExceptions = SAMException.class, dataProvider = "testFatalParsingErrors")
-    public void testFatalParsingErrors(final String scenario, final String inputFile) throws Exception {
-        final SamReader reader = SamReaderFactory.makeDefault().open(new File(TEST_DATA_DIR, inputFile));
-        executeValidation(reader, null, IndexValidationStringency.EXHAUSTIVE);
-        Assert.fail("Exception should have been thrown.");
-    }
-
-    @DataProvider(name = "testFatalParsingErrors")
-    public Object[][] testFatalParsingErrorScenarios() {
-        return new Object[][]{
-                {"missing fields", "missing_fields.sam"},
-                {"zero length read", "zero_length_read.sam"}
-        };
-    }
-
-    @Test
-    public void testHeaderVersionValidation() throws Exception {
-        final String header = "@HD	VN:Hi,Mom!	SO:queryname";
-        final InputStream strm = new ByteArrayInputStream(StringUtil.stringToBytes(header));
-        final SamReader samReader = SamReaderFactory.makeDefault().open(SamInputResource.of(strm));
-        final Histogram<String> results = executeValidation(samReader, null, IndexValidationStringency.EXHAUSTIVE);
-        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_VERSION_NUMBER.getHistogramString()).getValue(), 1.0);
-    }
-
-    @Test(enabled = false, description = "File is actually valid for Standard quality scores so this test fails with an NPE.")
-    public void testQualityFormatValidation() throws Exception {
-        final SamReader samReader = SamReaderFactory.makeDefault().open(new File("./testdata/htsjdk/samtools/util/QualityEncodingDetectorTest/illumina-as-standard.bam"));
-        final Histogram<String> results = executeValidation(samReader, null, IndexValidationStringency.EXHAUSTIVE);
-        final Histogram<String>.Bin bin = results.get(SAMValidationError.Type.INVALID_QUALITY_FORMAT.getHistogramString());
-        final double value = bin.getValue();
-        Assert.assertEquals(value, 1.0);
-    }
-
-    @Test
-    public void testCigarOffEndOfReferenceValidation() throws Exception {
-        final SAMRecordSetBuilder samBuilder = new SAMRecordSetBuilder();
-        samBuilder.addFrag(String.valueOf(0), 0, 1, false);
-        final int contigLength = samBuilder.getHeader().getSequence(0).getSequenceLength();
-        // Should hang off the end.
-        samBuilder.addFrag(String.valueOf(1), 0, contigLength - 1, false);
-        final Histogram<String> results = executeValidation(samBuilder.getSamReader(), null, IndexValidationStringency.EXHAUSTIVE);
-        Assert.assertNotNull(results.get(SAMValidationError.Type.CIGAR_MAPS_OFF_REFERENCE.getHistogramString()));
-        Assert.assertEquals(results.get(SAMValidationError.Type.CIGAR_MAPS_OFF_REFERENCE.getHistogramString()).getValue(), 1.0);
-    }
-
-    @Test(expectedExceptions = SAMFormatException.class)
-    public void testConflictingTags() throws Exception {
-        final String header = "@HD	VN:1.0	SO:queryname	SO:coordinate";
-        final InputStream strm = new ByteArrayInputStream(StringUtil.stringToBytes(header));
-        final SamReader reader = SamReaderFactory.makeDefault().open(SamInputResource.of(strm));
-        Assert.fail("Exception should have been thrown.");
-    }
-
-    @Test
-    public void testRedundantTags() throws Exception {
-        final String header = "@HD	VN:1.0	SO:coordinate	SO:coordinate";
-        final InputStream strm = new ByteArrayInputStream(StringUtil.stringToBytes(header));
-        final SamReader samReader = SamReaderFactory.makeDefault().open(SamInputResource.of(strm));
-        Assert.assertEquals(SAMFileHeader.SortOrder.coordinate, samReader.getFileHeader().getSortOrder());
-        CloserUtil.close(samReader);
-    }
-
-    @Test
-    public void testHeaderValidation() throws Exception {
-        final SamReader samReader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT)
-                .open(new File(TEST_DATA_DIR, "buggyHeader.sam"));
-        final Histogram<String> results = executeValidation(samReader, null, IndexValidationStringency.EXHAUSTIVE);
-        Assert.assertEquals(results.get(SAMValidationError.Type.UNRECOGNIZED_HEADER_TYPE.getHistogramString()).getValue(), 3.0);
-        Assert.assertEquals(results.get(SAMValidationError.Type.HEADER_TAG_MULTIPLY_DEFINED.getHistogramString()).getValue(), 1.0);
-    }
-
-    @Test
-    public void testPlatformMissing() throws Exception {
-        final SamReader samReader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT)
-                .open((new File(TEST_DATA_DIR, "missing_platform_unit.sam")));
-        final Histogram<String> results = executeValidation(samReader, null, IndexValidationStringency.EXHAUSTIVE);
-        Assert.assertEquals(results.get(SAMValidationError.Type.MISSING_PLATFORM_VALUE.getHistogramString()).getValue(), 1.0);
-    }
-    
-    @Test
-    public void testPlatformInvalid() throws Exception {
-        final SamReader samReader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT)
-                .open((new File(TEST_DATA_DIR, "invalid_platform_unit.sam")));
-        final Histogram<String> results = executeValidation(samReader, null, IndexValidationStringency.EXHAUSTIVE);
-        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_PLATFORM_VALUE.getHistogramString()).getValue(), 1.0);
-    }
-
-    @Test
-    public void testDuplicateRGIDs() throws Exception {
-        final SamReader samReader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT)
-                .open((new File(TEST_DATA_DIR, "duplicate_rg.sam")));
-        final Histogram<String> results = executeValidation(samReader, null, IndexValidationStringency.EXHAUSTIVE);
-        Assert.assertEquals(results.get(SAMValidationError.Type.DUPLICATE_READ_GROUP_ID.getHistogramString()).getValue(), 1.0);
-    }
-
-    @Test
-    public void testIndexFileValidation() throws Exception {
-        final SamReader samReader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT)
-                .enable(SamReaderFactory.Option.CACHE_FILE_BASED_INDEXES).open((new File(TEST_DATA_DIR, "bad_index.bam")));
-
-        Histogram<String> results = executeValidation(samReader, null, IndexValidationStringency.EXHAUSTIVE);
-        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_INDEX_FILE_POINTER.getHistogramString()).getValue(), 1.0);
-
-        results = executeValidation(samReader, null, IndexValidationStringency.LESS_EXHAUSTIVE);
-        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_INDEX_FILE_POINTER.getHistogramString()).getValue(), 1.0);
-
-    }
-
-    private Histogram<String> executeValidation(final SamReader samReader, final ReferenceSequenceFile reference, final IndexValidationStringency stringency) throws IOException {
-        final File outFile = File.createTempFile("validation", ".txt");
-        outFile.deleteOnExit();
-        final PrintWriter out = new PrintWriter(outFile);
-        new SamFileValidator(out, 8000).setIndexValidationStringency(stringency).validateSamFileSummary(samReader, reference);
-        final LineNumberReader reader = new LineNumberReader(new FileReader(outFile));
-        if (reader.readLine().equals("No errors found")) {
-            return new Histogram<String>();
-        }
-        final MetricsFile<MetricBase, String> outputFile = new MetricsFile<MetricBase, String>();
-        outputFile.read(new FileReader(outFile));
-        Assert.assertNotNull(outputFile.getHistogram());
-        return outputFile.getHistogram();
-    }
-    
-    private void testHeaderVersion(final String version, final boolean expectValid) throws Exception {
-        final File samFile = File.createTempFile("validateHeader.", ".sam");
-        samFile.deleteOnExit();
-        final PrintWriter pw = new PrintWriter(samFile);
-        pw.println("@HD\tVN:" + version);
-        pw.close();
-        final SamReader reader = SamReaderFactory.makeDefault().open(samFile);
-        final Histogram<String> results = executeValidation(reader, null, IndexValidationStringency.EXHAUSTIVE);
-        if (expectValid) Assert.assertNull(results.get(SAMValidationError.Type.INVALID_VERSION_NUMBER.getHistogramString()));
-        else {
-            Assert.assertNotNull(results.get(SAMValidationError.Type.INVALID_VERSION_NUMBER.getHistogramString()));
-            Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_VERSION_NUMBER.getHistogramString()).getValue(), 1.0);
-        }
-    }
-
-    @Test
-    public void testHeaderVersions() throws Exception {
-        // Test the acceptable versions
-        for (final String version : SAMFileHeader.ACCEPTABLE_VERSIONS) {
-            testHeaderVersion(version, true);
-        }
-
-        // Test an unacceptable version
-        testHeaderVersion("1.6", false);
-    }
-
-    @Test(enabled = false)
-    public void duplicateReads() throws Exception {
-        final SamReader samReader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT).open(new File(TEST_DATA_DIR, "duplicated_reads.sam"));
-        final Histogram<String> results = executeValidation(samReader, null, IndexValidationStringency.EXHAUSTIVE);
-        Assert.assertFalse(results.isEmpty());
-        Assert.assertEquals(results.get(SAMValidationError.Type.MATES_ARE_SAME_END.getHistogramString()).getValue(), 2.0);
-    }
-
-    @Test
-    public void duplicateReadsOutOfOrder() throws Exception {
-        final SamReader samReader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT).open(new File(TEST_DATA_DIR, "duplicated_reads_out_of_order.sam"));
-        final Histogram<String> results = executeValidation(samReader, null, IndexValidationStringency.EXHAUSTIVE);
-        Assert.assertFalse(results.isEmpty());
-        Assert.assertEquals(results.get(SAMValidationError.Type.MATES_ARE_SAME_END.getHistogramString()).getValue(), 2.0);
-    }
-}
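
A minimal sketch of the validator call that the executeValidation helper above wraps; the writer target and input path are placeholders, while the 8000 maximum-temp-file argument and the null reference mirror the deleted code:

    // Sketch only: summary validation written to a PrintWriter; a null reference skips NM checks.
    final SamReader reader = SamReaderFactory.makeDefault()
            .validationStringency(ValidationStringency.SILENT)
            .open(new File("input.bam"));
    new SamFileValidator(new PrintWriter(System.out), 8000)
            .setIndexValidationStringency(BamIndexValidator.IndexValidationStringency.EXHAUSTIVE)
            .validateSamFileSummary(reader, null);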
diff --git a/src/tests/java/htsjdk/samtools/cram/CRAIIndexTest.java b/src/tests/java/htsjdk/samtools/cram/CRAIIndexTest.java
deleted file mode 100644
index ac7608c..0000000
--- a/src/tests/java/htsjdk/samtools/cram/CRAIIndexTest.java
+++ /dev/null
@@ -1,213 +0,0 @@
-package htsjdk.samtools.cram;
-
-import htsjdk.samtools.BAMFileSpan;
-import htsjdk.samtools.DiskBasedBAMFileIndex;
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.SAMSequenceRecord;
-import htsjdk.samtools.seekablestream.SeekableBufferedStream;
-import htsjdk.samtools.seekablestream.SeekableFileStream;
-import htsjdk.samtools.seekablestream.SeekableMemoryStream;
-import htsjdk.samtools.seekablestream.SeekableStream;
-import htsjdk.samtools.util.IOUtil;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.io.BufferedInputStream;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.function.BiFunction;
-import java.util.zip.GZIPOutputStream;
-
-/**
- * Created by vadim on 25/08/2015.
- */
-public class CRAIIndexTest {
-
-    @Test
-    public void testFind() throws IOException, CloneNotSupportedException {
-        final List<CRAIEntry> index = new ArrayList<CRAIEntry>();
-
-        final int sequenceId = 1;
-        CRAIEntry e = new CRAIEntry();
-        e.sequenceId = sequenceId;
-        e.alignmentStart = 1;
-        e.alignmentSpan = 1;
-        e.containerStartOffset = 1;
-        e.sliceOffset = 1;
-        e.sliceSize = 0;
-        index.add(e);
-
-        e = e.clone();
-        e.alignmentStart = 2;
-        e.containerStartOffset = 2;
-        index.add(e);
-
-        e = e.clone();
-        e.alignmentStart = 3;
-        e.containerStartOffset = 3;
-        index.add(e);
-
-        Assert.assertFalse(allFoundEntriesIntersectQueryInFind(index, sequenceId, 1, 0));
-
-        Assert.assertTrue(allFoundEntriesIntersectQueryInFind(index, sequenceId, 1, 1));
-        Assert.assertTrue(allFoundEntriesIntersectQueryInFind(index, sequenceId, 1, 2));
-        Assert.assertTrue(allFoundEntriesIntersectQueryInFind(index, sequenceId, 2, 1));
-        Assert.assertTrue(allFoundEntriesIntersectQueryInFind(index, sequenceId, 1, 3));
-
-        final int nonExistentSequenceId = 2;
-        Assert.assertFalse(allFoundEntriesIntersectQueryInFind(index, nonExistentSequenceId, 2, 1));
-        // a query starting beyond all entries:
-        Assert.assertFalse(allFoundEntriesIntersectQueryInFind(index, sequenceId, 4, 1));
-    }
-
-    private boolean allFoundEntriesIntersectQueryInFind(final List<CRAIEntry> index, final int sequenceId, final int start, final int span) {
-        int foundCount = 0;
-        for (final CRAIEntry found : CRAIIndex.find(index, sequenceId, start, span)) {
-            foundCount++;
-            Assert.assertEquals(found.sequenceId, sequenceId);
-            boolean intersects = false;
-            for (int pos = Math.min(found.alignmentStart, start); pos <= Math.max(found.alignmentStart + found.alignmentSpan, start + span); pos++) {
-                if (pos >= found.alignmentStart && pos >= start &&
-                        pos <= found.alignmentStart + found.alignmentSpan && pos <= start + span) {
-                    intersects = true;
-                    break;
-                }
-            }
-            if (!intersects) {
-                return false;
-            }
-        }
-        return foundCount > 0;
-    }
-
-    @Test(expectedExceptions = NullPointerException.class)
-    public void testCraiRequiresDictionary() throws IOException {
-        try (final ByteArrayOutputStream baos = new ByteArrayOutputStream();
-             final GZIPOutputStream gos = new GZIPOutputStream(baos);
-             final BufferedInputStream bis = new BufferedInputStream(new ByteArrayInputStream(baos.toByteArray()))) {
-            CRAIIndex.openCraiFileAsBaiStream(bis, null);
-        }
-    }
-
-    @Test
-    public void testCraiInMemory() throws IOException {
-        doCRAITest(this::getBaiStreamFromMemory);
-    }
-
-    @Test
-    public void testCraiFromFile() throws IOException {
-        doCRAITest(this::getBaiStreamFromFile);
-    }
-
-    private void doCRAITest(BiFunction<SAMSequenceDictionary, List<CRAIEntry>, SeekableStream> getBaiStreamForIndex) throws IOException {
-        final ArrayList<CRAIEntry> index = new ArrayList<CRAIEntry>();
-        final CRAIEntry entry = new CRAIEntry();
-        entry.sequenceId = 0;
-        entry.alignmentStart = 1;
-        entry.alignmentSpan = 2;
-        entry.sliceOffset = 3;
-        entry.sliceSize = 4;
-        entry.containerStartOffset = 5;
-        index.add(entry);
-
-        final SAMSequenceDictionary dictionary = new SAMSequenceDictionary();
-        dictionary.addSequence(new SAMSequenceRecord("1", 100));
-
-        final SeekableStream baiStream = getBaiStreamForIndex.apply(dictionary, index);
-
-        final DiskBasedBAMFileIndex bamIndex = new DiskBasedBAMFileIndex(baiStream, dictionary);
-        final BAMFileSpan span = bamIndex.getSpanOverlapping(entry.sequenceId, entry.alignmentStart, entry.alignmentStart);
-        Assert.assertNotNull(span);
-        final long[] coordinateArray = span.toCoordinateArray();
-        Assert.assertEquals(coordinateArray.length, 2);
-        Assert.assertEquals(coordinateArray[0] >> 16, entry.containerStartOffset);
-        Assert.assertEquals(coordinateArray[1] & 0xFFFF, 1);
-    }
-
-    public SeekableStream getBaiStreamFromMemory(SAMSequenceDictionary dictionary, final List<CRAIEntry> index) {
-        try {
-            ByteArrayOutputStream baos = new ByteArrayOutputStream();
-            final GZIPOutputStream gos = new GZIPOutputStream(baos);
-            CRAIIndex.writeIndex(gos, index);
-            gos.close();
-            final SeekableStream baiStream = CRAIIndex.openCraiFileAsBaiStream(new ByteArrayInputStream(baos.toByteArray()), dictionary);
-            Assert.assertNotNull(baiStream);
-            return baiStream;
-        }
-        catch (IOException e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    private SeekableStream getBaiStreamFromFile(SAMSequenceDictionary dictionary, final List<CRAIEntry> index) {
-        try {
-            final File file = File.createTempFile("test", ".crai");
-            file.deleteOnExit();
-            final FileOutputStream fos = new FileOutputStream(file);
-            final GZIPOutputStream gos = new GZIPOutputStream(fos);
-            CRAIIndex.writeIndex(gos, index);
-            gos.close();
-            final SeekableStream baiStream = CRAIIndex.openCraiFileAsBaiStream(new SeekableBufferedStream(new SeekableFileStream(file)), dictionary);
-            Assert.assertNotNull(baiStream);
-            return baiStream;
-        }
-        catch (IOException e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    @Test
-    public void testGetLeftmost() throws CloneNotSupportedException {
-        final List<CRAIEntry> index = new ArrayList<CRAIEntry>();
-        Assert.assertNull(CRAIIndex.getLeftmost(index));
-
-        final CRAIEntry e1 = new CRAIEntry();
-        e1.sequenceId = 1;
-        e1.alignmentStart = 2;
-        e1.alignmentSpan = 3;
-        e1.containerStartOffset = 4;
-        e1.sliceOffset = 5;
-        e1.sliceSize = 6;
-        index.add(e1);
-        // trivial case of single entry in index:
-        Assert.assertEquals(e1, CRAIIndex.getLeftmost(index));
-
-        final CRAIEntry e2 = e1.clone();
-        e2.alignmentStart = e1.alignmentStart + 1;
-        index.add(e2);
-        Assert.assertEquals(e1, CRAIIndex.getLeftmost(index));
-    }
-
-    @Test
-    public void testFindLastAlignedEntry() {
-        final List<CRAIEntry> index = new ArrayList<CRAIEntry>();
-        Assert.assertEquals(-1, CRAIIndex.findLastAlignedEntry(index));
-
-        // Scan all allowed combinations of 10 mapped/unmapped entries and assert the found last aligned entry:
-        final int indexSize = 10;
-        for (int lastAligned = 0; lastAligned < indexSize; lastAligned++) {
-            index.clear();
-            for (int i = 0; i < indexSize; i++) {
-                final CRAIEntry e = new CRAIEntry();
-
-                e.sequenceId = (i <= lastAligned ? 0 : -1);
-                e.alignmentStart = i;
-                index.add(e);
-            }
-            // check expectations are correct before calling findLastAlignedEntry method:
-            Assert.assertTrue(index.get(lastAligned).sequenceId != -1);
-            if (lastAligned < index.size() - 1) {
-                Assert.assertTrue(index.get(lastAligned + 1).sequenceId == -1);
-            }
-            // assert that the found value matches the expectation:
-            Assert.assertEquals(CRAIIndex.findLastAlignedEntry(index), lastAligned);
-        }
-    }
-
-}
diff --git a/src/tests/java/htsjdk/samtools/fastq/FastqWriterTest.java b/src/tests/java/htsjdk/samtools/fastq/FastqWriterTest.java
deleted file mode 100644
index 9610d02..0000000
--- a/src/tests/java/htsjdk/samtools/fastq/FastqWriterTest.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * The MIT License
- *
- * Pierre Lindenbaum PhD
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools.fastq;
-
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import htsjdk.samtools.util.TestUtil;
-
-import java.io.File;
-import java.util.ArrayList;
-
-/**
- * Tests for reading, writing, and serializing FASTQ records.
- */
-public class FastqWriterTest {
-    private static final File TEST_DATA_DIR = new File("testdata/htsjdk/samtools/util/QualityEncodingDetectorTest");
-
-    @DataProvider(name = "fastqsource")
-    public Object[][] createTestData() {
-        return new Object[][]{
-                {"solexa_full_range_as_solexa.fastq"},
-                {"5k-30BB2AAXX.3.aligned.sam.fastq"}
-        };
-    }
-
-    @Test(dataProvider = "fastqsource")
-    public void testReadReadWriteFastq(final String basename) throws Exception {
-        final File tmpFile = File.createTempFile("test.", ".fastq");
-        tmpFile.deleteOnExit();
-        final FastqReader fastqReader = new FastqReader(new File(TEST_DATA_DIR,basename));
-        final FastqWriterFactory writerFactory = new FastqWriterFactory();
-        final FastqWriter fastqWriter = writerFactory.newWriter(tmpFile);
-        for(final FastqRecord rec: fastqReader) fastqWriter.write(rec);
-        fastqWriter.close();
-        fastqReader.close();
-    }
-    
-    @Test(dataProvider = "fastqsource")
-    public void testFastqSerialize(final String basename) throws Exception {
-        // read a bounded number of records from the source file
-        final ArrayList<FastqRecord> records = new ArrayList<>();
-        final FastqReader fastqReader = new FastqReader(new File(TEST_DATA_DIR,basename));
-        for(final FastqRecord rec: fastqReader) {
-            records.add(rec);
-            if(records.size()>100) break;
-        }
-        fastqReader.close();
-        Assert.assertEquals(TestUtil.serializeAndDeserialize(records),records);
-    }
-}
diff --git a/src/tests/java/htsjdk/samtools/filter/JavascriptSamRecordFilterTest.java b/src/tests/java/htsjdk/samtools/filter/JavascriptSamRecordFilterTest.java
deleted file mode 100644
index 7fc5fd2..0000000
--- a/src/tests/java/htsjdk/samtools/filter/JavascriptSamRecordFilterTest.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2015 Pierre Lindenbaum @yokofakun Institut du Thorax - Nantes - France
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools.filter;
-
-import htsjdk.samtools.SAMRecordIterator;
-import htsjdk.samtools.SamReader;
-import htsjdk.samtools.SamReaderFactory;
-import htsjdk.samtools.util.CloserUtil;
-
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * @author Pierre Lindenbaum PhD Institut du Thorax - INSERM - Nantes - France
- */
-
-public class JavascriptSamRecordFilterTest {
-    final File testDir = new File("./testdata/htsjdk/samtools");
-
-    @DataProvider
-    public Object[][] jsData() {
-        return new Object[][] { { "unsorted.sam", "samFilter01.js", 8 }, { "unsorted.sam", "samFilter02.js", 10 }, };
-    }
-
-    @Test(dataProvider = "jsData")
-    public void testJavascriptFilters(final String samFile, final String javascriptFile, final int expectCount) {
-        final SamReaderFactory srf = SamReaderFactory.makeDefault();
-        final SamReader samReader = srf.open(new File(testDir, samFile));
-        final JavascriptSamRecordFilter filter;
-        try {
-            filter = new JavascriptSamRecordFilter(new File(testDir, javascriptFile),
-                    samReader.getFileHeader());    
-        } catch (IOException err) {
-            Assert.fail("Cannot read script", err);
-            return;
-        }
-        final SAMRecordIterator iter = samReader.iterator();
-        int count = 0;
-        while (iter.hasNext()) {
-            if (filter.filterOut(iter.next())) {
-                continue;
-            }
-            ++count;
-        }
-        iter.close();
-        CloserUtil.close(samReader);
-        Assert.assertEquals(count, expectCount, "Expected number of reads " + expectCount + " but got " + count);
-    }
-}
diff --git a/src/tests/java/htsjdk/samtools/liftover/LiftOverTest.java b/src/tests/java/htsjdk/samtools/liftover/LiftOverTest.java
deleted file mode 100644
index 92599ba..0000000
--- a/src/tests/java/htsjdk/samtools/liftover/LiftOverTest.java
+++ /dev/null
@@ -1,458 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools.liftover;
-
-import htsjdk.samtools.util.Interval;
-import htsjdk.samtools.util.OverlapDetector;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.PrintWriter;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-
-/**
- * @author alecw at broadinstitute.org
- */
-public class LiftOverTest {
-    private static final File TEST_DATA_DIR = new File("testdata/htsjdk/samtools/liftover");
-    private static final File CHAIN_FILE = new File(TEST_DATA_DIR, "hg18ToHg19.over.chain");
-
-    private LiftOver liftOver;
-
-    @BeforeClass
-    public void initLiftOver() {
-        liftOver = new LiftOver(CHAIN_FILE);
-    }
-
-    @Test(dataProvider = "testIntervals")
-    public void testBasic(final Interval in, final Interval expected) {
-        final Interval out = liftOver.liftOver(in);
-        Assert.assertEquals(out, expected);
-
-    }
-
-    @DataProvider(name = "testIntervals")
-    public Object[][] makeTestIntervals() {
-        return new Object[][] {
-                {new Interval("chr3", 50911035, 50911051), null},
-                {new Interval("chr1", 16776377, 16776452),    new Interval("chr1", 16903790, 16903865)},
-                {new Interval("chr2", 30575990, 30576065),    new Interval("chr2", 30722486, 30722561)},
-                {new Interval("chr3", 12157217, 12157292),    new Interval("chr3", 12182217, 12182292)},
-                {new Interval("chr4", 12503121, 12503196),    new Interval("chr4", 12894023, 12894098)},
-                {new Interval("chr5", 13970930, 13971005),    new Interval("chr5", 13917930, 13918005)},
-                {new Interval("chr6", 13838774, 13838849),    new Interval("chr6", 13730795, 13730870)},
-                {new Interval("chr7", 23978336, 23978411),    new Interval("chr7", 24011811, 24011886)},
-                {new Interval("chr8", 13337368, 13337443),    new Interval("chr8", 13292997, 13293072)},
-                {new Interval("chr9", 35059282, 35059357),    new Interval("chr9", 35069282, 35069357)},
-                {new Interval("chr10", 7893794, 7893869),     new Interval("chr10", 7853788, 7853863)},
-                {new Interval("chr11", 17365784, 17365859),   new Interval("chr11", 17409208, 17409283)},
-                {new Interval("chr12", 4530193, 4530268),     new Interval("chr12", 4659932, 4660007)},
-                {new Interval("chr13", 29398707, 29398782),   new Interval("chr13", 30500707, 30500782)},
-                {new Interval("chr14", 22955252, 22955327),   new Interval("chr14", 23885412, 23885487)},
-                {new Interval("chr15", 27477379, 27477454),   new Interval("chr15", 29690087, 29690162)},
-                {new Interval("chr16", 13016380, 13016455),   new Interval("chr16", 13108879, 13108954)},
-                {new Interval("chr17", 28318218, 28318293),   new Interval("chr17", 31294105, 31294180)},
-                {new Interval("chr18", 42778225, 42778300),   new Interval("chr18", 44524227, 44524302)},
-                {new Interval("chr19", 8340119, 8340194),     new Interval("chr19", 8434119, 8434194)},
-                {new Interval("chr20", 39749226, 39749301),   new Interval("chr20", 40315812, 40315887)},
-                {new Interval("chr21", 20945136, 20945211),   new Interval("chr21", 22023265, 22023340)},
-                {new Interval("chr22", 32307031, 32307106),   new Interval("chr22", 33977031, 33977106)},
-                {new Interval("chrX", 34252958, 34253033) ,   new Interval("chrX", 34343037, 34343112)},
-                // Sampling from /seq/references/HybSelOligos/whole_exome_refseq_coding/whole_exome_refseq_coding.targets.interval_list
-                {new Interval("chr1", 58952, 59873),	new Interval("chr1", 69089, 70010)},
-                {new Interval("chr1", 7733844, 7734041),	new Interval("chr1", 7811257, 7811454)},
-                {new Interval("chr1", 16261179, 16261276),	new Interval("chr1", 16388592, 16388689)},
-                {new Interval("chr1", 23634929, 23635110),	new Interval("chr1", 23762342, 23762523)},
-                {new Interval("chr1", 31910910, 31911030),	new Interval("chr1", 32138323, 32138443)},
-                {new Interval("chr1", 39686851, 39687024),	new Interval("chr1", 39914264, 39914437)},
-                {new Interval("chr1", 46434068, 46434185),	new Interval("chr1", 46661481, 46661598)},
-                {new Interval("chr1", 60102890, 60102928),	new Interval("chr1", 60330302, 60330340)},
-                {new Interval("chr1", 84734151, 84734336),	new Interval("chr1", 84961563, 84961748)},
-                {new Interval("chr1", 100529545, 100529650),	new Interval("chr1", 100756957, 100757062)},
-                {new Interval("chr1", 114771320, 114771441),	new Interval("chr1", 114969797, 114969918)},
-                {new Interval("chr1", 148564831, 148564965),	new Interval("chr1", 150298207, 150298341)},
-                {new Interval("chr1", 153293008, 153293090),	new Interval("chr1", 155026384, 155026466)},
-                {new Interval("chr1", 158167550, 158167677),	new Interval("chr1", 159900926, 159901053)},
-                {new Interval("chr1", 169444555, 169444718),	new Interval("chr1", 171177931, 171178094)},
-                {new Interval("chr1", 183535970, 183536100),	new Interval("chr1", 185269347, 185269477)},
-                {new Interval("chr1", 201411300, 201411508),	new Interval("chr1", 203144677, 203144885)},
-                {new Interval("chr1", 212862043, 212862249),	new Interval("chr1", 214795420, 214795626)},
-                {new Interval("chr1", 228992499, 228992560),	new Interval("chr1", 230925876, 230925937)},
-                {new Interval("chr1", 246268191, 246269133),	new Interval("chr1", 248201568, 248202510)},
-                {new Interval("chr2", 25027765, 25027929),	new Interval("chr2", 25174261, 25174425)},
-                {new Interval("chr2", 32572109, 32572240),	new Interval("chr2", 32718605, 32718736)},
-                {new Interval("chr2", 53988959, 53989061),	new Interval("chr2", 54135455, 54135557)},
-                {new Interval("chr2", 71749748, 71749847),	new Interval("chr2", 71896240, 71896339)},
-                {new Interval("chr2", 96059879, 96060011),	new Interval("chr2", 96696152, 96696284)},
-                {new Interval("chr2", 109923614, 109923763),	new Interval("chr2", 110566325, 110566474)},
-                {new Interval("chr2", 130655571, 130655646),	new Interval("chr2", 130939101, 130939176)},
-                {new Interval("chr2", 159228028, 159228205),	new Interval("chr2", 159519782, 159519959)},
-                {new Interval("chr2", 172639236, 172639282),	new Interval("chr2", 172930990, 172931036)},
-                {new Interval("chr2", 189558634, 189558751),	new Interval("chr2", 189850389, 189850506)},
-                {new Interval("chr2", 203547300, 203547466),	new Interval("chr2", 203839055, 203839221)},
-                {new Interval("chr2", 219578985, 219579191),	new Interval("chr2", 219870741, 219870947)},
-                {new Interval("chr2", 232982284, 232982404),	new Interval("chr2", 233274040, 233274160)},
-                {new Interval("chr3", 3114819, 3114976),	new Interval("chr3", 3139819, 3139976)},
-                {new Interval("chr3", 16333337, 16333745),	new Interval("chr3", 16358333, 16358741)},
-                {new Interval("chr3", 40183652, 40183736),	new Interval("chr3", 40208648, 40208732)},
-                {new Interval("chr3", 48601077, 48601227),	new Interval("chr3", 48626073, 48626223)},
-                {new Interval("chr3", 52287255, 52287419),	new Interval("chr3", 52312215, 52312379)},
-                {new Interval("chr3", 63979313, 63979425),	new Interval("chr3", 64004273, 64004385)},
-                {new Interval("chr3", 110234255, 110234364),	new Interval("chr3", 108751565, 108751674)},
-                {new Interval("chr3", 126088466, 126088539),	new Interval("chr3", 124605776, 124605849)},
-                {new Interval("chr3", 137600279, 137600363),	new Interval("chr3", 136117589, 136117673)},
-                {new Interval("chr3", 159845116, 159845200),	new Interval("chr3", 158362422, 158362506)},
-                {new Interval("chr3", 185387877, 185387927),	new Interval("chr3", 183905183, 183905233)},
-                {new Interval("chr3", 199065658, 199065715),	new Interval("chr3", 197581261, 197581318)},
-                {new Interval("chr4", 10152742, 10152765),	new Interval("chr4", 10543644, 10543667)},
-                {new Interval("chr4", 47243396, 47243638),	new Interval("chr4", 47548639, 47548881)},
-                {new Interval("chr4", 72632227, 72632303),	new Interval("chr4", 72413363, 72413439)},
-                {new Interval("chr4", 88942682, 88942736),	new Interval("chr4", 88723658, 88723712)},
-                {new Interval("chr4", 114381088, 114381190),	new Interval("chr4", 114161639, 114161741)},
-                {new Interval("chr4", 151338602, 151338707),	new Interval("chr4", 151119152, 151119257)},
-                {new Interval("chr4", 184429225, 184429390),	new Interval("chr4", 184192231, 184192396)},
-                {new Interval("chr5", 14804176, 14804350),	new Interval("chr5", 14751176, 14751350)},
-                {new Interval("chr5", 43687596, 43687745),	new Interval("chr5", 43651839, 43651988)},
-                {new Interval("chr5", 71651730, 71651806),	new Interval("chr5", 71615974, 71616050)},
-                {new Interval("chr5", 95017504, 95017771),	new Interval("chr5", 94991748, 94992015)},
-                {new Interval("chr5", 128984208, 128984352),	new Interval("chr5", 128956309, 128956453)},
-                {new Interval("chr5", 140033038, 140033159),	new Interval("chr5", 140052854, 140052975)},
-                {new Interval("chr5", 153045976, 153046084),	new Interval("chr5", 153065783, 153065891)},
-                {new Interval("chr5", 176255669, 176255768),	new Interval("chr5", 176323063, 176323162)},
-                {new Interval("chr6", 10810586, 10810710),	new Interval("chr6", 10702600, 10702724)},
-                {new Interval("chr6", 30666289, 30666459),	new Interval("chr6", 30558310, 30558480)},
-                {new Interval("chr6", 33082591, 33082598),	new Interval("chr6", 32974613, 32974620)},
-                {new Interval("chr6", 39940185, 39940263),	new Interval("chr6", 39832207, 39832285)},
-                {new Interval("chr6", 50789726, 50789768),	new Interval("chr6", 50681767, 50681809)},
-                {new Interval("chr6", 79721666, 79721720),	new Interval("chr6", 79664947, 79665001)},
-                {new Interval("chr6", 108336822, 108336934),	new Interval("chr6", 108230129, 108230241)},
-                {new Interval("chr6", 131240935, 131241085),	new Interval("chr6", 131199242, 131199392)},
-                {new Interval("chr6", 151799272, 151799384),	new Interval("chr6", 151757579, 151757691)},
-                {new Interval("chr6", 169897302, 169897445),	new Interval("chr6", 170155377, 170155520)},
-                {new Interval("chr7", 17341792, 17341937),	new Interval("chr7", 17375267, 17375412)},
-                {new Interval("chr7", 38875269, 38875380),	new Interval("chr7", 38908744, 38908855)},
-                {new Interval("chr7", 72563000, 72563120),	new Interval("chr7", 72925064, 72925184)},
-                {new Interval("chr7", 89839403, 89839480),	new Interval("chr7", 90001467, 90001544)},
-                {new Interval("chr7", 100063781, 100063867),	new Interval("chr7", 100225845, 100225931)},
-                {new Interval("chr7", 111889559, 111889671),	new Interval("chr7", 112102323, 112102435)},
-                {new Interval("chr7", 133900771, 133900840),	new Interval("chr7", 134250231, 134250300)},
-                {new Interval("chr7", 149124615, 149124769),	new Interval("chr7", 149493682, 149493836)},
-                {new Interval("chr8", 9647462, 9647548),	new Interval("chr8", 9610052, 9610138)},
-                {new Interval("chr8", 27203588, 27203614),	new Interval("chr8", 27147671, 27147697)},
-                {new Interval("chr8", 43171970, 43172044),	new Interval("chr8", 43052813, 43052887)},
-                {new Interval("chr8", 76088775, 76088894),	new Interval("chr8", 75926220, 75926339)},
-                {new Interval("chr8", 103641854, 103642290),	new Interval("chr8", 103572678, 103573114)},
-                {new Interval("chr8", 133913660, 133913828),	new Interval("chr8", 133844478, 133844646)},
-                {new Interval("chr8", 145697031, 145697164),	new Interval("chr8", 145726223, 145726356)},
-                {new Interval("chr9", 26985517, 26985849),	new Interval("chr9", 26995517, 26995849)},
-                {new Interval("chr9", 68496721, 68496793),	new Interval("chr9", 69206901, 69206973)},
-                {new Interval("chr9", 94051959, 94052046),	new Interval("chr9", 95012138, 95012225)},
-                {new Interval("chr9", 110750285, 110750337),	new Interval("chr9", 111710464, 111710516)},
-                {new Interval("chr9", 124416836, 124417782),	new Interval("chr9", 125377015, 125377961)},
-                {new Interval("chr9", 130939690, 130939794),	new Interval("chr9", 131899869, 131899973)},
-                {new Interval("chr9", 138395593, 138395667),	new Interval("chr9", 139275772, 139275846)},
-                {new Interval("chr10", 6048112, 6048310),	new Interval("chr10", 6008106, 6008304)},
-                {new Interval("chr10", 26599573, 26599693),	new Interval("chr10", 26559567, 26559687)},
-                {new Interval("chr10", 51507890, 51507920),	new Interval("chr10", 51837884, 51837914)},
-                {new Interval("chr10", 74343070, 74343234),	new Interval("chr10", 74673064, 74673228)},
-                {new Interval("chr10", 93604764, 93604865),	new Interval("chr10", 93614784, 93614885)},
-                {new Interval("chr10", 101985412, 101985513),	new Interval("chr10", 101995422, 101995523)},
-                {new Interval("chr10", 115325644, 115325755),	new Interval("chr10", 115335654, 115335765)},
-                {new Interval("chr10", 129062310, 129062470),	new Interval("chr10", 129172320, 129172480)},
-                {new Interval("chr11", 1904274, 1904289),	new Interval("chr11", 1947698, 1947713)},
-                {new Interval("chr11", 11928485, 11928607),	new Interval("chr11", 11971909, 11972031)},
-                {new Interval("chr11", 33326642, 33326942),	new Interval("chr11", 33370066, 33370366)},
-                {new Interval("chr11", 55554469, 55555445),	new Interval("chr11", 55797893, 55798869)},
-                {new Interval("chr11", 62505888, 62506060),	new Interval("chr11", 62749312, 62749484)},
-                {new Interval("chr11", 65488560, 65488619),	new Interval("chr11", 65731984, 65732043)},
-                {new Interval("chr11", 71618353, 71618446),	new Interval("chr11", 71940705, 71940798)},
-                {new Interval("chr11", 89174516, 89174750),	new Interval("chr11", 89534868, 89535102)},
-                {new Interval("chr11", 111349955, 111350190),	new Interval("chr11", 111844745, 111844980)},
-                {new Interval("chr11", 120195672, 120195841),	new Interval("chr11", 120690462, 120690631)},
-                {new Interval("chr12", 1089617, 1089776),	new Interval("chr12", 1219356, 1219515)},
-                {new Interval("chr12", 8894021, 8894139),	new Interval("chr12", 9002754, 9002872)},
-                {new Interval("chr12", 26455518, 26455614),	new Interval("chr12", 26564251, 26564347)},
-                {new Interval("chr12", 46663731, 46663788),	new Interval("chr12", 48377464, 48377521)},
-                {new Interval("chr12", 51502394, 51502432),	new Interval("chr12", 53216127, 53216165)},
-                {new Interval("chr12", 55603883, 55604103),	new Interval("chr12", 57317616, 57317836)},
-                {new Interval("chr12", 69218200, 69218280),	new Interval("chr12", 70931933, 70932013)},
-                {new Interval("chr12", 97543837, 97544677),	new Interval("chr12", 99019706, 99020546)},
-                {new Interval("chr12", 108438951, 108439074),	new Interval("chr12", 109954568, 109954691)},
-                {new Interval("chr12", 119021215, 119021343),	new Interval("chr12", 120536832, 120536960)},
-                {new Interval("chr12", 127849755, 127849917),	new Interval("chr12", 129283802, 129283964)},
-                {new Interval("chr13", 28900978, 28901035),	new Interval("chr13", 30002978, 30003035)},
-                {new Interval("chr13", 48646570, 48646698),	new Interval("chr13", 49748569, 49748697)},
-                {new Interval("chr13", 98989699, 98989814),	new Interval("chr13", 100191698, 100191813)},
-                {new Interval("chr14", 20929460, 20929643),	new Interval("chr14", 21859620, 21859803)},
-                {new Interval("chr14", 33338689, 33340068),	new Interval("chr14", 34268938, 34270317)},
-                {new Interval("chr14", 55217155, 55217163),	new Interval("chr14", 56147402, 56147410)},
-                {new Interval("chr14", 71260115, 71260358),	new Interval("chr14", 72190362, 72190605)},
-                {new Interval("chr14", 89806293, 89806451),	new Interval("chr14", 90736540, 90736698)},
-                {new Interval("chr14", 102548185, 102548280),	new Interval("chr14", 103478432, 103478527)},
-                {new Interval("chr15", 31917122, 31918453),	new Interval("chr15", 34129830, 34131161)},
-                {new Interval("chr15", 40481129, 40481302),	new Interval("chr15", 42693837, 42694010)},
-                {new Interval("chr15", 48649374, 48649484),	new Interval("chr15", 50862082, 50862192)},
-                {new Interval("chr15", 61768839, 61768953),	new Interval("chr15", 63981786, 63981900)},
-                {new Interval("chr15", 72115399, 72115456),	new Interval("chr15", 74328346, 74328403)},
-                {new Interval("chr15", 83031858, 83032011),	new Interval("chr15", 85230854, 85231007)},
-                {new Interval("chr16", 79709, 79902),	new Interval("chr16", 139709, 139902)},
-                {new Interval("chr16", 2285590, 2285744),	new Interval("chr16", 2345589, 2345743)},
-                {new Interval("chr16", 14872977, 14873044),	new Interval("chr16", 14965476, 14965543)},
-                {new Interval("chr16", 23611004, 23611155),	new Interval("chr16", 23703503, 23703654)},
-                {new Interval("chr16", 31004784, 31005007),	new Interval("chr16", 31097283, 31097506)},
-                {new Interval("chr16", 55745701, 55745922),	new Interval("chr16", 57188200, 57188421)},
-                {new Interval("chr16", 66647766, 66647830),	new Interval("chr16", 68090265, 68090329)},
-                {new Interval("chr16", 79224415, 79224636),	new Interval("chr16", 80666914, 80667135)},
-                {new Interval("chr17", 1320663, 1320735),	new Interval("chr17", 1373913, 1373985)},
-                {new Interval("chr17", 5304981, 5305155),	new Interval("chr17", 5364257, 5364431)},
-                {new Interval("chr17", 8588568, 8588654),	new Interval("chr17", 8647843, 8647929)},
-                {new Interval("chr17", 18192362, 18192481),	new Interval("chr17", 18251637, 18251756)},
-                {new Interval("chr17", 26514328, 26514522),	new Interval("chr17", 29490202, 29490396)},
-                {new Interval("chr17", 35069238, 35069334),	new Interval("chr17", 37815712, 37815808)},
-                {new Interval("chr17", 38377148, 38377241),	new Interval("chr17", 41123622, 41123715)},
-                {new Interval("chr17", 44472316, 44472454),	new Interval("chr17", 47117317, 47117455)},
-                {new Interval("chr17", 55482984, 55483122),	new Interval("chr17", 58128202, 58128340)},
-                {new Interval("chr17", 64595087, 64595211),	new Interval("chr17", 67083492, 67083616)},
-                {new Interval("chr17", 72814816, 72814876),	new Interval("chr17", 75303221, 75303281)},
-                {new Interval("chr17", 78167687, 78167812),	new Interval("chr17", 80574398, 80574523)},
-                {new Interval("chr18", 19653801, 19653961),	new Interval("chr18", 21399803, 21399963)},
-                {new Interval("chr18", 46766985, 46767455),	new Interval("chr18", 48512987, 48513457)},
-                {new Interval("chr19", 822924, 823120),	new Interval("chr19", 871924, 872120)},
-                {new Interval("chr19", 4200223, 4200327),	new Interval("chr19", 4249223, 4249327)},
-                {new Interval("chr19", 8094666, 8094894),	new Interval("chr19", 8188666, 8188894)},
-                {new Interval("chr19", 11657040, 11657607),	new Interval("chr19", 11796040, 11796607)},
-                {new Interval("chr19", 16298665, 16298844),	new Interval("chr19", 16437665, 16437844)},
-                {new Interval("chr19", 19650533, 19650597),	new Interval("chr19", 19789533, 19789597)},
-                {new Interval("chr19", 42008351, 42008363),	new Interval("chr19", 37316511, 37316523)},
-                {new Interval("chr19", 46446486, 46446567),	new Interval("chr19", 41754646, 41754727)},
-                {new Interval("chr19", 51212087, 51212169),	new Interval("chr19", 46520247, 46520329)},
-                {new Interval("chr19", 55052042, 55052201),	new Interval("chr19", 50360230, 50360389)},
-                {new Interval("chr19", 60200495, 60200669),	new Interval("chr19", 55508683, 55508857)},
-                {new Interval("chr20", 3244380, 3244434),	new Interval("chr20", 3296380, 3296434)},
-                {new Interval("chr20", 25145282, 25145374),	new Interval("chr20", 25197282, 25197374)},
-                {new Interval("chr20", 35182714, 35182855),	new Interval("chr20", 35749300, 35749441)},
-                {new Interval("chr20", 46797751, 46797826),	new Interval("chr20", 47364344, 47364419)},
-                {new Interval("chr20", 61546454, 61546633),	new Interval("chr20", 62076010, 62076189)},
-                {new Interval("chr21", 36666540, 36666701),	new Interval("chr21", 37744670, 37744831)},
-                {new Interval("chr21", 46450176, 46450285),	new Interval("chr21", 47625748, 47625857)},
-                {new Interval("chr22", 22890366, 22890533),	new Interval("chr22", 24560366, 24560533)},
-                {new Interval("chr22", 32487356, 32487465),	new Interval("chr22", 34157356, 34157465)},
-                {new Interval("chr22", 40469028, 40469146),	new Interval("chr22", 42139082, 42139200)},
-                {new Interval("chr22", 49365651, 49365713),	new Interval("chr22", 51018785, 51018847)},
-                {new Interval("chrX", 24135748, 24135895),	new Interval("chrX", 24225827, 24225974)},
-                {new Interval("chrX", 48708293, 48708459),	new Interval("chrX", 48823349, 48823515)},
-                {new Interval("chrX", 69406673, 69406721),	new Interval("chrX", 69489948, 69489996)},
-                {new Interval("chrX", 101459444, 101459531),	new Interval("chrX", 101572788, 101572875)},
-                {new Interval("chrX", 128442357, 128442474),	new Interval("chrX", 128614676, 128614793)},
-                {new Interval("chrX", 152701873, 152701902),	new Interval("chrX", 153048679, 153048708)},
-                {new Interval("chrY", 2715028, 2715646),	new Interval("chrY", 2655028, 2655646)},
-                {new Interval("chrY", 26179988, 26180064),	new Interval("chrY", 27770600, 27770676)},
-                // Some intervals that are flipped in the new genome
-                {new Interval("chr1", 2479704, 2479833, false, "target_549"),        new Interval("chr1", 2494585, 2494714, true, "target_549")},
-                {new Interval("chr1", 2480081, 2480116, false, "target_550"),        new Interval("chr1", 2494302, 2494337, true, "target_550")},
-                {new Interval("chr1", 2481162, 2481308, false, "target_551"),        new Interval("chr1", 2493110, 2493256, true, "target_551")},
-                {new Interval("chr1", 2482263, 2482357, false, "target_552"),        new Interval("chr1", 2492061, 2492155, true, "target_552")},
-                {new Interval("chr1", 2482999, 2483158, false, "target_553"),        new Interval("chr1", 2491260, 2491419, true, "target_553")},
-                {new Interval("chr1", 2484509, 2484638, false, "target_554"),        new Interval("chr1", 2489780, 2489909, true, "target_554")},
-                {new Interval("chr1", 2485143, 2485255, false, "target_555"),        new Interval("chr1", 2489163, 2489275, true, "target_555")},
-                {new Interval("chr1", 2486244, 2486316, false, "target_556"),        new Interval("chr1", 2488102, 2488174, true, "target_556")},
-                {new Interval("chr2", 110735471, 110735558, false, "target_101982"), new Interval("chr2", 110585640, 110585727, true, "target_101982")},
-                {new Interval("chr2", 110735648, 110735831, false, "target_101983"), new Interval("chr2", 110585367, 110585550, true, "target_101983")},
-                {new Interval("chr2", 110736772, 110736922, false, "target_101984"), new Interval("chr2", 110584276, 110584426, true, "target_101984")},
-                {new Interval("chr2", 110737181, 110737322, false, "target_101985"), new Interval("chr2", 110583876, 110584017, true, "target_101985")},
-                {new Interval("chr2", 110737585, 110737747, false, "target_101986"), new Interval("chr2", 110583451, 110583613, true, "target_101986")},
-                {new Interval("chr2", 110738666, 110738793, false, "target_101987"), new Interval("chr2", 110582405, 110582532, true, "target_101987")},
-                {new Interval("chr2", 110738957, 110739136, false, "target_101988"), new Interval("chr2", 110582062, 110582241, true, "target_101988")},
-                {new Interval("chr2", 110739216, 110739401, false, "target_101989"), new Interval("chr2", 110581797, 110581982, true, "target_101989")},
-                {new Interval("chr2", 110741555, 110741768, false, "target_101990"), new Interval("chr2", 110579480, 110579693, true, "target_101990")},
-                {new Interval("chr2", 110743887, 110743978, false, "target_101991"), new Interval("chr2", 110577271, 110577362, true, "target_101991")},
-                {new Interval("chr2", 110750021, 110750220, false, "target_101992"), new Interval("chr2", 110571035, 110571234, true, "target_101992")},
-                {new Interval("chr2", 110754786, 110754935, false, "target_101993"), new Interval("chr2", 110566325, 110566474, true, "target_101993")},
-                {new Interval("chr2", 110755277, 110755511, false, "target_101994"), new Interval("chr2", 110565749, 110565983, true, "target_101994")},
-                {new Interval("chr2", 110759547, 110759703, false, "target_101995"), new Interval("chr2", 110561554, 110561710, true, "target_101995")},
-                {new Interval("chr2", 110760135, 110760250, false, "target_101996"), new Interval("chr2", 110561007, 110561122, true, "target_101996")},
-                {new Interval("chr2", 110761828, 110761899, false, "target_101997"), new Interval("chr2", 110559358, 110559429, true, "target_101997")},
-                {new Interval("chr2", 110769521, 110769596, false, "target_101998"), new Interval("chr2", 110552041, 110552116, true, "target_101998")},
-                {new Interval("chr2", 111012182, 111012298, false, "target_101999"), new Interval("chr2", 108484181, 108484297, true, "target_101999")},
-                {new Interval("chr13", 113547048, 113547139, false, "target_51005"), new Interval("chr13", 114566804, 114566895, true, "target_51005")},
-                {new Interval("chr13", 113547227, 113547397, false, "target_51006"), new Interval("chr13", 114566546, 114566716, true, "target_51006")},
-                {new Interval("chr13", 113562918, 113562946, false, "target_51007"), new Interval("chr13", 114550997, 114551025, true, "target_51007")},
-                {new Interval("chr13", 113564379, 113564445, false, "target_51008"), new Interval("chr13", 114549498, 114549564, true, "target_51008")},
-                {new Interval("chr13", 113571118, 113571244, false, "target_51009"), new Interval("chr13", 114542699, 114542825, true, "target_51009")},
-                {new Interval("chr13", 113572777, 113572903, false, "target_51010"), new Interval("chr13", 114541040, 114541166, true, "target_51010")},
-                {new Interval("chr13", 113575333, 113575459, false, "target_51011"), new Interval("chr13", 114538484, 114538610, true, "target_51011")},
-                {new Interval("chr13", 113576296, 113576421, false, "target_51012"), new Interval("chr13", 114537522, 114537647, true, "target_51012")},
-                {new Interval("chr13", 113578216, 113578338, false, "target_51013"), new Interval("chr13", 114535605, 114535727, true, "target_51013")},
-                {new Interval("chr13", 113578480, 113578673, false, "target_51014"), new Interval("chr13", 114535270, 114535463, true, "target_51014")},
-                {new Interval("chr13", 113582257, 113582425, false, "target_51015"), new Interval("chr13", 114531518, 114531686, true, "target_51015")},
-                {new Interval("chr13", 113583804, 113583976, false, "target_51016"), new Interval("chr13", 114529967, 114530139, true, "target_51016")},
-                {new Interval("chr13", 113587418, 113587597, false, "target_51017"), new Interval("chr13", 114526346, 114526525, true, "target_51017")},
-                {new Interval("chr13", 113588782, 113589014, false, "target_51018"), new Interval("chr13", 114524929, 114525161, true, "target_51018")},
-                {new Interval("chr13", 113589950, 113590108, false, "target_51019"), new Interval("chr13", 114523835, 114523993, true, "target_51019")},
-                {new Interval("chr13", 113599065, 113599236, false, "target_51020"), new Interval("chr13", 114514707, 114514878, true, "target_51020")},
-                {new Interval("chr13", 113605940, 113606087, false, "target_51021"), new Interval("chr13", 114507856, 114508003, true, "target_51021")},
-                {new Interval("chr13", 113609156, 113609319, false, "target_51022"), new Interval("chr13", 114504624, 114504787, true, "target_51022")},
-                {new Interval("chr13", 113610056, 113610145, false, "target_51023"), new Interval("chr13", 114503798, 114503887, true, "target_51023")},
-                {new Interval("chr13", 113611549, 113611633, false, "target_51024"), new Interval("chr13", 114502310, 114502394, true, "target_51024")},
-                {new Interval("chr13", 113615731, 113615824, false, "target_51025"), new Interval("chr13", 114498119, 114498212, true, "target_51025")},
-                {new Interval("chr13", 113641808, 113641874, false, "target_51026"), new Interval("chr13", 114472069, 114472135, true, "target_51026")},
-                {new Interval("chr13", 113644711, 113644857, false, "target_51027"), new Interval("chr13", 114469086, 114469232, true, "target_51027")},
-                {new Interval("chr13", 113651799, 113651848, false, "target_51028"), new Interval("chr13", 114462241, 114462290, true, "target_51028")},
-                {new Interval("chr17", 33541604, 33542176, false, "target_76102"),   new Interval("chr17", 36294030, 36294602, true, "target_76102")},
-                {new Interval("chr17", 33543154, 33543310, false, "target_76103"),   new Interval("chr17", 36292896, 36293052, true, "target_76103")},
-                {new Interval("chr17", 33543677, 33543780, false, "target_76104"),   new Interval("chr17", 36292426, 36292529, true, "target_76104")},
-                {new Interval("chr17", 33544240, 33544309, false, "target_76105"),   new Interval("chr17", 36291897, 36291966, true, "target_76105")},
-                {new Interval("chr17", 33544690, 33544788, false, "target_76106"),   new Interval("chr17", 36291418, 36291516, true, "target_76106")},
-                {new Interval("chr17", 33545498, 33545622, false, "target_76107"),   new Interval("chr17", 36290584, 36290708, true, "target_76107")},
-                {new Interval("chr17", 33547465, 33547578, false, "target_76109"),   new Interval("chr17", 36288629, 36288742, true, "target_76109")},
-                {new Interval("chr17", 33547904, 33548015, false, "target_76110"),   new Interval("chr17", 36288192, 36288303, true, "target_76110")},
-                {new Interval("chr17", 33548455, 33548539, false, "target_76111"),   new Interval("chr17", 36287668, 36287752, true, "target_76111")},
-                {new Interval("chr17", 33549018, 33549061, false, "target_76112"),   new Interval("chr17", 36287146, 36287189, true, "target_76112")},
-                {new Interval("chr17", 33550341, 33550430, false, "target_76113"),   new Interval("chr17", 36285777, 36285866, true, "target_76113")},
-                {new Interval("chr17", 33550589, 33550664, false, "target_76114"),   new Interval("chr17", 36285543, 36285618, true, "target_76114")},
-                {new Interval("chrX", 148575967, 148576994, false, "target_184692"), new Interval("chrX", 148797411, 148798438, true, "target_184692")},
-                {new Interval("chrX", 148577066, 148577143, false, "target_184693"), new Interval("chrX", 148797262, 148797339, true, "target_184693")},
-                {new Interval("chrX", 148578167, 148578266, false, "target_184694"), new Interval("chrX", 148796139, 148796238, true, "target_184694")},
-                {new Interval("chrX", 148579488, 148579587, false, "target_184695"), new Interval("chrX", 148794818, 148794917, true, "target_184695")},
-                {new Interval("chrX", 148603758, 148603770, false, "target_184696"), new Interval("chrX", 148770634, 148770646, true, "target_184696")},
-                // Some intervals that do not map in hg19
-                {new Interval("chr2", 111013693, 111013832), null},
-                {new Interval("chr3", 14174511, 14175398), null},
-                {new Interval("chr3", 50911035, 50911051), null},
-                {new Interval("chr6", 32071709, 32071869), null},
-                {new Interval("chr6", 32072183, 32072358), null},
-                {new Interval("chr6", 32104446, 32104606), null},
-                {new Interval("chr6", 32104920, 32105095), null},
-                {new Interval("chr7", 101995561, 101995739), null},
-                {new Interval("chr7", 142178782, 142178825), null},
-                {new Interval("chr7", 142179850, 142180013), null},
-                {new Interval("chr7", 142181067, 142181324), null},
-                {new Interval("chr7", 142181720, 142181860), null},
-                {new Interval("chr7", 142182157, 142182313), null},
-                {new Interval("chr15", 19335778, 19336302), null},
-                {new Interval("chr17", 33364376, 33364428), null},
-                {new Interval("chr17", 33546162, 33546214), null},
-                {new Interval("chr17", 33706667, 33706736), null},
-                {new Interval("chr17", 59772721, 59772781), null},
-                {new Interval("chr17", 59779355, 59779421), null},
-                {new Interval("chr17", 59781483, 59781540), null},
-                {new Interval("chr17", 59783488, 59783565), null},
-                {new Interval("chr17", 59784584, 59784615), null},
-                {new Interval("chr17", 59786025, 59786136), null},
-                {new Interval("chr17", 59787203, 59787494), null},
-                {new Interval("chr17", 59791235, 59791514), null},
-                {new Interval("chr17", 59794247, 59794502), null},
-                {new Interval("chr17", 59801884, 59802193), null},
-                {new Interval("chr17", 59804685, 59804982), null},
-                {new Interval("chr17", 59817352, 59817382), null},
-                {new Interval("chr17", 59817465, 59817532), null},
-                {new Interval("chr17", 59875754, 59875812), null},
-                {new Interval("chr17", 59875899, 59875944), null},
-                {new Interval("chr17", 59879183, 59879456), null},
-                {new Interval("chr17", 59883988, 59884276), null},
-                {new Interval("chr17", 59887398, 59887512), null},
-                {new Interval("chrX", 48774611, 48775058), null},
-
-        };
-    }
-
-    @Test(dataProvider = "failingIntervals")
-    public void testDiagnosticLiftover(final Interval fromInterval) {
-        final List<LiftOver.PartialLiftover> partials = liftOver.diagnosticLiftover(fromInterval);
-        System.out.println("Diagnosing " + fromInterval + " (len " + fromInterval.length() + ")");
-        for (final LiftOver.PartialLiftover partial : partials) {
-            System.out.println(partial);
-        }
-    }
-
-    @DataProvider(name = "failingIntervals")
-    public Object[][] makeFailingIntervals() {
-        return new Object[][] {
-                {new Interval("chr3", 50911035, 50911051)},
-                {new Interval("chr2", 111013693, 111013832)},
-                {new Interval("chr3", 14174511, 14175398)},
-                {new Interval("chr3", 50911035, 50911051)},
-                {new Interval("chr6", 32071709, 32071869)},
-                {new Interval("chr6", 32072183, 32072358)},
-                {new Interval("chr6", 32104446, 32104606)},
-                {new Interval("chr6", 32104920, 32105095)},
-                {new Interval("chr7", 101995561, 101995739)},
-                {new Interval("chr7", 142178782, 142178825)},
-                {new Interval("chr7", 142179850, 142180013)},
-                {new Interval("chr7", 142181067, 142181324)},
-                {new Interval("chr7", 142181720, 142181860)},
-                {new Interval("chr7", 142182157, 142182313)},
-                {new Interval("chr15", 19335778, 19336302)},
-                {new Interval("chr17", 33364376, 33364428)},
-                {new Interval("chr17", 33546162, 33546214)},
-                {new Interval("chr17", 33706667, 33706736)},
-                {new Interval("chr17", 59772721, 59772781)},
-                {new Interval("chr17", 59779355, 59779421)},
-                {new Interval("chr17", 59781483, 59781540)},
-                {new Interval("chr17", 59783488, 59783565)},
-                {new Interval("chr17", 59784584, 59784615)},
-                {new Interval("chr17", 59786025, 59786136)},
-                {new Interval("chr17", 59787203, 59787494)},
-                {new Interval("chr17", 59791235, 59791514)},
-                {new Interval("chr17", 59794247, 59794502)},
-                {new Interval("chr17", 59801884, 59802193)},
-                {new Interval("chr17", 59804685, 59804982)},
-                {new Interval("chr17", 59817352, 59817382)},
-                {new Interval("chr17", 59817465, 59817532)},
-                {new Interval("chr17", 59875754, 59875812)},
-                {new Interval("chr17", 59875899, 59875944)},
-                {new Interval("chr17", 59879183, 59879456)},
-                {new Interval("chr17", 59883988, 59884276)},
-                {new Interval("chr17", 59887398, 59887512)},
-                {new Interval("chrX", 48774611, 48775058)},
-
-        };
-    }
-
-    @Test
-    public void testWriteChain() throws Exception {
-        final OverlapDetector<Chain> chains = Chain.loadChains(CHAIN_FILE);
-        File outFile = File.createTempFile("test.", ".chain");
-        outFile.deleteOnExit();
-        PrintWriter pw = new PrintWriter(outFile);
-        final Map<Integer, Chain> originalChainMap = new TreeMap<Integer, Chain>();
-        for (final Chain chain : chains.getAll()) {
-            chain.write(pw);
-            originalChainMap.put(chain.id, chain);
-        }
-        pw.close();
-
-        final OverlapDetector<Chain> newChains = Chain.loadChains(outFile);
-        final Map<Integer, Chain> newChainMap = new TreeMap<Integer, Chain>();
-        for (final Chain chain : newChains.getAll()) {
-            newChainMap.put(chain.id, chain);
-        }
-        Assert.assertEquals(newChainMap, originalChainMap);
-    }
-}
diff --git a/src/tests/java/htsjdk/samtools/metrics/MetricsFileTest.java b/src/tests/java/htsjdk/samtools/metrics/MetricsFileTest.java
deleted file mode 100644
index 2393031..0000000
--- a/src/tests/java/htsjdk/samtools/metrics/MetricsFileTest.java
+++ /dev/null
@@ -1,213 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-
-package htsjdk.samtools.metrics;
-
-import htsjdk.samtools.SAMException;
-import htsjdk.samtools.util.FormatUtil;
-import htsjdk.samtools.util.Histogram;
-import htsjdk.samtools.util.TestUtil;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.Serializable;
-import java.util.Date;
-
-/**
- * Tests for the various classes in the metrics package. Constructs a MetricsFile,
- * populates it with various items, and then ensures that it can be written to disk
- * and read back without altering any values.
- *
- * @author Tim Fennell
- */
-public class MetricsFileTest {
-    public enum TestEnum {One, Two, Three}
-
-    public static class TestMetric extends MetricBase implements Cloneable, Serializable {
-        private static final long serialVersionUID = 1L;
-
-        public String    STRING_PROP;
-        public Date      DATE_PROP;
-        public Short     SHORT_PROP;
-        public Integer   INTEGER_PROP;
-        public Long      LONG_PROP;
-        public Float     FLOAT_PROP;
-        public Double    DOUBLE_PROP;
-        public TestEnum  ENUM_PROP;
-        public Boolean   BOOLEAN_PROP;
-        public Character CHARACTER_PROP;
-        public short     SHORT_PRIMITIVE;
-        public int       INT_PRIMITIVE;
-        public long      LONG_PRIMITIVE;
-        public float     FLOAT_PRIMITIVE;
-        public double    DOUBLE_PRIMITIVE;
-        public boolean   BOOLEAN_PRIMITIVE;
-        public char      CHAR_PRIMITIVE;
-
-        @Override
-        public TestMetric clone()  {
-            try { return (TestMetric) super.clone(); }
-            catch (CloneNotSupportedException cnse) { throw new SAMException("That's Unpossible!"); }
-        }
-    }
-
-    public static class FloatingPointMetric extends MetricBase{
-        public double DOUBLE_PRIMITIVE;
-        public Double DOUBLE_PROP;
-        public float  FLOAT_PRIMITIVE;
-        public Float FLOAT_PROP;
-    }
-
-    @Test
-    public void testFloatingPointEquality() throws IOException {
-        MetricsFile<FloatingPointMetric,Integer> file = new MetricsFile<FloatingPointMetric,Integer>();
-
-        FloatingPointMetric metric = new FloatingPointMetric();
-        metric.DOUBLE_PRIMITIVE = .0000000000000000001d;
-        metric.DOUBLE_PROP = .0000000000000000001d;
-        metric.FLOAT_PRIMITIVE = .0000000000000000001f;
-        metric.FLOAT_PROP = .0000000000000000001f;
-        file.addMetric(metric);
-
-        MetricsFile<FloatingPointMetric,Integer> file2 = writeThenReadBack(file);
-        Assert.assertEquals(file, file2);
-
-
-
-    }
-
-    @Test
-    public void testWriteMetricsFile() throws IOException, ClassNotFoundException {
-        MetricsFile<TestMetric,Integer> file = new MetricsFile<TestMetric,Integer>();
-        TestMetric metric = new TestMetric();
-        metric.STRING_PROP       = "Hello World";
-        metric.DATE_PROP         = new FormatUtil().parseDate("2008-12-31");
-        metric.SHORT_PROP        = 123;
-        metric.INTEGER_PROP      = null;
-        metric.LONG_PROP         = Long.MAX_VALUE;
-        metric.FLOAT_PROP        = 456.789f;
-        metric.DOUBLE_PROP       = 0.713487;
-        metric.ENUM_PROP         = TestEnum.Two;
-        metric.BOOLEAN_PROP      = false;
-        metric.CHARACTER_PROP    = 'A';
-        metric.SHORT_PRIMITIVE   = 123;
-        metric.INT_PRIMITIVE     = 919834781;
-        metric.LONG_PRIMITIVE    = Long.MAX_VALUE - Integer.MAX_VALUE;
-        metric.FLOAT_PRIMITIVE   = 0.55694f;
-        metric.DOUBLE_PRIMITIVE  = 0.229233;
-        metric.BOOLEAN_PRIMITIVE = true;
-        metric.CHAR_PRIMITIVE    = 'B';
-        file.addMetric(metric);
-
-        MetricsFile<TestMetric,Integer> file2 = writeThenReadBack(file);
-        Assert.assertEquals(file, file2);
-
-        // Now add some headers and run the test again
-        StringHeader stringHeader = new StringHeader();
-        stringHeader.setValue("Hello, I'm a String Header!");
-        file.addHeader(stringHeader);
-
-        VersionHeader version = new VersionHeader();
-        version.setVersionedItem("MetricsFileTest");
-        version.setVersionString("1.0");
-        file.addHeader(version);
-
-        version = new VersionHeader();
-        version.setVersionedItem("Nada");
-        version.setVersionString("0.0alpha1");
-        file.addHeader(version);
-
-        file2 = writeThenReadBack(file);
-        Assert.assertEquals(file, file2);
-
-        // Now add a Histogram and make sure it still works
-        Histogram<Integer> histo = new Histogram<Integer>();
-        histo.setBinLabel("small_number");
-        histo.setValueLabel("big_number");
-        histo.increment(1, 101);
-        histo.increment(2, 202);
-        histo.increment(3, 4000);
-        histo.increment(5, 123981);
-        histo.increment(1000, 10981982);
-        file.setHistogram(histo);
-
-        file2 = writeThenReadBack(file);
-        Assert.assertEquals(file, file2);
-
-        // And lastly add some more metrics rows to the file
-        TestMetric metric2 = metric.clone();
-        metric2.ENUM_PROP = TestEnum.One;
-        metric2.FLOAT_PROP = 0.998f;
-        metric2.STRING_PROP = "Wheeeee!";
-        file.addMetric(metric2);
-
-        metric2 = metric.clone();
-        metric2.ENUM_PROP = TestEnum.Three;
-        metric2.DOUBLE_PRIMITIVE = 1.299d;
-        file.addMetric(metric2);
-
-        file2 = writeThenReadBack(file);
-        Assert.assertEquals(file, file2);
-
-        // Test that we can serialize and deserialize this whole thing
-        MetricsFile<TestMetric, Integer> file3 = TestUtil.serializeAndDeserialize(file);
-
-        Assert.assertEquals(file, file3);
-    }
-
-    @Test
-    public void areMetricsFilesEqualTest(){
-        final File TEST_DIR = new File("testdata/htsjdk/samtools/metrics/");
-        final File file1 = new File(TEST_DIR,"metricsOne.metrics");
-        final File file2 = new File(TEST_DIR,"metricsOneCopy.metrics");
-        final File fileModifiedHist = new File(TEST_DIR,"metricsOneModifiedHistogram.metrics");
-        final File fileModifiedMet = new File(TEST_DIR,"metricsOneModifiedMetrics.metrics");
-
-        Assert.assertTrue(MetricsFile.areMetricsEqual(file1, file2));
-        Assert.assertTrue(MetricsFile.areMetricsEqual(file1, fileModifiedHist));
-
-        Assert.assertFalse(MetricsFile.areMetricsAndHistogramsEqual(file1, fileModifiedHist));
-        Assert.assertFalse(MetricsFile.areMetricsEqual(file1, fileModifiedMet));
-        Assert.assertFalse(MetricsFile.areMetricsAndHistogramsEqual(file1, fileModifiedMet));
-    }
-
-    /** Helper method to persist metrics to file and read them back again. */
-    private <METRIC extends MetricBase> MetricsFile<METRIC, Integer> writeThenReadBack(MetricsFile<METRIC,Integer> in) throws IOException {
-        File f = File.createTempFile("test", ".metrics");
-        f.deleteOnExit();
-        FileWriter out = new FileWriter(f);
-        in.write(out);
-
-        MetricsFile<METRIC,Integer> retval = new MetricsFile<METRIC,Integer>();
-        retval.read(new FileReader(f));
-        return retval;
-    }
-
-
-
-}
diff --git a/src/tests/java/htsjdk/samtools/reference/FastaSequenceFileTest.java b/src/tests/java/htsjdk/samtools/reference/FastaSequenceFileTest.java
deleted file mode 100644
index 328063e..0000000
--- a/src/tests/java/htsjdk/samtools/reference/FastaSequenceFileTest.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2010 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools.reference;
-
-import htsjdk.samtools.util.StringUtil;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.PrintWriter;
-
-/**
- * @author alecw at broadinstitute.org
- */
-public class FastaSequenceFileTest {
-    @Test
-    public void testTrailingWhitespace() throws Exception {
-        final File fasta = File.createTempFile("test", ".fasta");
-        fasta.deleteOnExit();
-        final PrintWriter writer = new PrintWriter(fasta);
-        final String chr1 = "chr1";
-        writer.println(">" + chr1);
-        final String sequence = "ACGTACGT";
-        writer.println(sequence);
-        writer.println(sequence + " \t");
-        writer.close();
-        final FastaSequenceFile fastaReader = new FastaSequenceFile(fasta, true);
-        final ReferenceSequence referenceSequence = fastaReader.nextSequence();
-        Assert.assertEquals(referenceSequence.getName(), chr1);
-        Assert.assertEquals(StringUtil.bytesToString(referenceSequence.getBases()), sequence + sequence);
-    }
-
-    @Test
-    public void testIntermediateWhitespace() throws Exception {
-        final File fasta = File.createTempFile("test", ".fasta");
-        fasta.deleteOnExit();
-        final PrintWriter writer = new PrintWriter(fasta);
-        final String chr1 = "chr1";
-        writer.println(">" + chr1 + " extra stuff after sequence name");
-        final String sequence = "ACGTACGT";
-        writer.println(sequence + "  ");
-        writer.println(sequence + " \t");
-        writer.println(sequence);
-        writer.close();
-        final FastaSequenceFile fastaReader = new FastaSequenceFile(fasta, true);
-        final ReferenceSequence referenceSequence = fastaReader.nextSequence();
-        Assert.assertEquals(referenceSequence.getName(), chr1);
-        Assert.assertEquals(StringUtil.bytesToString(referenceSequence.getBases()), sequence + sequence + sequence);
-    }
-
-    // There was a bug when reading a fasta with trailing whitespace that occurred only when a sequence dictionary exists.
-    @Test
-    public void testTrailingWhitespaceWithPreexistingSequenceDictionary() throws Exception {
-        final File fasta = new File("testdata/htsjdk/samtools/reference/reference_with_trailing_whitespace.fasta");
-        final FastaSequenceFile fastaReader = new FastaSequenceFile(fasta, true);
-        ReferenceSequence referenceSequence = fastaReader.nextSequence();
-        Assert.assertEquals(referenceSequence.getName(), "chr1");
-        Assert.assertEquals(StringUtil.bytesToString(referenceSequence.getBases()), "ACGTACGT");
-        referenceSequence = fastaReader.nextSequence();
-        Assert.assertEquals(referenceSequence.getName(), "chr2");
-        Assert.assertEquals(StringUtil.bytesToString(referenceSequence.getBases()), "TCGATCGA");
-
-    }
-}
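The tests above exercise FastaSequenceFile's handling of trailing and intermediate whitespace when name truncation is enabled. As a rough sketch of how a caller might read such a FASTA outside the test harness — the file path is hypothetical and only the constructor and methods already used in these tests are assumed — something like the following should work:

    import htsjdk.samtools.reference.FastaSequenceFile;
    import htsjdk.samtools.reference.ReferenceSequence;
    import htsjdk.samtools.util.StringUtil;

    import java.io.File;

    public class ReadFastaSketch {
        public static void main(final String[] args) {
            // Hypothetical input; any FASTA with stray whitespace behaves the same way.
            final File fasta = new File("example.fasta");
            // true => contig names are truncated at the first whitespace character.
            final FastaSequenceFile reader = new FastaSequenceFile(fasta, true);
            ReferenceSequence seq;
            while ((seq = reader.nextSequence()) != null) {
                // Whitespace inside and after sequence lines is stripped from the returned bases.
                System.out.println(seq.getName() + ": "
                        + StringUtil.bytesToString(seq.getBases()).length() + " bases");
            }
            reader.close();
        }
    }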
diff --git a/src/tests/java/htsjdk/samtools/reference/FastaSequenceIndexTest.java b/src/tests/java/htsjdk/samtools/reference/FastaSequenceIndexTest.java
deleted file mode 100644
index 511b1ab..0000000
--- a/src/tests/java/htsjdk/samtools/reference/FastaSequenceIndexTest.java
+++ /dev/null
@@ -1,256 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-
-package htsjdk.samtools.reference;
-
-import htsjdk.samtools.SAMException;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.Iterator;
-
-/**
- * Test the fasta sequence index reader.
- */
-public class FastaSequenceIndexTest {
-    private static File TEST_DATA_DIR = new File("testdata/htsjdk/samtools/reference");
-
-    @DataProvider(name="homosapiens")
-    public Object[][] provideHomoSapiens() throws FileNotFoundException {
-        final File sequenceIndexFile = new File(TEST_DATA_DIR,"Homo_sapiens_assembly18.fasta.fai");
-        return new Object[][] { new Object[]
-            { new FastaSequenceIndex(sequenceIndexFile) },
-            { new FastaSequenceIndex(sequenceIndexFile.toPath()) } };
-    }
-
-    @DataProvider(name="specialcharacters")
-    public Object[][] provideSpecialCharacters() throws FileNotFoundException {
-        final File sequenceIndexFile = new File(TEST_DATA_DIR,"testing.fai");
-        return new Object[][] { new Object[]
-            { new FastaSequenceIndex(sequenceIndexFile) },
-            { new FastaSequenceIndex(sequenceIndexFile.toPath()) } };
-    }
-
-    @Test(dataProvider="homosapiens")
-    public void testInitialContig(FastaSequenceIndex sequenceIndex) {
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chrM"),"Contig chrM is not present");
-        FastaSequenceIndexEntry entry = sequenceIndex.getIndexEntry("chrM");
-        Assert.assertEquals(entry.getContig(),"chrM","Contig chrM name is incorrect");
-        Assert.assertEquals(entry.getLocation(),6L,"Contig chrM location is incorrect");
-        Assert.assertEquals(entry.getSize(),16571L,"Contig chrM size is incorrect");
-        Assert.assertEquals(entry.getBasesPerLine(),50,"Contig chrM bases per line is incorrect");
-        Assert.assertEquals(entry.getBytesPerLine(),51,"Contig chrM bytes per line is incorrect");
-
-    }
-
-    @Test(dataProvider="homosapiens")
-    public void testMiddleContig(FastaSequenceIndex sequenceIndex) {
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr8"),"Contig chr8 is not present");
-        FastaSequenceIndexEntry entry = sequenceIndex.getIndexEntry("chr8");
-        Assert.assertEquals(entry.getContig(),"chr8","Contig chr8 name is incorrect");
-        Assert.assertEquals(entry.getLocation(),1419403101L,"Contig chr8 location is incorrect");
-        Assert.assertEquals(entry.getSize(),146274826L,"Contig chr8 size is incorrect");
-        Assert.assertEquals(entry.getBasesPerLine(),50,"Contig chr8 bases per line is incorrect");
-        Assert.assertEquals(entry.getBytesPerLine(),51,"Contig chr8 bytes per line is incorrect");
-    }
-
-    @Test(dataProvider="homosapiens")
-    public void testLastContig(FastaSequenceIndex sequenceIndex) {
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chrX_random"),"Contig chrX_random is not present");
-        FastaSequenceIndexEntry entry = sequenceIndex.getIndexEntry("chrX_random");
-        Assert.assertEquals(entry.getContig(),"chrX_random","Contig chrX_random name is incorrect");
-        Assert.assertEquals(entry.getLocation(),3156698441L,"Contig chrX_random location is incorrect");
-        Assert.assertEquals(entry.getSize(),1719168L,"Contig chrX_random size is incorrect");
-        Assert.assertEquals(entry.getBasesPerLine(),50,"Contig chrX_random bases per line is incorrect");
-        Assert.assertEquals(entry.getBytesPerLine(),51,"Contig chrX_random bytes per line is incorrect");
-    }
-
-    @Test(dataProvider="homosapiens")
-    public void testAllContigsPresent(FastaSequenceIndex sequenceIndex) {
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chrM"),"Contig chrM is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr1"),"Contig chr1 is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr2"),"Contig chr2 is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr3"),"Contig chr3 is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr4"),"Contig chr4 is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr5"),"Contig chr5 is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr6"),"Contig chr6 is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr7"),"Contig chr7 is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr8"),"Contig chr8 is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr9"),"Contig chr9 is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr10"),"Contig chr10 is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr11"),"Contig chr11 is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr12"),"Contig chr12 is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr13"),"Contig chr13 is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr14"),"Contig chr14 is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr15"),"Contig chr15 is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr16"),"Contig chr16 is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr17"),"Contig chr17 is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr18"),"Contig chr18 is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr19"),"Contig chr19 is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr20"),"Contig chr20 is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr21"),"Contig chr21 is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr22"),"Contig chr22 is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chrX"),"Contig chrX is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chrY"),"Contig chrY is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr1_random"),"Contig chr1_random is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr2_random"),"Contig chr2_random is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr3_random"),"Contig chr3_random is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr4_random"),"Contig chr4_random is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr5_random"),"Contig chr5_random is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr6_random"),"Contig chr6_random is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr7_random"),"Contig chr7_random is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr8_random"),"Contig chr8_random is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr9_random"),"Contig chr9_random is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr10_random"),"Contig chr10_random is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr11_random"),"Contig chr11_random is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr13_random"),"Contig chr13_random is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr15_random"),"Contig chr15_random is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr16_random"),"Contig chr16_random is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr17_random"),"Contig chr17_random is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr18_random"),"Contig chr18_random is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr19_random"),"Contig chr19_random is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr21_random"),"Contig chr21_random is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chr22_random"),"Contig chr22_random is not present");
-        Assert.assertTrue(sequenceIndex.hasIndexEntry("chrX_random"),"Contig chrX_random is not present");
-    }
-
-    @Test(dataProvider="homosapiens")
-    public void testHasInvalidEntry(FastaSequenceIndex sequenceIndex) {
-        Assert.assertFalse(sequenceIndex.hasIndexEntry("invalid"),"Found an invalid entry");
-    }
-
-    @Test(dataProvider="homosapiens",expectedExceptions=SAMException.class)
-    public void testGetInvalidEntry(FastaSequenceIndex sequenceIndex) {
-        sequenceIndex.getIndexEntry("invalid");
-    }
-
-    @Test(dataProvider="homosapiens")
-    public void testIteration(FastaSequenceIndex sequenceIndex) {
-        Iterator<FastaSequenceIndexEntry> sequenceIndexEntries = sequenceIndex.iterator();
-
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chrM","Contig chrM is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr1","Contig chr1 is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr2","Contig chr2 is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr3","Contig chr3 is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr4","Contig chr4 is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr5","Contig chr5 is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr6","Contig chr6 is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr7","Contig chr7 is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr8","Contig chr8 is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr9","Contig chr9 is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr10","Contig chr10 is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr11","Contig chr11 is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr12","Contig chr12 is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr13","Contig chr13 is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr14","Contig chr14 is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr15","Contig chr15 is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr16","Contig chr16 is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr17","Contig chr17 is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr18","Contig chr18 is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr19","Contig chr19 is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr20","Contig chr20 is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr21","Contig chr21 is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr22","Contig chr22 is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chrX","Contig chrX is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chrY","Contig chrY is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr1_random","Contig chr1_random is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr2_random","Contig chr2_random is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr3_random","Contig chr3_random is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr4_random","Contig chr4_random is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr5_random","Contig chr5_random is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr6_random","Contig chr6_random is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr7_random","Contig chr7_random is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr8_random","Contig chr8_random is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr9_random","Contig chr9_random is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr10_random","Contig chr10_random is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr11_random","Contig chr11_random is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr13_random","Contig chr13_random is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr15_random","Contig chr15_random is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr16_random","Contig chr16_random is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr17_random","Contig chr17_random is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr18_random","Contig chr18_random is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr19_random","Contig chr19_random is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr21_random","Contig chr21_random is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chr22_random","Contig chr22_random is not present");
-        Assert.assertEquals(sequenceIndexEntries.next().getContig(),"chrX_random","Contig chrX_random is not present");
-        Assert.assertFalse(sequenceIndexEntries.hasNext(),"Iterator still has more entries");
-    }
-
-    @Test(dataProvider="specialcharacters")
-    public void testSpecialCharacters(FastaSequenceIndex specialCharactersIndex) {
-        /* file contents:
-        chrM	16571	6	50	51
-        chr1;boat	247249719	16915	50	51
-        chr2:money	242951149	252211635	50	51
-        chr3::;	199501827	500021813	50	51
-        ;;;;;;  1234            1234            1234    1234
-        file:gi|17981852|ref|NC_001807.4|    16571   2911876801      70      71
-        */
-        Iterator<FastaSequenceIndexEntry> sequenceIndexEntries = specialCharactersIndex.iterator();
-        FastaSequenceIndexEntry ent = sequenceIndexEntries.next();
-        Assert.assertEquals(ent.getContig(),"chrM","Contig chrM is not present");
-        Assert.assertEquals(ent.getSize(),16571,"Contig chrM size is not correct");
-        Assert.assertEquals(ent.getLocation(),6,"Contig chrM location is not correct");
-        Assert.assertEquals(ent.getBasesPerLine(),50,"Contig chrM bases per line is not correct");
-        Assert.assertEquals(ent.getBytesPerLine(),51,"Contig chrM bytes per line is not correct");
-
-        ent = sequenceIndexEntries.next();
-        Assert.assertEquals(ent.getContig(),"chr1;boat","Contig chr1;boat is not present");
-        Assert.assertEquals(ent.getSize(),247249719,"Contig chr1;boat size is not correct");
-        Assert.assertEquals(ent.getLocation(),16915,"Contig chr1;boat location is not correct");
-        Assert.assertEquals(ent.getBasesPerLine(),50,"Contig chr1;boat bases per line is not correct");
-        Assert.assertEquals(ent.getBytesPerLine(),51,"Contig chr1;boat bytes per line is not correct");
-
-        ent = sequenceIndexEntries.next();
-        Assert.assertEquals(ent.getContig(),"chr2:money","Contig chr2:money is not present");
-        Assert.assertEquals(ent.getSize(),242951149,"Contig chr2:money size is not correct");
-        Assert.assertEquals(ent.getLocation(),252211635,"Contig chr2:money location is not correct");
-        Assert.assertEquals(ent.getBasesPerLine(),50,"Contig chr2:money bases per line is not correct");
-        Assert.assertEquals(ent.getBytesPerLine(),51,"Contig chr2:money bytes per line is not correct");
-
-        ent = sequenceIndexEntries.next();
-        Assert.assertEquals(ent.getContig(),"chr3::;","Contig chr3::; is not present");
-        Assert.assertEquals(ent.getSize(),199501827,"Contig chr3::; size is not correct");
-        Assert.assertEquals(ent.getLocation(),500021813,"Contig chr3::; location is not correct");
-        Assert.assertEquals(ent.getBasesPerLine(),50,"Contig chr3::; bases per line is not correct");
-        Assert.assertEquals(ent.getBytesPerLine(),51,"Contig chr3::; bytes per line is not correct");
-
-        ent = sequenceIndexEntries.next();
-        Assert.assertEquals(ent.getContig(),";;;;;;;;","Contig ;;;;;;;; is not present");
-        Assert.assertEquals(ent.getSize(),123,"Contig ;;;;;;;; size is not correct");
-        Assert.assertEquals(ent.getLocation(),234,"Contig ;;;;;;;; location is not correct");
-        Assert.assertEquals(ent.getBasesPerLine(),456,"Contig ;;;;;;;; bases per line is not correct");
-        Assert.assertEquals(ent.getBytesPerLine(),789,"Contig ;;;;;;;; bytes per line is not correct");
-
-        ent = sequenceIndexEntries.next();
-        Assert.assertEquals(ent.getContig(),"file:gi|17981852|ref|NC_001807.4|","Contig file:gi|17981852|ref|NC_001807.4| is not present");
-        Assert.assertEquals(ent.getSize(),16571,"Contig file:gi|17981852|ref|NC_001807.4| size is not correct");
-        Assert.assertEquals(ent.getLocation(),2911876801L,"Contig file:gi|17981852|ref|NC_001807.4| location is not correct");
-        Assert.assertEquals(ent.getBasesPerLine(),70,"Contig file:gi|17981852|ref|NC_001807.4| bases per line is not correct");
-        Assert.assertEquals(ent.getBytesPerLine(),71,"Contig file:gi|17981852|ref|NC_001807.4| bytes per line is not correct");
-    }
-}
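The commented .fai excerpt in testSpecialCharacters shows the five tab-separated columns of a fasta index entry: contig name, sequence length, byte offset of the first base, bases per line, and bytes per line. A minimal sketch of loading an index and dumping those fields, assuming only the constructor and iterator used in the tests (the index path is a placeholder):

    import htsjdk.samtools.reference.FastaSequenceIndex;
    import htsjdk.samtools.reference.FastaSequenceIndexEntry;

    import java.io.File;
    import java.util.Iterator;

    public class InspectFaiSketch {
        public static void main(final String[] args) {
            // Hypothetical index; a .fai is normally produced by "samtools faidx".
            final FastaSequenceIndex index = new FastaSequenceIndex(new File("example.fasta.fai"));
            final Iterator<FastaSequenceIndexEntry> entries = index.iterator();
            while (entries.hasNext()) {
                final FastaSequenceIndexEntry entry = entries.next();
                // Each entry mirrors one line of the .fai file.
                System.out.printf("%s\tlength=%d\toffset=%d\tbases/line=%d\tbytes/line=%d%n",
                        entry.getContig(), entry.getSize(), entry.getLocation(),
                        entry.getBasesPerLine(), entry.getBytesPerLine());
            }
        }
    }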
diff --git a/src/tests/java/htsjdk/samtools/reference/IndexedFastaSequenceFileTest.java b/src/tests/java/htsjdk/samtools/reference/IndexedFastaSequenceFileTest.java
deleted file mode 100644
index 5c1a9ac..0000000
--- a/src/tests/java/htsjdk/samtools/reference/IndexedFastaSequenceFileTest.java
+++ /dev/null
@@ -1,308 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-
-package htsjdk.samtools.reference;
-
-import htsjdk.samtools.SAMException;
-import htsjdk.samtools.util.CloserUtil;
-import htsjdk.samtools.util.StringUtil;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-
-/**
- * Test the indexed fasta sequence file reader.
- */
-public class IndexedFastaSequenceFileTest{
-    private static File TEST_DATA_DIR = new File("testdata/htsjdk/samtools/reference");
-    private static File SEQUENCE_FILE = new File(TEST_DATA_DIR,"Homo_sapiens_assembly18.trimmed.fasta");
-    private static File SEQUENCE_FILE_NODICT = new File(TEST_DATA_DIR,"Homo_sapiens_assembly18.trimmed.nodict.fasta");
-
-    private final String firstBasesOfChrM = "GATCACAGGTCTATCACCCT";
-    private final String extendedBasesOfChrM = "GATCACAGGTCTATCACCCTATTAACCACTCACGGGAGCTCTCCATGCAT" +
-                                               "TTGGTATTTTCGTCTGGGGGGTGTGCACGCGATAGCATTGCGAGACGCTG" +
-                                               "GAGCCGGAGCACCCTATGTCGCAGTATCTGTCTTTGATTCCTGCCTCATT";
-    private final String lastBasesOfChr20 = "ttgtctgatgctcatattgt";
-    private final int CHR20_LENGTH = 1000000;
-
-    @DataProvider(name="homosapiens")
-    public Object[][] provideSequenceFile() throws FileNotFoundException {
-        return new Object[][] { new Object[]
-                { new IndexedFastaSequenceFile(SEQUENCE_FILE) },
-                { new IndexedFastaSequenceFile(SEQUENCE_FILE_NODICT) },
-                { new IndexedFastaSequenceFile(SEQUENCE_FILE.toPath()) },
-                { new IndexedFastaSequenceFile(SEQUENCE_FILE_NODICT.toPath()) }};
-    }
-
-    @DataProvider(name="comparative")
-    public Object[][] provideOriginalAndNewReaders() throws FileNotFoundException {
-        return new Object[][] {
-                new Object[] { ReferenceSequenceFileFactory.getReferenceSequenceFile(SEQUENCE_FILE),
-                                               new IndexedFastaSequenceFile(SEQUENCE_FILE) },
-                new Object[] { ReferenceSequenceFileFactory.getReferenceSequenceFile(SEQUENCE_FILE, true),
-                                               new IndexedFastaSequenceFile(SEQUENCE_FILE) },
-                new Object[] { ReferenceSequenceFileFactory.getReferenceSequenceFile(SEQUENCE_FILE.toPath()),
-                                               new IndexedFastaSequenceFile(SEQUENCE_FILE.toPath()) },
-                new Object[] { ReferenceSequenceFileFactory.getReferenceSequenceFile(SEQUENCE_FILE.toPath(), true),
-                                               new IndexedFastaSequenceFile(SEQUENCE_FILE.toPath()) },};
-    }
-
-    @Test(dataProvider="homosapiens")
-    public void testOpenFile(IndexedFastaSequenceFile sequenceFile) {
-        long startTime = System.currentTimeMillis();
-        Assert.assertNotNull(sequenceFile);
-        long endTime = System.currentTimeMillis();
-        CloserUtil.close(sequenceFile);
-
-        System.err.printf("testOpenFile runtime: %dms%n", (endTime - startTime)) ;
-    }
-
-    @Test(dataProvider="homosapiens")
-    public void testFirstSequence(IndexedFastaSequenceFile sequenceFile) {
-        long startTime = System.currentTimeMillis();
-        ReferenceSequence sequence = sequenceFile.getSubsequenceAt("chrM",1,firstBasesOfChrM.length());
-        long endTime = System.currentTimeMillis();
-
-        Assert.assertEquals(sequence.getName(),"chrM","Sequence contig is not correct");
-        Assert.assertEquals(sequence.getContigIndex(),0,"Sequence contig index is not correct");
-        Assert.assertEquals(StringUtil.bytesToString(sequence.getBases()),firstBasesOfChrM,"First n bases of chrM are incorrect");
-
-        CloserUtil.close(sequenceFile);
-
-        System.err.printf("testFirstSequence runtime: %dms%n", (endTime - startTime)) ;
-    }
-
-    @Test(dataProvider="homosapiens")
-    public void testFirstSequenceExtended(IndexedFastaSequenceFile sequenceFile) {
-        long startTime = System.currentTimeMillis();
-        ReferenceSequence sequence = sequenceFile.getSubsequenceAt("chrM",1,extendedBasesOfChrM.length());
-        long endTime = System.currentTimeMillis();
-
-        Assert.assertEquals(sequence.getName(),"chrM","Sequence contig is not correct");
-        Assert.assertEquals(sequence.getContigIndex(),0,"Sequence contig index is not correct");
-        Assert.assertEquals(StringUtil.bytesToString(sequence.getBases()),extendedBasesOfChrM,"First n bases of chrM are incorrect");
-
-        CloserUtil.close(sequenceFile);
-
-        System.err.printf("testFirstSequenceExtended runtime: %dms%n", (endTime - startTime)) ;
-    }
-
-    @Test(dataProvider="homosapiens")
-    public void testReadStartingInCenterOfFirstLine(IndexedFastaSequenceFile sequenceFile) {
-        final int bytesToChopOff = 5;
-        String truncated = extendedBasesOfChrM.substring(bytesToChopOff);
-
-        long startTime = System.currentTimeMillis();
-        ReferenceSequence sequence = sequenceFile.getSubsequenceAt("chrM",
-                                                                   bytesToChopOff + 1,
-                                                                   bytesToChopOff + truncated.length());
-        long endTime = System.currentTimeMillis();
-
-        Assert.assertEquals(sequence.getName(),"chrM","Sequence contig is not correct");
-        Assert.assertEquals(sequence.getContigIndex(),0,"Sequence contig index is not correct");
-        Assert.assertEquals(StringUtil.bytesToString(sequence.getBases()),truncated,"Bases of chrM read from mid-line are incorrect");
-
-        CloserUtil.close(sequenceFile);
-
-        System.err.printf("testReadStartingInCenterOfFirstLine runtime: %dms%n", (endTime - startTime)) ;
-    }
-
-    @Test(dataProvider="homosapiens")
-    public void testReadStartingInCenterOfMiddleLine(IndexedFastaSequenceFile sequenceFile) {
-        final int bytesToChopOff = 120;
-        String truncated = extendedBasesOfChrM.substring(bytesToChopOff);
-
-        long startTime = System.currentTimeMillis();
-        ReferenceSequence sequence = sequenceFile.getSubsequenceAt("chrM",
-                                                                   bytesToChopOff + 1,
-                                                                   bytesToChopOff + truncated.length());
-        long endTime = System.currentTimeMillis();
-
-        Assert.assertEquals(sequence.getName(),"chrM","Sequence contig is not correct");
-        Assert.assertEquals(sequence.getContigIndex(),0,"Sequence contig index is not correct");
-        Assert.assertEquals(StringUtil.bytesToString(sequence.getBases()),truncated,"Bases of chrM read from mid-line are incorrect");
-
-        CloserUtil.close(sequenceFile);
-
-        System.err.printf("testReadStartingInCenterOfMiddleLine runtime: %dms%n", (endTime - startTime)) ;
-    }
-
-    @Test(dataProvider="comparative")
-    public void testFirstCompleteContigRead(ReferenceSequenceFile originalSequenceFile, IndexedFastaSequenceFile sequenceFile) {
-        ReferenceSequence expectedSequence = originalSequenceFile.nextSequence();
-
-        long startTime = System.currentTimeMillis();
-        ReferenceSequence sequence = sequenceFile.getSequence("chrM");
-        long endTime = System.currentTimeMillis();
-
-        Assert.assertEquals(sequence.getName(),"chrM","Sequence contig is not correct");
-        Assert.assertEquals(sequence.getContigIndex(),0,"Sequence contig index is not correct");
-        Assert.assertEquals(StringUtil.bytesToString(sequence.getBases()),StringUtil.bytesToString(expectedSequence.getBases()),"chrM is incorrect");
-
-        CloserUtil.close(originalSequenceFile);
-        CloserUtil.close(sequenceFile);
-
-        System.err.printf("testFirstCompleteContigRead runtime: %dms%n", (endTime - startTime)) ;
-    }
-
-    @Test(dataProvider="homosapiens",expectedExceptions=SAMException.class)
-    public void testReadThroughEndOfContig(IndexedFastaSequenceFile sequenceFile) {
-        long startTime = System.currentTimeMillis();
-        try {
-            sequenceFile.getSubsequenceAt("chrM",16500,16600);
-        }
-        finally {
-            long endTime = System.currentTimeMillis();
-
-            CloserUtil.close(sequenceFile);
-
-            System.err.printf("testReadThroughEndOfContig runtime: %dms%n", (endTime - startTime)) ;
-        }
-    }
-
-    @Test(dataProvider="homosapiens",expectedExceptions=SAMException.class)
-    public void testReadPastEndOfContig(IndexedFastaSequenceFile sequenceFile) {
-         long startTime = System.currentTimeMillis();
-         try {
-             sequenceFile.getSubsequenceAt("chrM",16800,16900);
-         }
-         finally {
-             long endTime = System.currentTimeMillis();
-
-             CloserUtil.close(sequenceFile);
-
-             System.err.printf("testReadPastEndOfContig runtime: %dms%n", (endTime - startTime)) ;
-         }
-     }
-
-    @Test(dataProvider="comparative")
-    public void testLastCompleteContigRead(ReferenceSequenceFile originalSequenceFile, IndexedFastaSequenceFile sequenceFile) {
-        ReferenceSequence expectedSequence = originalSequenceFile.nextSequence();
-        while( !expectedSequence.getName().equals("chr20") )
-            expectedSequence = originalSequenceFile.nextSequence();
-
-        long startTime = System.currentTimeMillis();
-        ReferenceSequence sequence = sequenceFile.getSequence("chr20");
-        long endTime = System.currentTimeMillis();
-
-        Assert.assertEquals(sequence.getName(),"chr20","Sequence contig is not correct");
-        Assert.assertEquals(sequence.getContigIndex(),1,"Sequence contig index is not correct");
-        Assert.assertEquals(StringUtil.bytesToString(sequence.getBases()),StringUtil.bytesToString(expectedSequence.getBases()),"chr20 is incorrect");
-
-        CloserUtil.close(originalSequenceFile);
-        CloserUtil.close(sequenceFile);
-
-        System.err.printf("testLastCompleteContigRead runtime: %dms%n", (endTime - startTime)) ;
-    }
-
-
-    @Test(dataProvider="homosapiens")
-    public void testLastOfChr20(IndexedFastaSequenceFile sequenceFile) {
-        long startTime = System.currentTimeMillis();
-        ReferenceSequence sequence = sequenceFile.getSubsequenceAt("chr20",
-                                                                   CHR20_LENGTH - lastBasesOfChr20.length()+1,
-                                                                   CHR20_LENGTH);
-        long endTime = System.currentTimeMillis();
-
-        Assert.assertEquals(sequence.getName(),"chr20","Sequence contig is not correct");
-        Assert.assertEquals(sequence.getContigIndex(),1,"Sequence contig index is not correct");
-        Assert.assertEquals(StringUtil.bytesToString(sequence.getBases()),lastBasesOfChr20,"Last bases of chr20 are incorrect");
-
-        CloserUtil.close(sequenceFile);
-
-        System.err.printf("testFirstOfChr1 runtime: %dms%n", (endTime - startTime)) ;
-    }
-
-    @Test(dataProvider="comparative")
-    public void testFirstElementOfIterator(ReferenceSequenceFile originalSequenceFile,IndexedFastaSequenceFile sequenceFile) {
-        ReferenceSequence expectedSequence = originalSequenceFile.nextSequence();
-
-        long startTime = System.currentTimeMillis();
-        ReferenceSequence sequence = sequenceFile.nextSequence();
-        long endTime = System.currentTimeMillis();
-
-        Assert.assertEquals(sequence.getName(), "chrM","Sequence contig is not correct");
-        Assert.assertEquals(sequence.getContigIndex(), 0,"Sequence contig index is not correct");
-        Assert.assertEquals(StringUtil.bytesToString(sequence.getBases()),StringUtil.bytesToString(expectedSequence.getBases()),"chrM is incorrect");
-
-        CloserUtil.close(originalSequenceFile);
-        CloserUtil.close(sequenceFile);
-
-        System.err.printf("testFirstElementOfIterator runtime: %dms%n", (endTime - startTime)) ;
-    }
-
-    @Test(dataProvider="comparative")
-    public void testNextElementOfIterator(ReferenceSequenceFile originalSequenceFile, IndexedFastaSequenceFile sequenceFile) {
-        // Skip past the first one and load the second one.
-        originalSequenceFile.nextSequence();
-        ReferenceSequence expectedSequence = originalSequenceFile.nextSequence();
-
-        long startTime = System.currentTimeMillis();
-        sequenceFile.nextSequence();
-        ReferenceSequence sequence = sequenceFile.nextSequence();
-        long endTime = System.currentTimeMillis();
-
-        Assert.assertEquals(sequence.getName(),"chr20","Sequence contig is not correct");
-        Assert.assertEquals(sequence.getContigIndex(),1,"Sequence contig index is not correct");
-        Assert.assertEquals(sequence.length(),expectedSequence.length(),"Sequence size is not correct");
-        Assert.assertEquals(StringUtil.bytesToString(sequence.getBases()),StringUtil.bytesToString(expectedSequence.getBases()),"chr20 is incorrect");
-
-        CloserUtil.close(originalSequenceFile);
-        CloserUtil.close(sequenceFile);
-
-        System.err.printf("testNextElementOfIterator runtime: %dms%n", (endTime - startTime)) ;
-    }
-
-    @Test(dataProvider="comparative")
-    public void testReset(ReferenceSequenceFile originalSequenceFile, IndexedFastaSequenceFile sequenceFile) {
-        // Read the first sequence for comparison; advance the indexed reader twice, then reset it below.
-        ReferenceSequence expectedSequence = originalSequenceFile.nextSequence();
-
-        long startTime = System.currentTimeMillis();
-        sequenceFile.nextSequence();
-        sequenceFile.nextSequence();
-        sequenceFile.reset();
-        ReferenceSequence sequence = sequenceFile.nextSequence();
-        long endTime = System.currentTimeMillis();
-
-        Assert.assertEquals(sequence.getName(),"chrM","Sequence contig is not correct");
-        Assert.assertEquals(sequence.getContigIndex(),0,"Sequence contig index is not correct");
-        Assert.assertEquals(sequence.length(),expectedSequence.length(), "Sequence size is not correct");
-        Assert.assertEquals(StringUtil.bytesToString(sequence.getBases()),StringUtil.bytesToString(expectedSequence.getBases()),"chrM is incorrect");
-
-        CloserUtil.close(originalSequenceFile);
-        CloserUtil.close(sequenceFile);
-
-        System.err.printf("testReset runtime: %dms%n", (endTime - startTime)) ;
-    }
-
-    @Test(expectedExceptions = FileNotFoundException.class)
-    public void testMissingFile() throws Exception {
-        new IndexedFastaSequenceFile(new File(TEST_DATA_DIR, "non-existent.fasta"));
-        Assert.fail("FileNotFoundException should have been thrown");
-    }
-}
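These tests rely on getSubsequenceAt for random access into an indexed FASTA, with 1-based inclusive coordinates. A short sketch of that usage pattern, assuming only the constructor and methods exercised above (the reference path and coordinates are placeholders):

    import htsjdk.samtools.reference.IndexedFastaSequenceFile;
    import htsjdk.samtools.reference.ReferenceSequence;
    import htsjdk.samtools.util.StringUtil;

    import java.io.File;
    import java.io.FileNotFoundException;

    public class RandomAccessFastaSketch {
        public static void main(final String[] args) throws FileNotFoundException {
            // Hypothetical reference; an example.fasta.fai must sit next to it.
            final IndexedFastaSequenceFile ref = new IndexedFastaSequenceFile(new File("example.fasta"));
            // Coordinates are 1-based and inclusive, as in the tests above.
            final ReferenceSequence slice = ref.getSubsequenceAt("chrM", 1, 20);
            System.out.println(StringUtil.bytesToString(slice.getBases()));
            // Requesting bases beyond the contig end throws a SAMException (see testReadThroughEndOfContig).
        }
    }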
diff --git a/src/tests/java/htsjdk/samtools/reference/ReferenceSequenceFileFactoryTests.java b/src/tests/java/htsjdk/samtools/reference/ReferenceSequenceFileFactoryTests.java
deleted file mode 100644
index 6e0f1ca..0000000
--- a/src/tests/java/htsjdk/samtools/reference/ReferenceSequenceFileFactoryTests.java
+++ /dev/null
@@ -1,39 +0,0 @@
-package htsjdk.samtools.reference;
-
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.io.File;
-
-/**
- * Simple tests for the reference sequence file factory
- */
-public class ReferenceSequenceFileFactoryTests {
-    public static final File hg18 = new File("testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta");
-
-    @Test public void testPositivePath() {
-        final ReferenceSequenceFile f = ReferenceSequenceFileFactory.getReferenceSequenceFile(hg18);
-        Assert.assertTrue(f instanceof AbstractFastaSequenceFile);
-    }
-
-    @Test public void testGetIndexedReader() {
-        final ReferenceSequenceFile f = ReferenceSequenceFileFactory.getReferenceSequenceFile(hg18, true, true);
-        Assert.assertTrue(f instanceof IndexedFastaSequenceFile, "Got non-indexed reader when expecting indexed reader.");
-    }
-
-    @Test public void testGetNonIndexedReader1() {
-        final ReferenceSequenceFile f = ReferenceSequenceFileFactory.getReferenceSequenceFile(hg18, false, true);
-        Assert.assertTrue(f instanceof FastaSequenceFile, "Got indexed reader when truncating at whitespace! FAI must truncate.");
-    }
-
-    @Test public void testGetNonIndexedReader2() {
-        final ReferenceSequenceFile f = ReferenceSequenceFileFactory.getReferenceSequenceFile(hg18, true, false);
-        Assert.assertTrue(f instanceof FastaSequenceFile, "Got indexed reader when requesting non-indexed reader.");
-    }
-
-    @Test public void testDefaultToIndexed() {
-        final ReferenceSequenceFile f = ReferenceSequenceFileFactory.getReferenceSequenceFile(hg18, true);
-        Assert.assertTrue(f instanceof IndexedFastaSequenceFile, "Got non-indexed reader by default.");
-    }
-
-}
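The factory tests check which reader type getReferenceSequenceFile returns for each flag combination; going by the assertion messages, the second argument controls truncating names at whitespace and the third controls whether an indexed reader may be used. A sketch from the caller's side, with a hypothetical reference path:

    import htsjdk.samtools.reference.ReferenceSequenceFile;
    import htsjdk.samtools.reference.ReferenceSequenceFileFactory;

    import java.io.File;

    public class FactorySketch {
        public static void main(final String[] args) {
            // Hypothetical path with example.fasta.fai alongside it.
            final File fasta = new File("example.fasta");
            // Truncate names and allow index use -> indexed reader when a .fai is present.
            final ReferenceSequenceFile indexed =
                    ReferenceSequenceFileFactory.getReferenceSequenceFile(fasta, true, true);
            // Disallow index use -> plain FastaSequenceFile, read sequentially.
            final ReferenceSequenceFile sequential =
                    ReferenceSequenceFileFactory.getReferenceSequenceFile(fasta, true, false);
            System.out.println(indexed.getClass().getSimpleName()
                    + " / " + sequential.getClass().getSimpleName());
        }
    }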
diff --git a/src/tests/java/htsjdk/samtools/reference/ReferenceSequenceFileWalkerTest.java b/src/tests/java/htsjdk/samtools/reference/ReferenceSequenceFileWalkerTest.java
deleted file mode 100644
index 3924354..0000000
--- a/src/tests/java/htsjdk/samtools/reference/ReferenceSequenceFileWalkerTest.java
+++ /dev/null
@@ -1,73 +0,0 @@
-package htsjdk.samtools.reference;
-
-import htsjdk.samtools.SAMException;
-import htsjdk.samtools.util.CloserUtil;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-
-/**
- * Created by farjoun on 2/14/14.
- */
-public class ReferenceSequenceFileWalkerTest {
-
-
-    @DataProvider(name = "TestReference")
-    public Object[][] TestReference() {
-        return new Object[][]{
-                new Object[]{"testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta", 0, 1},
-                new Object[]{"testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta", 1, 1},
-                new Object[]{"testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta", 0, 0},
-        };
-    }
-
-
-    @Test(dataProvider = "TestReference")
-    public void testGet(final String fileName, final int index1, final int index2) throws SAMException {
-        final File refFile = new File(fileName);
-        final ReferenceSequenceFileWalker refWalker = new ReferenceSequenceFileWalker(refFile);
-
-        ReferenceSequence sequence = refWalker.get(index1);
-        Assert.assertEquals(sequence.getContigIndex(), index1);
-
-        sequence = refWalker.get(index2);
-        Assert.assertEquals(sequence.getContigIndex(), index2);
-        CloserUtil.close(refWalker);
-    }
-
-
-    @DataProvider(name = "TestFailReference")
-    public Object[][] TestFailReference() {
-        return new Object[][]{
-                new Object[]{"testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.noindex.fasta", 1,3},  //fail because out of bounds
-                new Object[]{"testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.noindex.fasta", 2,3},  //fail because out of bounds
-                new Object[]{"testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.noindex.fasta", 1,0},  //fail because not allowed to look back
-                new Object[]{"testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.noindex.fasta", -1,0},  //fail because out of bounds
-                new Object[]{"testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta", -1, 0},  //fail because out of bounds
-                new Object[]{"testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta", 1, -1},    //fail because out of bounds
-                new Object[]{"testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta", 2,3},  //fail because out of bounds
-                new Object[]{"testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta", 1,3},  //fail because out of bounds
-                new Object[]{"testdata/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta", 1, 0} // fasta is indexed, but not allowed to look back
-        };
-    }
-
-
-    @Test(expectedExceptions = {SAMException.class}, dataProvider = "TestFailReference")
-    public void testFailGet(final String fileName, final int index1, final int index2) throws SAMException {
-        final File refFile = new File(fileName);
-        final ReferenceSequenceFileWalker refWalker = new ReferenceSequenceFileWalker(refFile);
-
-        try {
-            refWalker.get(index1);
-
-            refWalker.get(index2);
-        }
-        finally {
-            CloserUtil.close(refWalker);
-        }
-    }
-
-
-}
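The failure cases above encode the walker's contract: contigs must be requested by non-decreasing sequence index, indices must be in range, and looking back throws a SAMException. A minimal sketch of the intended forward-only usage, with a hypothetical reference path:

    import htsjdk.samtools.reference.ReferenceSequence;
    import htsjdk.samtools.reference.ReferenceSequenceFileWalker;
    import htsjdk.samtools.util.CloserUtil;

    import java.io.File;

    public class WalkerSketch {
        public static void main(final String[] args) {
            final ReferenceSequenceFileWalker walker =
                    new ReferenceSequenceFileWalker(new File("example.fasta")); // hypothetical reference
            try {
                // Indices must not go backwards: 0 then 1 is fine, 1 then 0 throws.
                final ReferenceSequence first = walker.get(0);
                final ReferenceSequence second = walker.get(1);
                System.out.println(first.getName() + " then " + second.getName());
            } finally {
                CloserUtil.close(walker);
            }
        }
    }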
diff --git a/src/tests/java/htsjdk/samtools/seekablestream/SeekableBufferedStreamTest.java b/src/tests/java/htsjdk/samtools/seekablestream/SeekableBufferedStreamTest.java
deleted file mode 100644
index e8999ff..0000000
--- a/src/tests/java/htsjdk/samtools/seekablestream/SeekableBufferedStreamTest.java
+++ /dev/null
@@ -1,162 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-
-package htsjdk.samtools.seekablestream;
-
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.URL;
-
-import static org.testng.Assert.assertEquals;
-
-public class SeekableBufferedStreamTest {
-
-//    private final File BAM_INDEX_FILE = new File("testdata/htsjdk/samtools/BAMFileIndexTest/index_test.bam.bai");
-    private final File BAM_FILE = new File("testdata/htsjdk/samtools/BAMFileIndexTest/index_test.bam");
-    private final String BAM_URL_STRING = "http://broadinstitute.github.io/picard/testdata/index_test.bam";
-    private static File TestFile = new File("testdata/htsjdk/samtools/seekablestream/megabyteZeros.dat");
-
-    /**
-     * Test reading across a buffer boundary (buffer size is 512000). The test first reads a range of
-     * bytes using an unbuffered file stream, then compares the result to the same range read from a buffered HTTP stream.
-     *
-     * @throws IOException
-     */
-    @Test
-    public void testRandomRead() throws IOException {
-
-        int startPosition = 500000;
-        int length = 50000;
-
-        byte[] buffer1 = new byte[length];
-        SeekableStream unBufferedStream = new SeekableFileStream(BAM_FILE);
-        unBufferedStream.seek(startPosition);
-        int bytesRead = unBufferedStream.read(buffer1, 0, length);
-        assertEquals(length, bytesRead);
-
-        byte[] buffer2 = new byte[length];
-        SeekableStream bufferedStream = new SeekableBufferedStream(new SeekableHTTPStream(new URL(BAM_URL_STRING)));
-        bufferedStream.seek(startPosition);
-        bytesRead = bufferedStream.read(buffer2, 0, length);
-        assertEquals(length, bytesRead);
-
-        assertEquals(buffer1, buffer2);
-    }
-
-    /**
-     * Test an attempt to read past the end of the file.  The test file is 594,149 bytes in length.  The test
-     * attempts to read a 1000 byte block starting at position 594000.  A correct result would return 149 bytes.
-     *
-     * @throws IOException
-     */
-    @Test
-    public void testEOF() throws IOException {
-
-        int remainder = 149;
-        long fileLength = BAM_FILE.length();
-        long startPosition = fileLength - remainder;
-        int length = 1000;
-
-
-        byte[] buffer = new byte[length];
-        SeekableStream bufferedStream = new SeekableBufferedStream(new SeekableHTTPStream(new URL(BAM_URL_STRING)));
-        bufferedStream.seek(startPosition);
-        int bytesRead = bufferedStream.read(buffer, 0, length);
-        assertEquals(remainder, bytesRead);
-
-        // Subsequent reads should return -1
-        bytesRead = bufferedStream.read(buffer, 0, length);
-        assertEquals(-1, bytesRead);
-    }
-
-    @Test
-    public void testSkip() throws IOException {
-        final int[] BUFFER_SIZES = new int[]{8, 96, 1024, 8*1024, 16*1024, 96*1024, 48*1024};
-
-        for (final int bufferSize : BUFFER_SIZES) {
-            final SeekableBufferedStream in1 = new SeekableBufferedStream(new SeekableFileStream(BAM_FILE), bufferSize);
-            final SeekableBufferedStream in2 = new SeekableBufferedStream(new SeekableFileStream(BAM_FILE), bufferSize);
-
-            final int SIZE = 10000;
-            final byte[] bytes1 = new byte[SIZE];
-            final byte[] bytes2 = new byte[SIZE];
-
-            reallyRead(bytes1, in1);
-            reallyRead(bytes1, in1);
-            in1.skip(bytes1.length);
-            reallyRead(bytes1, in1);
-
-            reallyRead(bytes2, in2);
-            reallyRead(bytes2, in2);
-            in2.seek(bytes2.length * 3);
-            reallyRead(bytes2, in2);
-
-            in1.close();
-            in2.close();
-
-            Assert.assertEquals(bytes1, bytes2, "Error at buffer size " + bufferSize);
-        }
-    }
-
-    private int reallyRead(final byte[] bytes, final SeekableBufferedStream in) throws IOException {
-        int read = 0, total = 0;
-        do {
-            read = in.read(bytes, total, bytes.length-total);
-            total += read;
-        } while (total != bytes.length && read > 0);
-
-        return total;
-    }
-
-
-    @Test
-    public void testDivisibleReads() throws IOException {
-
-        testReadsLength(1);
-        testReadsLength(2);
-        testReadsLength(4);
-        testReadsLength(5);
-        testReadsLength(10);
-        testReadsLength(20);
-        testReadsLength(50);
-        testReadsLength(100);
-
-    }
-
-    private void testReadsLength(final int length) throws IOException {
-
-        final int BUFFERED_STREAM_BUFFER_SIZE = 100;
-        final byte[] buffer = new byte[BUFFERED_STREAM_BUFFER_SIZE * 10];
-        final SeekableFileStream fileStream = new SeekableFileStream(TestFile);
-        final SeekableBufferedStream bufferedStream = new SeekableBufferedStream(fileStream, BUFFERED_STREAM_BUFFER_SIZE);
-
-        for (int i = 0; i < 10 * BUFFERED_STREAM_BUFFER_SIZE / length; ++i) {
-            assertEquals(bufferedStream.read(buffer, 0, length), length);
-        }
-    }
-
-}
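The javadoc comments above describe two behaviours of the buffered stream: reads that cross an internal buffer boundary must match an unbuffered read, and a read that crosses end of file returns only the remaining bytes, with the next read returning -1. A short sketch of the end-of-file case over a local file, assuming only the constructors and methods exercised in the tests (the path is a placeholder):

    import htsjdk.samtools.seekablestream.SeekableBufferedStream;
    import htsjdk.samtools.seekablestream.SeekableFileStream;

    import java.io.File;
    import java.io.IOException;

    public class BufferedSeekSketch {
        public static void main(final String[] args) throws IOException {
            final File file = new File("example.dat"); // hypothetical local file
            final SeekableBufferedStream in =
                    new SeekableBufferedStream(new SeekableFileStream(file), 512000);
            try {
                // Seek close to the end so the read crosses EOF.
                in.seek(Math.max(0, file.length() - 149));
                final byte[] buffer = new byte[1000];
                int bytesRead = in.read(buffer, 0, buffer.length);
                System.out.println("first read returned " + bytesRead);  // at most the 149 remaining bytes
                bytesRead = in.read(buffer, 0, buffer.length);
                System.out.println("second read returned " + bytesRead); // -1 once the end is reached
            } finally {
                in.close();
            }
        }
    }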
diff --git a/src/tests/java/htsjdk/samtools/seekablestream/SeekableFileStreamTest.java b/src/tests/java/htsjdk/samtools/seekablestream/SeekableFileStreamTest.java
deleted file mode 100644
index 0523898..0000000
--- a/src/tests/java/htsjdk/samtools/seekablestream/SeekableFileStreamTest.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2013 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools.seekablestream;
-
-import htsjdk.samtools.util.BufferedLineReader;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.io.File;
-
-/**
- * Created by IntelliJ IDEA.
- * User: jrobinso
- * Date: Dec 20, 2009
- * Time: 11:13:19 AM
- * To change this template use File | Settings | File Templates.
- */
-public class SeekableFileStreamTest {
-
-    @Test
-    public void testSeek() throws Exception {
-        String expectedLine = "ccccccccc";
-        File testFile = new File("testdata/htsjdk/samtools/seekablestream/seekTest.txt");
-        SeekableFileStream is = new SeekableFileStream(testFile);
-        is.seek(20);
-        BufferedLineReader reader = new BufferedLineReader(is);
-        String nextLine = reader.readLine();
-        Assert.assertEquals(nextLine, expectedLine);
-        reader.close();
-    }
-}
diff --git a/src/tests/java/htsjdk/samtools/seekablestream/SeekableStreamFactoryTest.java b/src/tests/java/htsjdk/samtools/seekablestream/SeekableStreamFactoryTest.java
deleted file mode 100644
index 9f3f67b..0000000
--- a/src/tests/java/htsjdk/samtools/seekablestream/SeekableStreamFactoryTest.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package htsjdk.samtools.seekablestream;
-
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-public class SeekableStreamFactoryTest {
-    @Test
-    public void testIsFilePath() throws Exception {
-        Assert.assertEquals(SeekableStreamFactory.isFilePath("x"), true);
-        Assert.assertEquals(SeekableStreamFactory.isFilePath(""), true);
-        Assert.assertEquals(SeekableStreamFactory.isFilePath("http://broadinstitute.org"), false);
-        Assert.assertEquals(SeekableStreamFactory.isFilePath("https://broadinstitute.org"), false);
-        Assert.assertEquals(SeekableStreamFactory.isFilePath("ftp://broadinstitute.org"), false);
-    }
-}
diff --git a/src/tests/java/htsjdk/samtools/sra/SRAAccessionTest.java b/src/tests/java/htsjdk/samtools/sra/SRAAccessionTest.java
deleted file mode 100644
index dc03453..0000000
--- a/src/tests/java/htsjdk/samtools/sra/SRAAccessionTest.java
+++ /dev/null
@@ -1,29 +0,0 @@
-package htsjdk.samtools.sra;
-
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-/**
- * Tests for SRAAccession logic
- */
-public class SRAAccessionTest extends AbstractSRATest {
-
-    @DataProvider(name = "isValidAccData")
-    private Object[][] getIsValidAccData() {
-        return new Object[][] {
-            { "SRR000123", true },
-            { "DRR000001", true },
-            { "SRR000000", false },
-            { "testdata/htsjdk/samtools/sra/test_archive.sra", true },
-            { "testdata/htsjdk/samtools/compressed.bam", false },
-            { "testdata/htsjdk/samtools/uncompressed.sam", false },
-        };
-    }
-
-    @Test(dataProvider = "isValidAccData")
-    public void testIsValidAcc(String accession, boolean isValid) {
-        Assert.assertEquals(SRAAccession.isValid(accession), isValid);
-    }
-
-}
diff --git a/src/tests/java/htsjdk/samtools/sra/SRAReferenceTest.java b/src/tests/java/htsjdk/samtools/sra/SRAReferenceTest.java
deleted file mode 100644
index ec1fa91..0000000
--- a/src/tests/java/htsjdk/samtools/sra/SRAReferenceTest.java
+++ /dev/null
@@ -1,23 +0,0 @@
-package htsjdk.samtools.sra;
-
-import htsjdk.samtools.reference.ReferenceSequence;
-import htsjdk.samtools.reference.ReferenceSequenceFile;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-public class SRAReferenceTest extends AbstractSRATest {
-    @DataProvider(name = "testReference")
-    private Object[][] createDataForReference() {
-        return new Object[][] {
-                {"SRR2096940", "CM000681.1", 95001, 95050, "AGATGATTCAGTCTCACCAAGAACACTGAAAGTCACATGGCTACCAGCAT"},
-        };
-    }
-
-    @Test(dataProvider = "testReference")
-    public void testReference(String acc, String refContig, int refStart, int refStop, String refBases) {
-        final ReferenceSequenceFile refSeqFile = new SRAIndexedSequenceFile(new SRAAccession(acc));
-        final ReferenceSequence refSeq = refSeqFile.getSubsequenceAt(refContig, refStart, refStop);
-        Assert.assertEquals(new String(refSeq.getBases()), refBases);
-    }
-}
diff --git a/src/tests/java/htsjdk/samtools/sra/SRATest.java b/src/tests/java/htsjdk/samtools/sra/SRATest.java
deleted file mode 100644
index 420a889..0000000
--- a/src/tests/java/htsjdk/samtools/sra/SRATest.java
+++ /dev/null
@@ -1,431 +0,0 @@
-/*===========================================================================
-*
-*                            PUBLIC DOMAIN NOTICE
-*               National Center for Biotechnology Information
-*
-*  This software/database is a "United States Government Work" under the
-*  terms of the United States Copyright Act.  It was written as part of
-*  the author's official duties as a United States Government employee and
-*  thus cannot be copyrighted.  This software/database is freely available
-*  to the public for use. The National Library of Medicine and the U.S.
-*  Government have not placed any restriction on its use or reproduction.
-*
-*  Although all reasonable efforts have been taken to ensure the accuracy
-*  and reliability of the software and data, the NLM and the U.S.
-*  Government do not and cannot warrant the performance or results that
-*  may be obtained by using this software or data. The NLM and the U.S.
-*  Government disclaim all warranties, express or implied, including
-*  warranties of performance, merchantability or fitness for any particular
-*  purpose.
-*
-*  Please cite the author in any work or product based on this material.
-*
-* ===========================================================================
-*
-*/
-
-package htsjdk.samtools.sra;
-
-import htsjdk.samtools.BAMFileSpan;
-import htsjdk.samtools.BrowseableBAMIndex;
-import htsjdk.samtools.Chunk;
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMReadGroupRecord;
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.SAMRecordIterator;
-import htsjdk.samtools.SAMSequenceRecord;
-import htsjdk.samtools.SAMUtils;
-import htsjdk.samtools.SAMValidationError;
-import htsjdk.samtools.SamInputResource;
-import htsjdk.samtools.SamReader;
-import htsjdk.samtools.SamReaderFactory;
-import htsjdk.samtools.ValidationStringency;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.TreeSet;
-
-/**
- * Integration tests for SRA functionality
- *
- * Created by andrii.nikitiuk on 8/24/15.
- */
-public class SRATest extends AbstractSRATest {
-
-    @DataProvider(name = "testCounts")
-    private Object[][] createDataForCounts() {
-        return new Object[][] {
-            {"SRR2096940", 10591, 498}
-        };
-    }
-
-    @Test(dataProvider = "testCounts")
-    public void testCounts(String acc, int expectedNumMapped, int expectedNumUnmapped) {
-        SamReader reader = SamReaderFactory.make().validationStringency(ValidationStringency.SILENT).open(
-                SamInputResource.of(new SRAAccession(acc))
-        );
-
-        final SAMRecordIterator samRecordIterator = reader.iterator();
-
-        assertCorrectCountsOfMappedAndUnmappedRecords(samRecordIterator, expectedNumMapped, expectedNumUnmapped);
-    }
-
-    @DataProvider(name = "testCountsBySpan")
-    private Object[][] createDataForCountsBySpan() {
-        return new Object[][] {
-            {"SRR2096940", Arrays.asList(new Chunk(0, 59128983), new Chunk(59128983, 59141089)), 10591, 498},
-            {"SRR2096940", Arrays.asList(new Chunk(0, 29128983), new Chunk(29128983, 59141089)), 10591, 498},
-            {"SRR2096940", Arrays.asList(new Chunk(0, 59134983), new Chunk(59134983, 59141089)), 10591, 498},
-            {"SRR2096940", Arrays.asList(new Chunk(0, 59130000)),                                10591, 0},
-            {"SRR2096940", Arrays.asList(new Chunk(0, 59140889)),                                10591, 298}
-        };
-    }
-
-    @Test(dataProvider = "testCountsBySpan")
-    public void testCountsBySpan(String acc, List<Chunk> chunks, int expectedNumMapped, int expectedNumUnmapped) {
-        SamReader reader = SamReaderFactory.make().validationStringency(ValidationStringency.SILENT).open(
-                SamInputResource.of(new SRAAccession(acc))
-        );
-
-        final SAMRecordIterator samRecordIterator = ((SamReader.Indexing) reader).iterator(new BAMFileSpan(chunks));
-
-        assertCorrectCountsOfMappedAndUnmappedRecords(samRecordIterator, expectedNumMapped, expectedNumUnmapped);
-    }
-
-    @DataProvider(name = "testGroups")
-    private Object[][] createDataForGroups() {
-        return new Object[][] {
-            {"SRR822962", new TreeSet<>(Arrays.asList(
-                    "GS54389-FS3-L08", "GS57511-FS3-L08", "GS54387-FS3-L02", "GS54387-FS3-L01",
-                    "GS57510-FS3-L01", "GS57510-FS3-L03", "GS54389-FS3-L07", "GS54389-FS3-L05",
-                    "GS54389-FS3-L06", "GS57510-FS3-L02", "GS57510-FS3-L04", "GS54387-FS3-L03",
-                    "GS46253-FS3-L03"))
-            },
-            {"SRR2096940", new HashSet<>(Arrays.asList("SRR2096940"))}
-        };
-    }
-
-    @Test(dataProvider = "testGroups")
-    public void testGroups(String acc, Set<String> groups) {
-        SamReader reader = SamReaderFactory.make().validationStringency(ValidationStringency.SILENT).open(
-                SamInputResource.of(new SRAAccession(acc))
-        );
-
-        final SAMRecordIterator samRecordIterator = reader.iterator();
-
-        SAMFileHeader header = reader.getFileHeader();
-        Set<String> headerGroups = new TreeSet<>();
-        for (SAMReadGroupRecord group : header.getReadGroups()) {
-            Assert.assertEquals(group.getReadGroupId(), group.getId());
-            headerGroups.add(group.getReadGroupId());
-        }
-
-        Assert.assertEquals(groups, headerGroups);
-
-        Set<String> foundGroups = new TreeSet<>();
-
-        for (int i = 0; i < 10000; i++) {
-            if (!samRecordIterator.hasNext()) {
-                break;
-            }
-            SAMRecord record = samRecordIterator.next();
-            String groupName = (String)record.getAttribute("RG");
-
-            foundGroups.add(groupName);
-        }
-
-        // note that some read groups may only appear after the first 10,000 records, which is not an error
-        Assert.assertEquals(groups, foundGroups);
-    }
-
-    @DataProvider(name = "testReferences")
-    private Object[][] createDataForReferences() {
-        return new Object[][] {
-            // primary alignment only
-            {"SRR1063272", 1,
-                    Arrays.asList("supercont2.1", "supercont2.2", "supercont2.3", "supercont2.4",
-                                  "supercont2.5", "supercont2.6", "supercont2.7", "supercont2.8",
-                                  "supercont2.9", "supercont2.10", "supercont2.11", "supercont2.12",
-                                  "supercont2.13", "supercont2.14"),
-                    Arrays.asList(2291499, 1621675, 1575141, 1084805,
-                                  1814975, 1422463, 1399503, 1398693,
-                                  1186808, 1059964, 1561994, 774062,
-                                  756744, 926563)},
-        };
-    }
-
-    @Test(dataProvider = "testReferences")
-    public void testReferences(String acc, int numberFirstReferenceFound, List<String> references, List<Integer> refLengths) {
-        SamReader reader = SamReaderFactory.make().validationStringency(ValidationStringency.SILENT).open(
-                SamInputResource.of(new SRAAccession(acc))
-        );
-
-        final SAMRecordIterator samRecordIterator = reader.iterator();
-
-        SAMFileHeader header = reader.getFileHeader();
-        Set<String> headerRefNames = new TreeSet<>();
-
-        for (SAMSequenceRecord ref : header.getSequenceDictionary().getSequences()) {
-            String refName = ref.getSequenceName();
-
-            int refIndex = references.indexOf(refName);
-            Assert.assertTrue(refIndex != -1, "Unexpected reference: " + refName);
-
-            Assert.assertEquals(refLengths.get(refIndex), (Integer) ref.getSequenceLength(), "Reference length is incorrect");
-
-            headerRefNames.add(refName);
-        }
-
-        Assert.assertEquals(new TreeSet<>(references), headerRefNames);
-
-        Set<String> foundRefNames = new TreeSet<>();
-        for (int i = 0; i < 10000; i++) {
-            if (!samRecordIterator.hasNext()) {
-                break;
-            }
-            SAMRecord record = samRecordIterator.next();
-
-            if (record.getReferenceIndex().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX)) {
-                continue;
-            }
-
-            String refName = record.getReferenceName();
-            Assert.assertNotNull(refName);
-
-            foundRefNames.add(refName);
-        }
-
-        Assert.assertEquals(new TreeSet<>(references.subList(0, numberFirstReferenceFound)), foundRefNames);
-    }
-
-    @DataProvider(name = "testRows")
-    private Object[][] createDataForRowsTest() {
-        return new Object[][] {
-            // primary alignment only
-            {"SRR1063272", 0, 99, "SRR1063272.R.1",
-                    "ACTCGACATTCTGCCTTCGACCTATCTTTCTCCTCTCCCAGTCATCGCCCAGTAGAATTACCAGGCAATGAACCAGGGCCTTCCATCCCAACGGCACAGCA",
-                    "@@CDDBDFFBFHFIEEFGIGGHIEHIGIGGFGEGAFDHIIIIIGGGDFHII;=BF at FEHGIEEH?AHHFHFFFFDC5'5=?CC?ADCD at AC??9BDDCDB<",
-                    86, "101M", "supercont2.1", 60, true, false},
-
-            // small SRA archive
-            {"SRR2096940", 1, 16, "SRR2096940.R.3",
-                    "GTGTGTCACCAGATAAGGAATCTGCCTAACAGGAGGTGTGGGTTAGACCCAATATCAGGAGACCAGGAAGGAGGAGGCCTAAGGATGGGGCTTTTCTGTCACCAATCCTGTCCCTAGTGGCCCCACTGTGGGGTGGAGGGGACAGATAAAAGTACCCAGAACCAGAG",
-                    "AAAABFFFFFFFGGGGGGGGIIIIIIIIIIIIIIIIIIIIIIIIIIIIII7IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIGGGGGFGFFDFFFFFC",
-                    55627016, "167M", "CM000681.1", 42, false, false},
-
-            {"SRR2096940", 10591, 4, "SRR2096940.R.10592",
-                    "CTCTGGTTCTGGGTACTTTTATCTGTCCCCTCCACCCCACAGTGGCGAGCCAGATTCCTTATCTGGTGACACAC",
-                    "IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII",
-                    -1, null, null, -1, false, false},
-
-            // primary and secondary alignments
-            {"SRR833251", 81, 393, "SRR833251.R.51",
-                    "ATGCAAATCCGAATGGGCTATTTGTGGGTACTTGGGCAGGTAAGTAGCTGGCAATCTTGGTCGGTAAACCAATACCCAAGTTCACATAGGCACCATCGGGA",
-                    "CCCFFFFFHHHHHIJJJIJJJJJIIJJJGIJIJIIJIJJJDGIGIIJIJIHIJJJJJJGIGHIHEDFFFFDDEEEDDDDDCDEEDDDDDDDDDDDDDBBDB",
-                    1787186, "38M63S", "gi|169794206|ref|NC_010410.1|", 11, true, true},
-
-            // local SRA file
-            {"testdata/htsjdk/samtools/sra/test_archive.sra", 1, 99, "test_archive.R.2",
-                    "TGTCGATGCTGAAAGTGTCTGCGGTGAACCACTTCATGCACAGCGCACACTGCAGCTCCACTTCACCCAGCTGACGGCCGTTCTCATCGTCTCCAGAGCCCGTCTGAGCGTCCGCTGCTTCAGAACTGTCCCCGGCTGTATCCTGAAGAC",
-                    "BBAABBBFAFFFGGGGGGGGGGGGEEFHHHHGHHHHHFHHGHFDGGGGGHHGHHHHHHHHHHHHFHHHGHHHHHHGGGGGGGHGGHHHHHHHHHGHHHHHGGGGHGHHHGGGGGGGGGHHHHEHHHHHHHHHHGCGGGHHHHHHGBFFGF",
-                    2811570, "150M", "NC_007121.5", 60, true, false}
-        };
-    }
-
-    @Test(dataProvider = "testRows")
-    public void testRows(String acc, int recordIndex, int flags, String readName, String bases, String quals, int refStart, String cigar,
-                         String refName, int mapQ, boolean hasMate, boolean isSecondaryAlignment) {
-        SAMRecord record = getRecordByIndex(acc, recordIndex, false);
-
-        checkSAMRecord(record, flags, readName, bases, quals, refStart, cigar, refName, mapQ, hasMate, isSecondaryAlignment);
-    }
-
-    @Test(dataProvider = "testRows")
-    public void testRowsAfterIteratorDetach(String acc, int recordIndex, int flags, String readName, String bases, String quals,
-                                            int refStart, String cigar, String refName, int mapQ, boolean hasMate,
-                                            boolean isSecondaryAlignment) {
-        SAMRecord record = getRecordByIndex(acc, recordIndex, true);
-
-        checkSAMRecord(record, flags, readName, bases, quals, refStart, cigar, refName, mapQ, hasMate, isSecondaryAlignment);
-    }
-
-    @Test(dataProvider = "testRows")
-    public void testRowsOverrideValues(String acc, int recordIndex, int flags, String readName, String bases, String quals,
-                                       int refStart, String cigar, String refName, int mapQ, boolean hasMate,
-                                       boolean isSecondaryAlignment) {
-        SAMRecord record = getRecordByIndex(acc, recordIndex, true);
-        SAMFileHeader header = record.getHeader();
-
-
-        record.setFlags(0);
-        record.setReadUnmappedFlag(refStart == -1);
-        record.setReadBases("C".getBytes());
-        record.setBaseQualities(SAMUtils.fastqToPhred("A"));
-        if (refStart == -1) {
-            checkSAMRecord(record, 4, readName, "C", "A", refStart, "1M", refName, mapQ, false, false);
-        } else {
-            int sequenceIndex = header.getSequenceIndex(refName);
-            Assert.assertFalse(sequenceIndex == -1);
-
-            if (sequenceIndex == 0) {
-                if (header.getSequenceDictionary().getSequences().size() > 1) {
-                    sequenceIndex++;
-                }
-            } else {
-                sequenceIndex--;
-            }
-
-            refName = header.getSequence(sequenceIndex).getSequenceName();
-
-            record.setAlignmentStart(refStart - 100);
-            record.setCigarString("1M");
-            record.setMappingQuality(mapQ - 1);
-            record.setReferenceIndex(sequenceIndex);
-
-            checkSAMRecord(record, 0, readName, "C", "A", refStart - 100, "1M", refName, mapQ - 1, false, false);
-        }
-    }
-
-    @Test(dataProvider = "testRows")
-    public void testRowsBySpan(String acc, int recordIndex, int flags, String readName, String bases, String quals,
-                                            int refStart, String cigar, String refName, int mapQ, boolean hasMate,
-                                            boolean isSecondaryAlignment) {
-        SamReader reader = SamReaderFactory.make().validationStringency(ValidationStringency.SILENT).open(
-                SamInputResource.of(new SRAAccession(acc))
-        );
-
-        SAMFileHeader header = reader.getFileHeader();
-
-        Chunk chunk;
-        if (refStart != -1) {
-            long refOffset = 0;
-            int refIndex = header.getSequenceDictionary().getSequence(refName).getSequenceIndex();
-            for (SAMSequenceRecord sequenceRecord : header.getSequenceDictionary().getSequences()) {
-                if (sequenceRecord.getSequenceIndex() <  refIndex) {
-                    refOffset += sequenceRecord.getSequenceLength();
-                }
-            }
-
-            chunk = new Chunk(refOffset + refStart - 1, refOffset + refStart);
-        } else {
-            long totalRefLength = header.getSequenceDictionary().getReferenceLength();
-            long totalRecordRange = ((BAMFileSpan)reader.indexing().getFilePointerSpanningReads()).toCoordinateArray()[1];
-            chunk = new Chunk(totalRefLength, totalRecordRange);
-        }
-
-        final SAMRecordIterator samRecordIterator = ((SamReader.Indexing) reader).iterator(new BAMFileSpan(chunk));
-
-        SAMRecord record = null;
-        while (samRecordIterator.hasNext()) {
-            SAMRecord currentRecord = samRecordIterator.next();
-            if (currentRecord.getReadName().equals(readName)) {
-                record = currentRecord;
-                break;
-            }
-        }
-
-        checkSAMRecord(record, flags, readName, bases, quals, refStart, cigar, refName, mapQ, hasMate, isSecondaryAlignment);
-    }
-
-    @Test(dataProvider = "testRows")
-    public void testRowsByIndex(String acc, int recordIndex, int flags, String readName, String bases, String quals,
-                                int refStart, String cigar, String refName, int mapQ, boolean hasMate,
-                                boolean isSecondaryAlignment) {
-        SamReader reader = SamReaderFactory.make().validationStringency(ValidationStringency.SILENT).open(
-                SamInputResource.of(new SRAAccession(acc))
-        );
-
-        Assert.assertTrue(reader.hasIndex());
-        Assert.assertTrue(reader.indexing().hasBrowseableIndex());
-
-        SAMFileHeader header = reader.getFileHeader();
-        BrowseableBAMIndex index = reader.indexing().getBrowseableIndex();
-
-        BAMFileSpan span;
-        if (refStart != -1) {
-            int refIndex = header.getSequenceDictionary().getSequence(refName).getSequenceIndex();
-            span = index.getSpanOverlapping(refIndex, refStart, refStart + 1);
-        } else {
-            long chunkStart = index.getStartOfLastLinearBin();
-            long totalRecordRange = ((BAMFileSpan) reader.indexing().getFilePointerSpanningReads()).toCoordinateArray()[1];
-            span = new BAMFileSpan(new Chunk(chunkStart, totalRecordRange));
-        }
-
-        final SAMRecordIterator samRecordIterator = ((SamReader.Indexing) reader).iterator(span);
-
-        SAMRecord record = null;
-        while (samRecordIterator.hasNext()) {
-            SAMRecord currentRecord = samRecordIterator.next();
-            if (refStart != -1 && currentRecord.getAlignmentStart() + currentRecord.getReadLength() < refStart) {
-                continue;
-            }
-
-            if (currentRecord.getReadName().equals(readName)) {
-                record = currentRecord;
-                break;
-            }
-        }
-
-        checkSAMRecord(record, flags, readName, bases, quals, refStart, cigar, refName, mapQ, hasMate, isSecondaryAlignment);
-    }
-
-    private SAMRecord getRecordByIndex(String acc, int recordIndex, boolean detach) {
-        SamReader reader = SamReaderFactory.make().validationStringency(ValidationStringency.SILENT).open(
-                SamInputResource.of(new SRAAccession(acc))
-        );
-
-        final SAMRecordIterator samRecordIterator = reader.iterator();
-
-        while (recordIndex != 0) {
-            Assert.assertTrue(samRecordIterator.hasNext(), "Record set is too small");
-
-            samRecordIterator.next();
-            recordIndex--;
-        }
-        Assert.assertTrue(samRecordIterator.hasNext(), "Record set is too small");
-
-        SAMRecord record = samRecordIterator.next();
-
-        if (detach) {
-            samRecordIterator.next();
-        }
-
-        return record;
-    }
-
-    private void checkSAMRecord(SAMRecord record, int flags, String readName, String bases, String quals,
-                                int refStart, String cigar, String refName, int mapQ, boolean hasMate,
-                                boolean isSecondaryAlignment) {
-
-        Assert.assertNotNull(record, "Record with read id: " + readName + " was not found by span created from index");
-
-        List<SAMValidationError> validationErrors = record.isValid();
-        Assert.assertNull(validationErrors, "SRA Lazy record is invalid. List of errors: " +
-                (validationErrors != null ? validationErrors.toString() : ""));
-
-        Assert.assertEquals(new String(record.getReadBases()), bases);
-        Assert.assertEquals(record.getBaseQualityString(), quals);
-        Assert.assertEquals(record.getReadPairedFlag(), hasMate);
-        Assert.assertEquals(record.getFlags(), flags);
-        Assert.assertEquals(record.getNotPrimaryAlignmentFlag(), isSecondaryAlignment);
-        if (refStart == -1) {
-            Assert.assertEquals(record.getReadUnmappedFlag(), true);
-            Assert.assertEquals(record.getAlignmentStart(), 0);
-            Assert.assertEquals(record.getCigarString(), "*");
-            Assert.assertEquals(record.getReferenceName(), "*");
-            Assert.assertEquals(record.getMappingQuality(), 0);
-        } else {
-            Assert.assertEquals(record.getReadUnmappedFlag(), false);
-            Assert.assertEquals(record.getAlignmentStart(), refStart);
-            Assert.assertEquals(record.getCigarString(), cigar);
-            Assert.assertEquals(record.getReferenceName(), refName);
-            Assert.assertEquals(record.getMappingQuality(), mapQ);
-        }
-    }
-
-}
diff --git a/src/tests/java/htsjdk/samtools/util/BlockCompressedOutputStreamTest.java b/src/tests/java/htsjdk/samtools/util/BlockCompressedOutputStreamTest.java
deleted file mode 100644
index 758d8fa..0000000
--- a/src/tests/java/htsjdk/samtools/util/BlockCompressedOutputStreamTest.java
+++ /dev/null
@@ -1,112 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools.util;
-
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.InputStreamReader;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Random;
-
-public class BlockCompressedOutputStreamTest {
-
-    @Test
-    public void testBasic() throws Exception {
-        final File f = File.createTempFile("BCOST.", ".gz");
-        f.deleteOnExit();
-        final List<String> linesWritten = new ArrayList<String>();
-        System.out.println("Creating file " + f);
-        final BlockCompressedOutputStream bcos = new BlockCompressedOutputStream(f);
-        String s = "Hi, Mom!\n";
-        bcos.write(s.getBytes());
-        linesWritten.add(s);
-        s = "Hi, Dad!\n";
-        bcos.write(s.getBytes());
-        linesWritten.add(s);
-        bcos.flush();
-        final StringBuilder sb = new StringBuilder(BlockCompressedStreamConstants.DEFAULT_UNCOMPRESSED_BLOCK_SIZE * 2);
-        s = "1234567890123456789012345678901234567890123456789012345678901234567890\n";
-        while (sb.length() <= BlockCompressedStreamConstants.DEFAULT_UNCOMPRESSED_BLOCK_SIZE) {
-            sb.append(s);
-            linesWritten.add(s);
-        }
-        bcos.write(sb.toString().getBytes());
-        bcos.close();
-        final BlockCompressedInputStream bcis = new BlockCompressedInputStream(f);
-        final BufferedReader reader = new BufferedReader(new InputStreamReader(bcis));
-        String line;
-        for(int i = 0; (line = reader.readLine()) != null; ++i) {
-            Assert.assertEquals(line + "\n", linesWritten.get(i));
-        }
-        bcis.close();
-        final BlockCompressedInputStream bcis2 = new BlockCompressedInputStream(f);
-        int available = bcis2.available();
-        Assert.assertFalse(bcis2.endOfBlock(), "Should not be at end of block");
-        Assert.assertTrue(available > 0);
-        byte[] buffer = new byte[available];
-        Assert.assertEquals(bcis2.read(buffer), available, "Should read to end of block");
-        Assert.assertTrue(bcis2.endOfBlock(), "Should be at end of block");
-        bcis2.close();
-    }
-
-    @Test
-    public void testOverflow() throws Exception {
-        final File f = File.createTempFile("BCOST.", ".gz");
-        f.deleteOnExit();
-        final List<String> linesWritten = new ArrayList<String>();
-        System.out.println("Creating file " + f);
-        final BlockCompressedOutputStream bcos = new BlockCompressedOutputStream(f);
-        Random r = new Random(15555);
-        final int INPUT_SIZE = 64 * 1024;
-        byte[] input = new byte[INPUT_SIZE];
-        r.nextBytes(input);
-        bcos.write(input);
-        bcos.close();
-
-        final BlockCompressedInputStream bcis = new BlockCompressedInputStream(f);
-        byte[] output = new byte[INPUT_SIZE];
-        int len;
-        int i = 0;
-        while ((len = bcis.read(output, 0, output.length)) != -1) {
-            for (int j = 0; j < len; j++) {
-               Assert.assertEquals(output[j], input[i++]);
-            }
-        }
-        Assert.assertEquals(i, INPUT_SIZE);
-        bcis.close();
-    }
-
-    // PIC-393 exception closing BGZF stream opened to /dev/null
-    // I don't think this will work on Windows, because /dev/null doesn't work
-    @Test(groups = "broken")
-    public void testDevNull() throws Exception {
-        final BlockCompressedOutputStream bcos = new BlockCompressedOutputStream("/dev/null");
-        bcos.write("Hi, Mom!".getBytes());
-        bcos.close();
-    }
-}
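
The test above is, at its core, a BGZF write-then-read round trip; a minimal sketch of the same cycle, assuming only the BlockCompressedOutputStream and BlockCompressedInputStream classes it uses:

    import htsjdk.samtools.util.BlockCompressedInputStream;
    import htsjdk.samtools.util.BlockCompressedOutputStream;

    import java.io.BufferedReader;
    import java.io.File;
    import java.io.InputStreamReader;

    public class BgzfRoundTrip {
        public static void main(String[] args) throws Exception {
            final File f = File.createTempFile("round_trip.", ".gz");
            f.deleteOnExit();

            // Write a couple of lines as BGZF-compressed blocks.
            try (BlockCompressedOutputStream out = new BlockCompressedOutputStream(f)) {
                out.write("Hi, Mom!\n".getBytes());
                out.write("Hi, Dad!\n".getBytes());
            }

            // Read them back through the matching input stream.
            try (BufferedReader in = new BufferedReader(
                    new InputStreamReader(new BlockCompressedInputStream(f)))) {
                String line;
                while ((line = in.readLine()) != null) {
                    System.out.println(line);
                }
            }
        }
    }
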
diff --git a/src/tests/java/htsjdk/samtools/util/BlockCompressedTerminatorTest.java b/src/tests/java/htsjdk/samtools/util/BlockCompressedTerminatorTest.java
deleted file mode 100644
index 91724b4..0000000
--- a/src/tests/java/htsjdk/samtools/util/BlockCompressedTerminatorTest.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools.util;
-
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.io.File;
-
-/**
- * @author alecw at broadinstitute.org
- */
-public class BlockCompressedTerminatorTest {
-    private static final File TEST_DATA_DIR = new File("testdata/htsjdk/samtools/util");
-
-    @Test
-    public void testFileWithTerminator() throws Exception {
-        final File tmpCompressedFile = File.createTempFile("test.", ".bgzf");
-        tmpCompressedFile.deleteOnExit();
-        final BlockCompressedOutputStream os = new BlockCompressedOutputStream(tmpCompressedFile);
-        os.write("Hi, Mom!\n".getBytes());
-        os.close();
-        Assert.assertEquals(BlockCompressedInputStream.checkTermination(tmpCompressedFile),
-                BlockCompressedInputStream.FileTermination.HAS_TERMINATOR_BLOCK);
-    }
-
-    @Test
-    public void testValidFileWithoutTerminator() throws Exception {
-        Assert.assertEquals(BlockCompressedInputStream.checkTermination(new File(TEST_DATA_DIR, "no_bgzf_terminator.bam")),
-                BlockCompressedInputStream.FileTermination.HAS_HEALTHY_LAST_BLOCK);
-    }
-
-    @Test
-    public void testDefectiveFile() throws Exception {
-        Assert.assertEquals(BlockCompressedInputStream.checkTermination(new File(TEST_DATA_DIR, "defective_bgzf.bam")),
-                BlockCompressedInputStream.FileTermination.DEFECTIVE);
-    }
-}
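
The terminator checks above reduce to a single static call; a minimal sketch, assuming BlockCompressedInputStream.checkTermination(File) and its FileTermination values as used in the deleted test:

    import htsjdk.samtools.util.BlockCompressedInputStream;

    import java.io.File;

    public class BgzfTerminatorCheck {
        public static void main(String[] args) throws Exception {
            final File bam = new File(args[0]);   // any BGZF/BAM file
            final BlockCompressedInputStream.FileTermination status =
                    BlockCompressedInputStream.checkTermination(bam);
            // One of HAS_TERMINATOR_BLOCK, HAS_HEALTHY_LAST_BLOCK or DEFECTIVE
            System.out.println(bam + ": " + status);
        }
    }
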
diff --git a/src/tests/java/htsjdk/samtools/util/HistogramTest.java b/src/tests/java/htsjdk/samtools/util/HistogramTest.java
deleted file mode 100644
index 413447c..0000000
--- a/src/tests/java/htsjdk/samtools/util/HistogramTest.java
+++ /dev/null
@@ -1,97 +0,0 @@
-package htsjdk.samtools.util;
-
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.IOException;
-
-import static java.lang.Math.abs;
-
-/**
- *
- */
-public class HistogramTest {
-
-    @Test(dataProvider = "histogramData")
-    public void testHistogramFunctions(final int[] values, final double mean, final double stdev, final Integer trimByWidth) {
-        final Histogram<Integer> histo = new Histogram<Integer>();
-        for (int value : values) {
-            histo.increment(value);
-        }
-
-        if (trimByWidth != null) histo.trimByWidth(trimByWidth);
-        final double m = histo.getMean();
-        final double sd = histo.getStandardDeviation();
-
-        Assert.assertEquals(round(mean), round(m), "Means are not equal");
-        Assert.assertEquals(round(stdev), round(sd), "Stdevs are not equal");
-    }
-
-    @DataProvider(name = "histogramData")
-    public Object[][] histogramData() {
-        return new Object[][] {
-            new Object[] {new int[] {1,2,3,4,5,6,7,8,9,10} , 5.5d, 3.027650d, null },
-            new Object[] {new int[] {1,2,2,3,3,3,4,4,4,4,5,5,5,5,5,6,6,6,6,6,6,7,7,7,7,7,7,7,8,8,8,8,8,8,8,8,9,9,9,9,9,9,9,9,9}, 6.333333d, 2.236068d, null  },
-            new Object[] {new int[] {-5, -4, -3, -2, -1,  0,  1,  2,  3,  4,  5,  6,  7,  8,  9, 10, 11, 12, 13, 14, 15}, 5d, 6.204837d, null  },
-                new Object[] {new int[] {1,2,3,4,5,6,7,8,9,10, 11, 11, 12, 100, 1000} , 5.5d, 3.027650d, 10 },
-                new Object[] {new int[] {1,2,2,3,3,3,4,4,4,4,5,5,5,5,5,6,6,6,6,6,6,7,7,7,7,7,7,7,8,8,8,8,8,8,8,8,9,9,9,9,9,9,9,9,9, 20, 20, 21, 25, 25}, 6.333333d, 2.236068d, 11  },
-                new Object[] {new int[] {-5, -4, -3, -2, -1,  0,  1,  2,  3,  4,  5,  6,  7,  8,  9, 10, 11, 12, 13, 14, 15, 101, 102, 103, 200, 2000}, 5d, 6.204837d, 20  }
-        };
-    }
-
-    @Test
-    public void testGeometricMean() {
-        final int[] is = new int[] {4,4,4,4,5,5,5,5,6,6,6,6,6,6,7,7,7,7,7,7,7,8,8,8,8,8,8,8,8};
-        final Histogram<Integer> histo = new Histogram<Integer>();
-        for (final int i : is) histo.increment(i);
-        Assert.assertTrue(abs(histo.getGeometricMean() - 6.216797) < 0.00001);
-    }
-
-    @Test(dataProvider = "medianTestData")
-    public void testMedian(final int [] values, final double median) {
-        final Histogram<Integer> histo = new Histogram<Integer>();
-        for (final int i : values) histo.increment(i);
-        Assert.assertEquals(histo.getMedian(), median);
-    }
-
-    @DataProvider(name = "medianTestData")
-    public Object[][] medianTestData() {
-        return new Object[][] {
-                new Object[] {new int[] {} , 0d},
-                new Object[] {new int[] {999} , 999d},
-                new Object[] {new int[] {1,2,3,4,5,6} , 3.5d},
-                new Object[] {new int[] {5,5,5,5,5,6,6} , 5d},
-                new Object[] {new int[] {5,5,5,5,5,6,6,6,6,6} , 5.5d},
-        };
-    }
-
-    @Test
-    public void testMad() {
-        final int[] is = new int[] {4,4,4,4,5,5,5,5,6,6,6,6,6,6,7,7,7,7,7,7,7,8,8,8,8,8,8,8,8};
-        final Histogram<Integer> histo = new Histogram<Integer>();
-        for (final int i : is) histo.increment(i);
-
-        Assert.assertEquals(7d, histo.getMedian());
-        Assert.assertEquals(1d, histo.getMedianAbsoluteDeviation());
-        Assert.assertTrue(abs(histo.estimateSdViaMad() - 1.4826) < 0.0001);
-    }
-
-
-    @Test(dataProvider = "histogramData") //this data provider has several extra variables that we don't make use of here
-    public void testSerializeHistogram(final int[] values, final double mean, final double stdev, final Integer trimByWidth) throws IOException, ClassNotFoundException {
-        final Histogram<Integer> histo = new Histogram<Integer>();
-        for (int value : values) {
-            histo.increment(value);
-        }
-
-        Histogram<Integer> deserializedHistogram = TestUtil.serializeAndDeserialize(histo);
-        Assert.assertEquals(deserializedHistogram, histo);
-    }
-
-    private double round(final double in) {
-        long l = (long) (in * 10000);
-        return l / 10000d;
-    }
-
-}
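
The assertions above all read summary statistics off htsjdk's Histogram; a minimal sketch of the increment/summary calls the deleted test relies on:

    import htsjdk.samtools.util.Histogram;

    public class HistogramDemo {
        public static void main(String[] args) {
            final Histogram<Integer> histogram = new Histogram<Integer>();
            for (final int value : new int[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}) {
                histogram.increment(value);
            }
            // Summary statistics exercised by the removed assertions.
            System.out.println("mean   = " + histogram.getMean());               // 5.5
            System.out.println("stdev  = " + histogram.getStandardDeviation());  // ~3.0277
            System.out.println("median = " + histogram.getMedian());             // 5.5
        }
    }
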
diff --git a/src/tests/java/htsjdk/samtools/util/IntelDeflaterTest.java b/src/tests/java/htsjdk/samtools/util/IntelDeflaterTest.java
deleted file mode 100755
index 54c919b..0000000
--- a/src/tests/java/htsjdk/samtools/util/IntelDeflaterTest.java
+++ /dev/null
@@ -1,130 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2016 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- */
-package htsjdk.samtools.util;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMFileWriter;
-import htsjdk.samtools.SAMFileWriterFactory;
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.SamReader;
-import htsjdk.samtools.SamReaderFactory;
-import htsjdk.samtools.ValidationStringency;
-import htsjdk.samtools.util.zip.DeflaterFactory;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-import java.util.concurrent.ExecutionException;
-
-/**
- * This is a test for IntelDeflater.
- */
-
-public class IntelDeflaterTest {
-    static final File TEST_DIR = new File("testdata/htsjdk/samtools");
-
-    @DataProvider(name="TestIntelDeflaterIsLoadedData")
-    Iterator<Object[]> TestIntelDeflaterIsLoadedData(){
-
-        List<File> files = CollectionUtil.makeList(
-                new File(TEST_DIR, "coordinate_sorted.sam"),
-                new File(TEST_DIR, "queryname_sorted.sam"),
-                new File(TEST_DIR, "compressed.bam"),
-                new File(TEST_DIR, "empty.bam"),
-                new File(TEST_DIR, "cram_with_bai_index.cram"),
-                new File(TEST_DIR, "uncompressed.sam"),
-                new File(TEST_DIR, "cram_with_crai_index.cram"));
-
-        List<Boolean> eagerlyDecodes = CollectionUtil.makeList(Boolean.TRUE, Boolean.FALSE);
-        List<Integer> compressionLevels = CollectionUtil.makeList(1, 2, 3, 4, 5, 6, 7, 8, 9);
-
-        List<Object[]> retVal = new ArrayList<>();
-        files.stream()
-                .forEach(file ->
-                        eagerlyDecodes.stream()
-                                .forEach(eagerlyDecode -> compressionLevels.stream()
-                                        .forEach(compressionLevel ->
-                                                retVal.add(new Object[]{file, eagerlyDecode, compressionLevel}))));
-        return retVal.iterator();
-    }
-
-    @Test(dataProvider = "TestIntelDeflaterIsLoadedData", groups="intel",expectedExceptions = IllegalAccessError.class)
-    public void TestIntelDeflatorIsLoaded(final File inputFile, final Boolean eagerlyDecode,final Integer compressionLevel) throws IOException,IllegalAccessError {
-
-        Log log = Log.getInstance(IntelDeflaterTest.class);
-        Log.setGlobalLogLevel(Log.LogLevel.INFO);
-
-        log.info("In TestIntelDeflatorIsLoaded. testing: " + inputFile);
-        IOUtil.assertFileIsReadable(inputFile);
-
-        final File outputFile = File.createTempFile("IntelDeflater", "bam");
-        outputFile.deleteOnExit();
-
-
-        Assert.assertTrue(DeflaterFactory.usingIntelDeflater(), "IntelDeflater is not loaded.");
-        log.info("IntelDeflater is loaded");
-
-
-        SamReaderFactory readerFactory = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT);
-        if (eagerlyDecode) {
-            readerFactory = readerFactory.enable(SamReaderFactory.Option.EAGERLY_DECODE);
-        }
-
-        if(inputFile.getName().endsWith(".cram")) {
-            readerFactory.referenceSequence(new File(TEST_DIR, "hg19mini.fasta"));
-        }
-
-        final SamReader reader = readerFactory.open(inputFile);
-        final SAMFileHeader header = reader.getFileHeader();
-        int nRecords = 0;
-        try (final SAMFileWriter writer = new SAMFileWriterFactory().makeBAMWriter(header, true, outputFile, compressionLevel)) {
-            for (final SAMRecord record : reader) {
-                writer.addAlignment(record);
-                nRecords++;
-            }
-        } catch (Exception e) {
-            Assert.fail("Error reading record no. " + nRecords);
-        }
-
-        log.info("wrote " + nRecords + " Records");
-
-        int nReadRecords = 0;
-        try (final SamReader outputReader = readerFactory.open(outputFile)) {
-            for (final SAMRecord ignored : outputReader) {
-                nReadRecords++;
-            }
-        } catch (Exception e) {
-            Assert.fail("Error reading record written with the IntelDeflater library");
-        }
-        log.info("read " + nReadRecords + " Records");
-
-        Assert.assertEquals(nReadRecords, nRecords, "Number of read records mismatches number of written records.");
-
-        throw new IllegalAccessError("Got to the end successfully! (i.e. no segmentation fault)");
-    }
-}
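
Whether the Intel deflater is actually in use is a one-line query; a minimal sketch, assuming the DeflaterFactory.usingIntelDeflater() call from the deleted test (htsjdk 2.5.0-era API):

    import htsjdk.samtools.util.zip.DeflaterFactory;

    public class IntelDeflaterCheck {
        public static void main(String[] args) {
            // Reports whether the native IntelDeflater library was loaded; otherwise java.util.zip is used.
            System.out.println("Intel deflater loaded: " + DeflaterFactory.usingIntelDeflater());
        }
    }
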
diff --git a/src/tests/java/htsjdk/samtools/util/IntervalListTest.java b/src/tests/java/htsjdk/samtools/util/IntervalListTest.java
deleted file mode 100644
index 6d7ffe4..0000000
--- a/src/tests/java/htsjdk/samtools/util/IntervalListTest.java
+++ /dev/null
@@ -1,520 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2014 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-
-package htsjdk.samtools.util;
-
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMSequenceRecord;
-import htsjdk.variant.vcf.VCFFileReader;
-import org.testng.Assert;
-import org.testng.annotations.BeforeTest;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.SortedSet;
-import java.util.TreeSet;
-
-/**
- * Tests the IntervalList class
- */
-public class IntervalListTest {
-
-    final SAMFileHeader fileHeader;
-    final IntervalList list1, list2, list3;
-
-    public IntervalListTest() {
-        fileHeader = IntervalList.fromFile(new File("testdata/htsjdk/samtools/intervallist/IntervalListchr123_empty.interval_list")).getHeader();
-        fileHeader.setSortOrder(SAMFileHeader.SortOrder.unsorted);
-
-        list1 = new IntervalList(fileHeader);
-        list2 = new IntervalList(fileHeader);
-        list3 = new IntervalList(fileHeader);
-
-        list1.add(new Interval("1", 1, 100));     //de-facto: 1:1-200 1:202-300     2:100-150 2:200-300
-        list1.add(new Interval("1", 101, 200));
-        list1.add(new Interval("1", 202, 300));
-        list1.add(new Interval("2", 200, 300));
-        list1.add(new Interval("2", 100, 150));
-
-        list2.add(new Interval("1", 50, 150));   //de-facto 1:50-150 1:301-500      2:1-150 2:250-270 2:290-400
-        list2.add(new Interval("1", 301, 500));
-        list2.add(new Interval("2", 1, 150));
-        list2.add(new Interval("2", 250, 270));
-        list2.add(new Interval("2", 290, 400));
-
-        list3.add(new Interval("1", 25, 400));    //de-facto 1:25-400                2:200-600                            3:50-470
-        list3.add(new Interval("2", 200, 600));
-        list3.add(new Interval("3", 50, 470));
-    }
-
-    @DataProvider(name = "intersectData")
-    public Object[][] intersectData() {
-        final IntervalList intersect123 = new IntervalList(fileHeader);
-        final IntervalList intersect12 = new IntervalList(fileHeader);
-        final IntervalList intersect13 = new IntervalList(fileHeader);
-        final IntervalList intersect23 = new IntervalList(fileHeader);
-
-        intersect123.add(new Interval("1", 50, 150));
-        intersect123.add(new Interval("2", 250, 270));
-        intersect123.add(new Interval("2", 290, 300));
-
-        intersect12.add(new Interval("1", 50, 150));
-        intersect12.add(new Interval("2", 100, 150));
-        intersect12.add(new Interval("2", 250, 270));
-        intersect12.add(new Interval("2", 290, 300));
-
-        intersect13.add(new Interval("1", 25, 200));
-        intersect13.add(new Interval("1", 202, 300));
-        intersect13.add(new Interval("2", 200, 300));
-
-        intersect23.add(new Interval("1", 50, 150));
-        intersect23.add(new Interval("1", 301, 400));
-        intersect23.add(new Interval("2", 250, 270));
-        intersect23.add(new Interval("2", 290, 400));
-
-        return new Object[][]{
-                new Object[]{Arrays.asList(list1, list2, list3), intersect123},
-                new Object[]{Arrays.asList(list1, list2), intersect12},
-                new Object[]{Arrays.asList(list2, list1), intersect12},
-                new Object[]{Arrays.asList(list2, list3), intersect23},
-                new Object[]{Arrays.asList(list3, list2), intersect23},
-                new Object[]{Arrays.asList(list1, list3), intersect13},
-                new Object[]{Arrays.asList(list3, list1), intersect13}
-        };
-    }
-
-    @Test(dataProvider = "intersectData")
-    public void testIntersectIntervalLists(final List<IntervalList> lists, final IntervalList list) {
-        Assert.assertEquals(
-                CollectionUtil.makeCollection(IntervalList.intersection(lists).iterator()),
-                CollectionUtil.makeCollection(list.iterator()));
-    }
-
-    @DataProvider(name = "mergeData")
-    public Object[][] mergeData() {
-        final IntervalList merge123 = new IntervalList(fileHeader);
-        final IntervalList merge12 = new IntervalList(fileHeader);
-        final IntervalList merge23 = new IntervalList(fileHeader);
-        final IntervalList merge13 = new IntervalList(fileHeader);
-
-        merge123.add(new Interval("1", 1, 100));     //de-facto: 1:1-200 1:202-300     2:100-150 2:200-300
-        merge123.add(new Interval("1", 101, 200));
-        merge123.add(new Interval("1", 202, 300));
-        merge123.add(new Interval("2", 200, 300));
-        merge123.add(new Interval("2", 100, 150));
-
-        merge123.add(new Interval("1", 50, 150));   //de-facto 1:50-150 1:301-500      2:1-150 2:250-270 2:290-400
-        merge123.add(new Interval("1", 301, 500));
-        merge123.add(new Interval("2", 1, 150));
-        merge123.add(new Interval("2", 250, 270));
-        merge123.add(new Interval("2", 290, 400));
-
-        merge123.add(new Interval("1", 25, 400));    //de-facto 1:25-400                2:200-600                            3:50-470
-        merge123.add(new Interval("2", 200, 600));
-        merge123.add(new Interval("3", 50, 470));
-
-        merge12.add(new Interval("1", 1, 100));     //de-facto: 1:1-200 1:202-300     2:100-150 2:200-300
-        merge12.add(new Interval("1", 101, 200));
-        merge12.add(new Interval("1", 202, 300));
-        merge12.add(new Interval("2", 200, 300));
-        merge12.add(new Interval("2", 100, 150));
-
-        merge12.add(new Interval("1", 50, 150));   //de-facto 1:50-150 1:301-500      2:1-150 2:250-270 2:290-400
-        merge12.add(new Interval("1", 301, 500));
-        merge12.add(new Interval("2", 1, 150));
-        merge12.add(new Interval("2", 250, 270));
-        merge12.add(new Interval("2", 290, 400));
-
-        merge23.add(new Interval("1", 50, 150));   //de-facto 1:50-150 1:301-500      2:1-150 2:250-270 2:290-400
-        merge23.add(new Interval("1", 301, 500));
-        merge23.add(new Interval("2", 1, 150));
-        merge23.add(new Interval("2", 250, 270));
-        merge23.add(new Interval("2", 290, 400));
-
-        merge23.add(new Interval("1", 25, 400));    //de-facto 1:25-400                2:200-600                            3:50-470
-        merge23.add(new Interval("2", 200, 600));
-        merge23.add(new Interval("3", 50, 470));
-
-        merge13.add(new Interval("1", 1, 100));     //de-facto: 1:1-200 1:202-300     2:100-150 2:200-300
-        merge13.add(new Interval("1", 101, 200));
-        merge13.add(new Interval("1", 202, 300));
-        merge13.add(new Interval("2", 200, 300));
-        merge13.add(new Interval("2", 100, 150));
-
-        merge13.add(new Interval("1", 25, 400));    //de-facto 1:25-400                2:200-600                            3:50-470
-        merge13.add(new Interval("2", 200, 600));
-        merge13.add(new Interval("3", 50, 470));
-
-        return new Object[][]{
-                new Object[]{Arrays.asList(list1, list2, list3), merge123},
-                new Object[]{Arrays.asList(list1, list2), merge12},
-                new Object[]{Arrays.asList(list2, list3), merge23},
-                new Object[]{Arrays.asList(list1, list3), merge13}
-        };
-    }
-
-    @Test(dataProvider = "mergeData")
-    public void testMergeIntervalLists(final List<IntervalList> lists, final IntervalList list) {
-        Assert.assertEquals(
-                CollectionUtil.makeCollection(IntervalList.concatenate(lists).iterator()),
-                CollectionUtil.makeCollection(list.iterator()));
-    }
-
-    @DataProvider(name = "unionData")
-    public Object[][] unionData() {
-        final IntervalList union123 = new IntervalList(fileHeader);
-        final IntervalList union12 = new IntervalList(fileHeader);
-        final IntervalList union13 = new IntervalList(fileHeader);
-        final IntervalList union23 = new IntervalList(fileHeader);
-
-        union123.add(new Interval("1", 1, 500));
-        union123.add(new Interval("2", 1, 150));
-        union123.add(new Interval("2", 200, 600));
-        union123.add(new Interval("3", 50, 470));
-
-        union12.add(new Interval("1", 1, 200));
-        union12.add(new Interval("1", 202, 500));
-        union12.add(new Interval("2", 1, 150));
-        union12.add(new Interval("2", 200, 400));
-
-        union23.add(new Interval("1", 25, 500));
-        union23.add(new Interval("2", 1, 150));
-        union23.add(new Interval("2", 200, 600));
-        union23.add(new Interval("3", 50, 470));
-
-        union13.add(new Interval("1", 1, 400));
-        union13.add(new Interval("2", 100, 150));
-        union13.add(new Interval("2", 200, 600));
-        union13.add(new Interval("3", 50, 470));
-
-        return new Object[][]{
-                new Object[]{Arrays.asList(list1, list2, list3), union123},
-                new Object[]{Arrays.asList(list1, list2), union12},
-                new Object[]{Arrays.asList(list1, list2), union12},
-                new Object[]{Arrays.asList(list2, list3), union23},
-                new Object[]{Arrays.asList(list2, list3), union23},
-                new Object[]{Arrays.asList(list1, list3), union13},
-                new Object[]{Arrays.asList(list1, list3), union13}
-        };
-    }
-
-    @Test(dataProvider = "unionData", enabled = true)
-    public void testUnionIntervalLists(final List<IntervalList> lists, final IntervalList list) {
-        Assert.assertEquals(
-                CollectionUtil.makeCollection(IntervalList.union(lists).iterator()),
-                CollectionUtil.makeCollection(list.iterator()));
-    }
-
-    @DataProvider(name = "invertData")
-    public Object[][] invertData() {
-        final IntervalList invert1 = new IntervalList(fileHeader);
-        final IntervalList invert2 = new IntervalList(fileHeader);
-        final IntervalList invert3 = new IntervalList(fileHeader);
-
-        final IntervalList full = new IntervalList(fileHeader);
-        final IntervalList fullChopped = new IntervalList(fileHeader);
-        final IntervalList empty = new IntervalList(fileHeader);
-
-        invert1.add(new Interval("1", 201, 201));
-        invert1.add(new Interval("1", 301, fileHeader.getSequence("1").getSequenceLength()));
-        invert1.add(new Interval("2", 1, 99));
-        invert1.add(new Interval("2", 151, 199));
-        invert1.add(new Interval("2", 301, fileHeader.getSequence("2").getSequenceLength()));
-        invert1.add(new Interval("3", 1, fileHeader.getSequence("3").getSequenceLength()));
-
-        invert2.add(new Interval("1", 1, 49));
-        invert2.add(new Interval("1", 151, 300));
-        invert2.add(new Interval("1", 501, fileHeader.getSequence("1").getSequenceLength()));
-        invert2.add(new Interval("2", 151, 249));
-        invert2.add(new Interval("2", 271, 289));
-        invert2.add(new Interval("2", 401, fileHeader.getSequence("2").getSequenceLength()));
-        invert2.add(new Interval("3", 1, fileHeader.getSequence("3").getSequenceLength()));
-
-        invert3.add(new Interval("1", 1, 24));
-        invert3.add(new Interval("1", 401, fileHeader.getSequence("1").getSequenceLength()));
-        invert3.add(new Interval("2", 1, 199));
-        invert3.add(new Interval("2", 601, fileHeader.getSequence("2").getSequenceLength()));
-        invert3.add(new Interval("3", 1, 49));
-        invert3.add(new Interval("3", 471, fileHeader.getSequence("3").getSequenceLength()));
-
-        for (final SAMSequenceRecord samSequenceRecord : fileHeader.getSequenceDictionary().getSequences()) {
-            full.add(new Interval(samSequenceRecord.getSequenceName(), 1, samSequenceRecord.getSequenceLength()));
-
-            fullChopped.add(new Interval(samSequenceRecord.getSequenceName(), 1, samSequenceRecord.getSequenceLength() / 2));
-            fullChopped.add(new Interval(samSequenceRecord.getSequenceName(), samSequenceRecord.getSequenceLength() / 2 + 1, samSequenceRecord.getSequenceLength()));
-        }
-
-        return new Object[][]{
-                new Object[]{list1, invert1},
-                new Object[]{list2, invert2},
-                new Object[]{list3, invert3},
-                new Object[]{full, empty},
-                new Object[]{empty, full},
-                new Object[]{fullChopped, empty}
-        };
-    }
-
-    @Test(dataProvider = "invertData")
-    public void testInvertSquared(final IntervalList list, @SuppressWarnings("UnusedParameters") final IntervalList ignored) throws Exception {
-        final IntervalList inverseSquared = IntervalList.invert(IntervalList.invert(list));
-        final IntervalList originalClone = new IntervalList(list.getHeader());
-
-        for (final Interval interval : list) {
-            originalClone.add(interval);
-        }
-
-        Assert.assertEquals(
-                CollectionUtil.makeCollection(inverseSquared.iterator()),
-                CollectionUtil.makeCollection(originalClone.uniqued().iterator()));
-    }
-
-    @Test(dataProvider = "invertData")
-    public void testInvert(final IntervalList list, final IntervalList inverse) throws Exception {
-        Assert.assertEquals(
-                CollectionUtil.makeCollection(IntervalList.invert(list).iterator()),
-                CollectionUtil.makeCollection(inverse.iterator()));
-    }
-
-    @DataProvider(name = "subtractSingletonData")
-    public Object[][] subtractSingletonData() {
-        final IntervalList subtract1_from_2 = new IntervalList(fileHeader);
-        final IntervalList subtract2_from_3 = new IntervalList(fileHeader);
-        final IntervalList subtract1_from_3 = new IntervalList(fileHeader);
-        final IntervalList subtract3_from_1 = new IntervalList(fileHeader);
-
-        subtract1_from_2.add(new Interval("1", 301, 500));
-        subtract1_from_2.add(new Interval("2", 1, 99));
-        subtract1_from_2.add(new Interval("2", 301, 400));
-
-        subtract2_from_3.add(new Interval("1", 25, 49));
-        subtract2_from_3.add(new Interval("1", 151, 300));
-        subtract2_from_3.add(new Interval("2", 200, 249));
-        subtract2_from_3.add(new Interval("2", 271, 289));
-        subtract2_from_3.add(new Interval("2", 401, 600));
-        subtract2_from_3.add(new Interval("3", 50, 470));
-
-        subtract1_from_3.add(new Interval("1", 201, 201));
-        subtract1_from_3.add(new Interval("1", 301, 400));
-        subtract1_from_3.add(new Interval("2", 301, 600));
-        subtract1_from_3.add(new Interval("3", 50, 470));
-
-        subtract3_from_1.add(new Interval("1", 1, 49));    //de-facto 1:25-400                2:200-600                            3:50-470
-        subtract3_from_1.add(new Interval("2", 100, 150));
-
-        return new Object[][]{
-                new Object[]{list2, list1, subtract1_from_2},
-                new Object[]{list3, list2, subtract2_from_3},
-                new Object[]{list3, list1, subtract1_from_3},
-        };
-    }
-
-    @DataProvider(name = "subtractData")
-    public Object[][] subtractData() {
-        final IntervalList subtract12_from_3 = new IntervalList(fileHeader);
-
-        subtract12_from_3.add(new Interval("1", 201, 201));
-        subtract12_from_3.add(new Interval("2", 401, 600));
-        subtract12_from_3.add(new Interval("3", 50, 470));
-
-        return new Object[][]{
-                new Object[]{CollectionUtil.makeList(list3), CollectionUtil.makeList(list1, list2), subtract12_from_3},
-        };
-    }
-
-    @Test(dataProvider = "subtractData")
-    public void testSubtractIntervalLists(final List<IntervalList> fromLists, final List<IntervalList> whatLists, final IntervalList list) {
-        Assert.assertEquals(
-                CollectionUtil.makeCollection(IntervalList.subtract(fromLists, whatLists).iterator()),
-                CollectionUtil.makeCollection(list.iterator()));
-    }
-
-    @Test(dataProvider = "subtractSingletonData")
-    public void testSubtractSingletonIntervalLists(final IntervalList fromLists, final IntervalList whatLists, final IntervalList list) {
-        Assert.assertEquals(
-                CollectionUtil.makeCollection(IntervalList.subtract(fromLists, whatLists).iterator()),
-                CollectionUtil.makeCollection(list.iterator()));
-    }
-
-    @Test(dataProvider = "subtractSingletonData")
-    public void testSubtractSingletonasListIntervalList(final IntervalList fromLists, final IntervalList whatLists, final IntervalList list) {
-        Assert.assertEquals(
-                CollectionUtil.makeCollection(IntervalList.subtract(Collections.singletonList(fromLists), Collections.singletonList(whatLists)).iterator()),
-                CollectionUtil.makeCollection(list.iterator()));
-    }
-
-    @DataProvider(name = "VCFCompData")
-    public Object[][] VCFCompData() {
-        return new Object[][]{
-                new Object[]{"testdata/htsjdk/samtools/intervallist/IntervalListFromVCFTest.vcf", "testdata/htsjdk/samtools/intervallist/IntervalListFromVCFTestComp.interval_list", false},
-                new Object[]{"testdata/htsjdk/samtools/intervallist/IntervalListFromVCFTest.vcf", "testdata/htsjdk/samtools/intervallist/IntervalListFromVCFTestCompInverse.interval_list", true},
-                new Object[]{"testdata/htsjdk/samtools/intervallist/IntervalListFromVCFTestManual.vcf", "testdata/htsjdk/samtools/intervallist/IntervalListFromVCFTestManualComp.interval_list", false},
-                new Object[]{"testdata/htsjdk/samtools/intervallist/IntervalListFromVCFTestManual.vcf", "testdata/htsjdk/samtools/intervallist/IntervalListFromVCFTestCompInverseManual.interval_list", true}
-        };
-    }
-
-    @Test(dataProvider = "VCFCompData")
-    public void testFromVCF(final String vcf, final String compInterval, final boolean invertVCF) {
-
-        final File vcfFile = new File(vcf);
-        final File compIntervalFile = new File(compInterval);
-
-        final IntervalList compList = IntervalList.fromFile(compIntervalFile);
-        final IntervalList list = invertVCF ? IntervalList.invert(VCFFileReader.fromVcf(vcfFile)) : VCFFileReader.fromVcf(vcfFile);
-
-        compList.getHeader().getSequenceDictionary().assertSameDictionary(list.getHeader().getSequenceDictionary());
-
-        final Collection<Interval> intervals = CollectionUtil.makeCollection(list.iterator());
-        final Collection<Interval> compIntervals = CollectionUtil.makeCollection(compList.iterator());
-
-        //assert that the intervals correspond
-        Assert.assertEquals(intervals, compIntervals);
-
-        final List<String> intervalNames = new LinkedList<String>();
-        final List<String> compIntervalNames = new LinkedList<String>();
-
-        for (final Interval interval : intervals) {
-            intervalNames.add(interval.getName());
-        }
-        for (final Interval interval : compIntervals) {
-            compIntervalNames.add(interval.getName());
-        }
-        //assert that the names match
-        Assert.assertEquals(intervalNames, compIntervalNames);
-    }
-
-    @DataProvider
-    public Object[][] testFromSequenceData() {
-        return new Object[][]{
-                new Object[]{"testdata/htsjdk/samtools/intervallist/IntervalListFromVCFTestComp.interval_list", "1", 249250621},
-                new Object[]{"testdata/htsjdk/samtools/intervallist/IntervalListFromVCFTestComp.interval_list", "2", 243199373},
-                new Object[]{"testdata/htsjdk/samtools/intervallist/IntervalListFromVCFTestComp.interval_list", "3", 198022430},
-        };
-    }
-
-    @Test(dataProvider = "testFromSequenceData")
-    public void testFromSequenceName(final String intervalList, final String referenceName, final Integer length) {
-
-        final IntervalList intervals = IntervalList.fromFile(new File(intervalList));
-        final IntervalList test = IntervalList.fromName(intervals.getHeader(), referenceName);
-        Assert.assertEquals(test.getIntervals(), CollectionUtil.makeList(new Interval(referenceName, 1, length)));
-    }
-
-    @Test
-    public void testMerges() {
-        final SortedSet<Interval> intervals = new TreeSet<Interval>() {{
-            add(new Interval("1", 500, 600, false, "foo"));
-            add(new Interval("1", 550, 650, false, "bar"));
-            add(new Interval("1", 625, 699, false, "splat"));
-        }};
-
-        Interval out = IntervalList.merge(intervals, false);
-        Assert.assertEquals(out.getStart(), 500);
-        Assert.assertEquals(out.getEnd(), 699);
-
-        intervals.add(new Interval("1", 626, 629, false, "whee"));
-        out = IntervalList.merge(intervals, false);
-        Assert.assertEquals(out.getStart(), 500);
-        Assert.assertEquals(out.getEnd(), 699);
-    }
-
-    @Test
-    public void testBreakAtBands() {
-        final List<Interval> intervals = new ArrayList<Interval>() {{
-            add(new Interval("A", 1, 99, false, "foo"));
-            add(new Interval("A", 98, 99, true, "psyduck"));
-            add(new Interval("1", 500, 600, false, "foo")); // -> 2
-            add(new Interval("1", 550, 650, false, "bar")); // -> 2
-            add(new Interval("1", 625, 699, false, "splat"));
-            add(new Interval("2", 99, 201, false, "geodude")); // -> 3
-            add(new Interval("3", 100, 99, false, "charizard"));  // Empty Interval
-            add(new Interval("3", 101, 100, false, "golduck"));   // Empty Interval
-        }};
-
-        final List<Interval> brokenIntervals = IntervalList.breakIntervalsAtBandMultiples(intervals, 100);
-
-        Assert.assertEquals(brokenIntervals.size(), 12);
-        Assert.assertEquals(brokenIntervals.get(0), new Interval("A", 1, 99, false, "foo"));
-
-        Assert.assertEquals(brokenIntervals.get(1), new Interval("A", 98, 99, true, "psyduck"));
-
-        Assert.assertEquals(brokenIntervals.get(2), new Interval("1", 500, 599, false, "foo.1"));
-        Assert.assertEquals(brokenIntervals.get(3), new Interval("1", 600, 600, false, "foo.2"));
-
-        Assert.assertEquals(brokenIntervals.get(4), new Interval("1", 550, 599, false, "bar.1"));
-        Assert.assertEquals(brokenIntervals.get(5), new Interval("1", 600, 650, false, "bar.2"));
-
-        Assert.assertEquals(brokenIntervals.get(6), new Interval("1", 625, 699, false, "splat"));
-
-        Assert.assertEquals(brokenIntervals.get(7), new Interval("2", 99, 99, false, "geodude.1"));
-        Assert.assertEquals(brokenIntervals.get(8), new Interval("2", 100, 199, false, "geodude.2"));
-        Assert.assertEquals(brokenIntervals.get(9), new Interval("2", 200, 201, false, "geodude.3"));
-
-        Assert.assertEquals(brokenIntervals.get(10), new Interval("3", 100, 99, false, "charizard"));
-        Assert.assertEquals(brokenIntervals.get(11), new Interval("3", 101, 100, false, "golduck"));
-    }
-
-    @Test(expectedExceptions = IllegalArgumentException.class)
-    public void TestFailAdd() {
-        IntervalList test = new IntervalList(this.fileHeader);
-        test.add(new Interval("blarg", 1, 1));
-    }
-
-    @Test(expectedExceptions = IllegalArgumentException.class)
-    public void TestFailAddAll() {
-        IntervalList test = new IntervalList(this.fileHeader);
-        test.addall(CollectionUtil.makeList(new Interval("blarg", 1, 1), new Interval("bloorg", 1, 1)));
-    }
-
-    @Test(expectedExceptions = IllegalArgumentException.class)
-    public void changeHeader() {
-        SAMFileHeader clonedHeader = fileHeader.clone();
-        clonedHeader.addSequence(new SAMSequenceRecord("4", 1000));
-        IntervalList usingClone1 = new IntervalList(clonedHeader);
-        usingClone1.add(new Interval("4", 1, 100));
-        IntervalList usingClone2 = new IntervalList(clonedHeader);
-        usingClone2.add(new Interval("4", 10, 20));
-
-
-        IntervalList expected = new IntervalList(clonedHeader);
-        expected.add(new Interval("4", 1, 9));
-        expected.add(new Interval("4", 21, 100));
-
-        //pull rug from underneath (one call will change all the headers, since there's actually only one)
-        usingClone1.getHeader().setSequenceDictionary(fileHeader.getSequenceDictionary());
-
-        //now interval lists are in "illegal state" since they contain contigs that are not in the header.
-        //this next step should fail
-        IntervalList.subtract(usingClone1, usingClone2);
-
-        Assert.assertTrue(false);
-
-    }
-}
\ No newline at end of file
diff --git a/src/tests/java/htsjdk/samtools/util/IoUtilTest.java b/src/tests/java/htsjdk/samtools/util/IoUtilTest.java
deleted file mode 100644
index 3dad415..0000000
--- a/src/tests/java/htsjdk/samtools/util/IoUtilTest.java
+++ /dev/null
@@ -1,190 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools.util;
-
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.io.StringWriter;
-import java.util.Arrays;
-import java.util.List;
-
-public class IoUtilTest {
-
-    private static final File SLURP_TEST_FILE = new File("testdata/htsjdk/samtools/io/slurptest.txt");
-    private static final File EMPTY_FILE = new File("testdata/htsjdk/samtools/io/empty.txt");
-    private static final File FIVE_SPACES_THEN_A_NEWLINE_THEN_FIVE_SPACES_FILE = new File("testdata/htsjdk/samtools/io/5newline5.txt");
-    private static final List<String> SLURP_TEST_LINES = Arrays.asList("bacon   and rice   ", "for breakfast  ", "wont you join me");
-    private static final String SLURP_TEST_LINE_SEPARATOR = "\n";
-    private static final String TEST_FILE_PREFIX = "htsjdk-IOUtilTest";
-    private static final String TEST_FILE_EXTENSIONS[] = {".txt", ".txt.gz"};
-    private static final String TEST_STRING = "bar!";
-    private File existingTempFile;
-    private String systemTempDir;
-
-    @BeforeClass
-    public void setUp() throws IOException {
-        existingTempFile = File.createTempFile("FiletypeTest.", ".tmp");
-        existingTempFile.deleteOnExit();
-        systemTempDir = System.getProperty("java.io.tmpdir");
-        final File tmpDir = new File(systemTempDir);
-        if (!tmpDir.isDirectory()) tmpDir.mkdir();
-        if (!tmpDir.isDirectory())
-            throw new RuntimeException("java.io.tmpdir (" + systemTempDir + ") is not a directory");
-    }
-
-    @Test
-    public void testFileReadingAndWriting() throws IOException {
-        String randomizedTestString = TEST_STRING + System.currentTimeMillis();
-        for (String ext : TEST_FILE_EXTENSIONS) {
-            File f = File.createTempFile(TEST_FILE_PREFIX, ext);
-            f.deleteOnExit();
-
-            OutputStream os = IOUtil.openFileForWriting(f);
-            BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(os));
-            writer.write(randomizedTestString);
-            writer.close();
-
-            InputStream is = IOUtil.openFileForReading(f);
-            BufferedReader reader = new BufferedReader(new InputStreamReader(is));
-            String line = reader.readLine();
-            Assert.assertEquals(randomizedTestString, line);
-        }
-    }
-
-    @Test(groups = {"unix"})
-    public void testGetCanonicalPath() throws IOException {
-        String tmpPath = System.getProperty("java.io.tmpdir");
-        String userName = System.getProperty("user.name");
-
-        if (tmpPath.endsWith(userName)) {
-            tmpPath = tmpPath.substring(0, tmpPath.length() - userName.length());
-        }
-
-        File tmpDir = new File(tmpPath, userName);
-        tmpDir.mkdir();
-        File actual = new File(tmpDir, "actual.txt");
-        ProcessExecutor.execute(new String[]{"touch", actual.getAbsolutePath()});
-        File symlink = new File(tmpDir, "symlink.txt");
-        ProcessExecutor.execute(new String[]{"ln", "-s", actual.getAbsolutePath(), symlink.getAbsolutePath()});
-        File lnDir = new File(tmpDir, "symLinkDir");
-        ProcessExecutor.execute(new String[]{"ln", "-s", tmpDir.getAbsolutePath(), lnDir.getAbsolutePath()});
-        File lnToActual = new File(lnDir, "actual.txt");
-        File lnToSymlink = new File(lnDir, "symlink.txt");
-
-
-        File files[] = {actual, symlink, lnToActual, lnToSymlink};
-        for (File f : files) {
-            Assert.assertEquals(IOUtil.getFullCanonicalPath(f), actual.getCanonicalPath());
-        }
-
-        actual.delete();
-        symlink.delete();
-        lnToActual.delete();
-        lnToSymlink.delete();
-        lnDir.delete();
-        tmpDir.delete();
-    }
-
-    @Test
-    public void testUtfWriting() throws IOException {
-        final String utf8 = new StringWriter().append((char) 168).append((char) 197).toString();
-        for (String ext : TEST_FILE_EXTENSIONS) {
-            final File f = File.createTempFile(TEST_FILE_PREFIX, ext);
-            f.deleteOnExit();
-
-            final BufferedWriter writer = IOUtil.openFileForBufferedUtf8Writing(f);
-            writer.write(utf8);
-            CloserUtil.close(writer);
-
-            final BufferedReader reader = IOUtil.openFileForBufferedUtf8Reading(f);
-            final String line = reader.readLine();
-            Assert.assertEquals(utf8, line, f.getAbsolutePath());
-
-            CloserUtil.close(reader);
-
-        }
-    }
-
-    @Test
-    public void slurpLinesTest() throws FileNotFoundException {
-        Assert.assertEquals(IOUtil.slurpLines(SLURP_TEST_FILE), SLURP_TEST_LINES);
-    }
-
-    @Test
-    public void slurpWhitespaceOnlyFileTest() throws FileNotFoundException {
-        Assert.assertEquals(IOUtil.slurp(FIVE_SPACES_THEN_A_NEWLINE_THEN_FIVE_SPACES_FILE), "     \n     ");
-    }
-
-    @Test
-    public void slurpEmptyFileTest() throws FileNotFoundException {
-        Assert.assertEquals(IOUtil.slurp(EMPTY_FILE), "");
-    }
-
-    @Test
-    public void slurpTest() throws FileNotFoundException {
-        Assert.assertEquals(IOUtil.slurp(SLURP_TEST_FILE), CollectionUtil.join(SLURP_TEST_LINES, SLURP_TEST_LINE_SEPARATOR));
-    }
-
-    @Test(dataProvider = "fileTypeTestCases")
-    public void testFileType(final String path, boolean expectedIsRegularFile) {
-        final File file = new File(path);
-        Assert.assertEquals(IOUtil.isRegularPath(file), expectedIsRegularFile);
-    }
-
-    @Test(dataProvider = "unixFileTypeTestCases", groups = {"unix"})
-    public void testFileTypeUnix(final String path, boolean expectedIsRegularFile) {
-        final File file = new File(path);
-        Assert.assertEquals(IOUtil.isRegularPath(file), expectedIsRegularFile);
-    }
-
-    @DataProvider(name = "fileTypeTestCases")
-    private Object[][] fileTypeTestCases() {
-        return new Object[][]{
-                {existingTempFile.getAbsolutePath(), Boolean.TRUE},
-                {systemTempDir, Boolean.FALSE}
-
-        };
-    }
-
-    @DataProvider(name = "unixFileTypeTestCases")
-    private Object[][] unixFileTypeTestCases() {
-        return new Object[][]{
-                {"/dev/null", Boolean.FALSE},
-                {"/dev/stdout", Boolean.FALSE},
-                {"/non/existent/file", Boolean.TRUE},
-        };
-    }
-}
diff --git a/src/tests/java/htsjdk/samtools/util/QualityEncodingDetectorTest.java b/src/tests/java/htsjdk/samtools/util/QualityEncodingDetectorTest.java
deleted file mode 100644
index 466b529..0000000
--- a/src/tests/java/htsjdk/samtools/util/QualityEncodingDetectorTest.java
+++ /dev/null
@@ -1,109 +0,0 @@
-package htsjdk.samtools.util;
-
-import htsjdk.samtools.SAMException;
-import htsjdk.samtools.SAMRecordSetBuilder;
-import htsjdk.samtools.SamReader;
-import htsjdk.samtools.SamReaderFactory;
-import htsjdk.samtools.fastq.FastqReader;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.util.Arrays;
-import java.util.List;
-
-public class QualityEncodingDetectorTest {
-
-    private static class Testcase {
-        private final File f;
-        private final FastqQualityFormat q;
-
-        Testcase(final File file, final FastqQualityFormat qualityFormat) {
-            this.f = file;
-            this.q = qualityFormat;
-        }
-    }
-
-    final static List<Testcase> FASTQ_TESTCASES = Arrays.asList(
-            // Need to use full-range quality here, as Solexa and Illumina are near indistinguishable
-            new Testcase(new File("./testdata/htsjdk/samtools/util/QualityEncodingDetectorTest/solexa_full_range_as_solexa.fastq"), FastqQualityFormat.Solexa),
-            new Testcase(new File("./testdata/htsjdk/samtools/util/QualityEncodingDetectorTest/s_1_sequence.txt"), FastqQualityFormat.Illumina),
-            new Testcase(new File("./testdata/htsjdk/samtools/util/QualityEncodingDetectorTest/5k-30BB2AAXX.3.aligned.sam.fastq"), FastqQualityFormat.Standard)
-    );
-    final static List<Testcase> BAM_TESTCASES = Arrays.asList(
-            new Testcase(new File("./testdata/htsjdk/samtools/util/QualityEncodingDetectorTest/unmapped.sam"), FastqQualityFormat.Standard),
-            new Testcase(new File("./testdata/htsjdk/samtools/BAMFileIndexTest/index_test.bam"), FastqQualityFormat.Standard),
-            new Testcase(new File("./testdata/htsjdk/samtools/util/QualityEncodingDetectorTest/solexa-as-standard.bam"), FastqQualityFormat.Solexa),
-            new Testcase(new File("./testdata/htsjdk/samtools/util/QualityEncodingDetectorTest/illumina-as-standard.bam"), FastqQualityFormat.Illumina)
-
-    );
-
-    Object[][] renderObjectArrayArray(final List<Testcase> testcaseList) {
-        final Object[][] data = new Object[testcaseList.size()][];
-        for (int i = 0; i < data.length; i++) {
-            final Testcase testcase = testcaseList.get(i);
-            data[i] = new Object[]{testcase.f, testcase.q};
-        }
-        return data;
-    }
-
-    @DataProvider(name = "BAM_TESTCASES")
-    Object[][] bamTestcases() {
-        return renderObjectArrayArray(BAM_TESTCASES);
-    }
-
-    @DataProvider(name = "FASTQ_TESTCASES")
-    Object[][] fastqTestcases() {
-        return renderObjectArrayArray(FASTQ_TESTCASES);
-    }
-
-    @Test(dataProvider = "FASTQ_TESTCASES", groups = {"unix"})
-    public void testFastqQualityInference(final File input, final FastqQualityFormat expectedQualityFormat) {
-        final FastqReader reader = new FastqReader(input);
-        Assert.assertEquals(QualityEncodingDetector.detect(reader), expectedQualityFormat);
-        reader.close();
-    }
-
-    @Test(dataProvider = "BAM_TESTCASES", groups = {"unix"})
-    public void testBamQualityInference(final File input, final FastqQualityFormat expectedQualityFormat) {
-        final SamReader reader = SamReaderFactory.makeDefault().open(input);
-        Assert.assertEquals(QualityEncodingDetector.detect(reader), expectedQualityFormat);
-    }
-
-    @Test
-    public void testSmallBamForDetectorFailure() {
-        final SAMRecordSetBuilder samRecordSetBuilder = createSmallUnmappedSam();
-        Assert.assertNotSame(QualityEncodingDetector.detect(samRecordSetBuilder.getSamReader(),
-                null), FastqQualityFormat.Standard);
-    }
-
-    @Test
-    public void testSmallBamWithExpectedQuality() {
-        final SAMRecordSetBuilder samRecordSetBuilder = createSmallUnmappedSam();
-        Assert.assertEquals(QualityEncodingDetector.detect(samRecordSetBuilder.getSamReader(),
-                FastqQualityFormat.Standard), FastqQualityFormat.Standard);
-    }
-
-    @Test(expectedExceptions = SAMException.class)
-    public void testQualitySanity() {
-        final SAMRecordSetBuilder samRecordSetBuilder = createSmallUnmappedSam();
-        QualityEncodingDetector.detect(samRecordSetBuilder.getSamReader(),
-                FastqQualityFormat.Illumina);
-    }
-
-    private SAMRecordSetBuilder createSmallUnmappedSam() {
-        final SAMRecordSetBuilder samRecordSetBuilder = new SAMRecordSetBuilder();
-        samRecordSetBuilder.setReadLength(25);
-        samRecordSetBuilder.addFrag("READ0", -1, -1, false, true, null, "@@@FFFFFHHHHHJIJIIJIIJJJJ", -1);
-        samRecordSetBuilder.addFrag("READ1", -1, -1, false, true, null, "@@@FFFFFHHHHHJIJIIJIIJJJJ", -1);
-        samRecordSetBuilder.addFrag("READ2", -1, -1, false, true, null, "@CCFDFEDHHHFFHIIII at GH<FFH", -1);
-        samRecordSetBuilder.addFrag("READ3", -1, -1, false, true, null, "@@?DFFDFHFFHDHIIHIIEIIJGG", -1);
-        samRecordSetBuilder.addFrag("READ4", -1, -1, false, true, null, "@CCFFDDFHHHHHIIJJHFJJJJJH", -1);
-        samRecordSetBuilder.addFrag("READ5", -1, -1, false, true, null, "BCCFFFFFHHHHHJJJJJIJJJJJJ", -1);
-        samRecordSetBuilder.addFrag("READ6", -1, -1, false, true, null, "@@CDFFFFHHHFHHIJJJJJJJIJJ", -1);
-        samRecordSetBuilder.addFrag("READ7", -1, -1, false, true, null, "CCCFFFFFHHHHHJJJJIJJJJHII", -1);
-        samRecordSetBuilder.addFrag("READ8", -1, -1, false, true, null, "CCCFFFFFHHHHHJJJJJJJJJJJJ", -1);
-        return samRecordSetBuilder;
-    }
-}
diff --git a/src/tests/java/htsjdk/samtools/util/SamLocusIteratorTest.java b/src/tests/java/htsjdk/samtools/util/SamLocusIteratorTest.java
deleted file mode 100644
index eddd257..0000000
--- a/src/tests/java/htsjdk/samtools/util/SamLocusIteratorTest.java
+++ /dev/null
@@ -1,231 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2010 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools.util;
-
-import htsjdk.samtools.SamInputResource;
-import htsjdk.samtools.SamReader;
-import htsjdk.samtools.SamReaderFactory;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.io.ByteArrayInputStream;
-
-/**
- * @author alecw@broadinstitute.org
- */
-public class SamLocusIteratorTest {
-    private SamReader createSamFileReader(final String samExample) {
-        final ByteArrayInputStream inputStream = new ByteArrayInputStream(samExample.getBytes());
-        return SamReaderFactory.makeDefault().open(SamInputResource.of(inputStream));
-    }
-
-    private SamLocusIterator createSamLocusIterator(final SamReader samReader) {
-        final SamLocusIterator ret = new SamLocusIterator(samReader);
-        ret.setEmitUncoveredLoci(false);
-        return ret;
-    }
-
-    @Test
-    public void testBasicIterator() {
-
-        final String sqHeader = "@HD\tSO:coordinate\tVN:1.0\n at SQ\tSN:chrM\tAS:HG18\tLN:100000\n";
-        final String seq1 = "ACCTACGTTCAATATTACAGGCGAACATACTTACTA";
-        final String qual1 = "++++++++++++++++++++++++++++++++++++"; // phred 10
-        final String s1 = "3851612\t16\tchrM\t165\t255\t36M\t*\t0\t0\t" + seq1 + "\t" + qual1 + "\n";
-        final String exampleSam = sqHeader + s1 + s1;
-
-        final SamReader samReader = createSamFileReader(exampleSam);
-        final SamLocusIterator sli = createSamLocusIterator(samReader);
-
-
-        // make sure we accumulated depth of 2 for each position
-        int pos = 165;
-        for (final SamLocusIterator.LocusInfo li : sli) {
-            Assert.assertEquals(pos++, li.getPosition());
-            Assert.assertEquals(2, li.getRecordAndPositions().size());
-        }
-
-    }
-
-    @Test
-    public void testEmitUncoveredLoci() {
-
-        final String sqHeader = "@HD\tSO:coordinate\tVN:1.0\n at SQ\tSN:chrM\tAS:HG18\tLN:100000\n";
-        final String seq1 = "ACCTACGTTCAATATTACAGGCGAACATACTTACTA";
-        final String qual1 = "++++++++++++++++++++++++++++++++++++"; // phred 10
-        final String s1 = "3851612\t16\tchrM\t165\t255\t36M\t*\t0\t0\t" + seq1 + "\t" + qual1 + "\n";
-        final String exampleSam = sqHeader + s1 + s1;
-
-        final SamReader samReader = createSamFileReader(exampleSam);
-        final SamLocusIterator sli = new SamLocusIterator(samReader);
-
-        // make sure we accumulated depth of 2 for each position
-        int pos = 1;
-        final int coveredStart = 165;
-        final int coveredEnd = CoordMath.getEnd(coveredStart, seq1.length());
-        for (final SamLocusIterator.LocusInfo li : sli) {
-            Assert.assertEquals(li.getPosition(), pos++);
-            final int expectedReads;
-            if (li.getPosition() >= coveredStart && li.getPosition() <= coveredEnd) {
-                expectedReads = 2;
-            } else {
-                expectedReads = 0;
-            }
-            Assert.assertEquals(li.getRecordAndPositions().size(), expectedReads);
-        }
-        Assert.assertEquals(pos, 100001);
-
-    }
-
-    @Test
-    public void testQualityFilter() {
-
-        final String sqHeader = "@HD\tSO:coordinate\tVN:1.0\n at SQ\tSN:chrM\tAS:HG18\tLN:100000\n";
-        final String seq1 = "ACCTACGTTCAATATTACAGGCGAACATACTTACTA";
-        final String qual1 = "++++++++++++++++++++++++++++++++++++"; // phred 10
-        final String qual2 = "+*+*+*+*+*+*+*+*+*+*+*+*+*+*+*+*+*+*"; // phred 10,9...
-        final String s1 = "3851612\t16\tchrM\t165\t255\t36M\t*\t0\t0\t" + seq1 + "\t" + qual1 + "\n";
-        final String s2 = "3851612\t16\tchrM\t165\t255\t36M\t*\t0\t0\t" + seq1 + "\t" + qual2 + "\n";
-        final String exampleSam = sqHeader + s1 + s2;
-
-        final SamReader samReader = createSamFileReader(exampleSam);
-        final SamLocusIterator sli = createSamLocusIterator(samReader);
-        sli.setQualityScoreCutoff(10);
-
-
-        // make sure we accumulated depth 2 for even positions, 1 for odd positions
-        int pos = 165;
-        for (final SamLocusIterator.LocusInfo li : sli) {
-            Assert.assertEquals((pos % 2 == 0) ? 1 : 2, li.getRecordAndPositions().size());
-            Assert.assertEquals(pos++, li.getPosition());
-        }
-
-    }
-
-    /**
-     * Try all CIGAR operands (except H and P) and confirm that loci produced by SamLocusIterator are as expected.
-     */
-    @Test
-    public void testSimpleGappedAlignment() {
-        final String sqHeader = "@HD\tSO:coordinate\tVN:1.0\n at SQ\tSN:chrM\tAS:HG18\tLN:100000\n";
-        final String seq1 = "ACCTACGTTCAATATTACAGGCGAACATACTTACTA";
-        final String qual1 = "++++++++++++++++++++++++++++++++++++"; // phred 10
-        final String s1 = "3851612\t16\tchrM\t165\t255\t3S3M3N3M3D3M3I18M3S\t*\t0\t0\t" + seq1 + "\t" + qual1 + "\n";
-        final String exampleSam = sqHeader + s1 + s1;
-
-        final SamReader samReader = createSamFileReader(exampleSam);
-        final SamLocusIterator sli = createSamLocusIterator(samReader);
-
-
-        // make sure we accumulated depth of 2 for each position
-        final int[] expectedReferencePositions = new int[]{
-                // 3S
-                165, 166, 167, // 3M
-                // 3N
-                171, 172, 173, // 3M
-                // 3D
-                177, 178, 179, // 3M
-                // 3I
-                180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197}; // 18M
-
-        final int[] expectedReadOffsets = new int[]{
-                // 3S
-                3, 4, 5, // 3M
-                // 3N
-                6, 7, 8, // 3M
-                // 3D
-                9, 10, 11, // 3M
-                // 3I
-                15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32 // 3M
-        };
-        int i = 0;
-        for (final SamLocusIterator.LocusInfo li : sli) {
-            Assert.assertEquals(li.getRecordAndPositions().size(), 2);
-            Assert.assertEquals(li.getPosition(), expectedReferencePositions[i]);
-            Assert.assertEquals(li.getRecordAndPositions().get(0).getOffset(), expectedReadOffsets[i]);
-            Assert.assertEquals(li.getRecordAndPositions().get(1).getOffset(), expectedReadOffsets[i]);
-            ++i;
-        }
-    }
-
-    /**
-     * Test two reads that overlap because one has a deletion in the middle of it.
-     */
-    @Test
-    public void testOverlappingGappedAlignments() {
-        final String sqHeader = "@HD\tSO:coordinate\tVN:1.0\n at SQ\tSN:chrM\tAS:HG18\tLN:100000\n";
-        final String seq1 = "ACCTACGTTCAATATTACAGGCGAACATACTTACTA";
-        final String qual1 = "++++++++++++++++++++++++++++++++++++"; // phred 10
-        // Were it not for the gap, these two reads would not overlap
-        final String s1 = "3851612\t16\tchrM\t165\t255\t18M10D18M\t*\t0\t0\t" + seq1 + "\t" + qual1 + "\n";
-        final String s2 = "3851613\t16\tchrM\t206\t255\t36M\t*\t0\t0\t" + seq1 + "\t" + qual1 + "\n";
-        final String exampleSam = sqHeader + s1 + s2;
-
-        final SamReader samReader = createSamFileReader(exampleSam);
-        final SamLocusIterator sli = createSamLocusIterator(samReader);
-        // 5 base overlap btw the two reads
-        final int numBasesCovered = 36 + 36 - 5;
-        final int[] expectedReferencePositions = new int[numBasesCovered];
-        final int[] expectedDepths = new int[numBasesCovered];
-        final int[][] expectedReadOffsets = new int[numBasesCovered][];
-
-        int i;
-        // First 18 bases are from the first read
-        for (i = 0; i < 18; ++i) {
-            expectedReferencePositions[i] = 165 + i;
-            expectedDepths[i] = 1;
-            expectedReadOffsets[i] = new int[]{i};
-        }
-        // Gap of 10, then 13 bases from the first read
-        for (; i < 36 - 5; ++i) {
-            expectedReferencePositions[i] = 165 + 10 + i;
-            expectedDepths[i] = 1;
-            expectedReadOffsets[i] = new int[]{i};
-        }
-        // Last 5 bases of first read overlap first 5 bases of second read
-        for (; i < 36; ++i) {
-            expectedReferencePositions[i] = 165 + 10 + i;
-            expectedDepths[i] = 2;
-            expectedReadOffsets[i] = new int[]{i, i - 31};
-
-        }
-        // Last 31 bases of 2nd read
-        for (; i < 36 + 36 - 5; ++i) {
-            expectedReferencePositions[i] = 165 + 10 + i;
-            expectedDepths[i] = 1;
-            expectedReadOffsets[i] = new int[]{i - 31};
-        }
-
-        i = 0;
-        for (final SamLocusIterator.LocusInfo li : sli) {
-            Assert.assertEquals(li.getRecordAndPositions().size(), expectedDepths[i]);
-            Assert.assertEquals(li.getPosition(), expectedReferencePositions[i]);
-            Assert.assertEquals(li.getRecordAndPositions().size(), expectedReadOffsets[i].length);
-            for (int j = 0; j < expectedReadOffsets[i].length; ++j) {
-                Assert.assertEquals(li.getRecordAndPositions().get(j).getOffset(), expectedReadOffsets[i][j]);
-            }
-            ++i;
-        }
-    }
-}
diff --git a/src/tests/java/htsjdk/tribble/TestUtils.java b/src/tests/java/htsjdk/tribble/TestUtils.java
deleted file mode 100644
index 70f30bf..0000000
--- a/src/tests/java/htsjdk/tribble/TestUtils.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/** This software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-
-package htsjdk.tribble;
-
-
-/**
- * User: jacob
- * Date: 2012-Dec-13
- */
-public class TestUtils {
-    public static String DATA_DIR = "testdata/htsjdk/tribble/";
-}
diff --git a/src/tests/java/htsjdk/tribble/index/IndexFactoryTest.java b/src/tests/java/htsjdk/tribble/index/IndexFactoryTest.java
deleted file mode 100644
index 4d5823b..0000000
--- a/src/tests/java/htsjdk/tribble/index/IndexFactoryTest.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2013 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.tribble.index;
-
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.SAMSequenceRecord;
-import htsjdk.tribble.TestUtils;
-import htsjdk.tribble.TribbleException;
-import htsjdk.tribble.bed.BEDCodec;
-import htsjdk.tribble.index.tabix.TabixFormat;
-import htsjdk.tribble.index.tabix.TabixIndex;
-import htsjdk.variant.vcf.VCFCodec;
-import htsjdk.variant.vcf.VCFFileReader;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.util.List;
-
-/**
- * User: jacob
- * Date: 2012-Aug-23
- */
-public class IndexFactoryTest {
-
-    final File sortedBedFile = new File(TestUtils.DATA_DIR + "bed/Unigene.sample.bed");
-    final File unsortedBedFile = new File(TestUtils.DATA_DIR + "bed/unsorted.bed");
-    final File discontinuousFile = new File(TestUtils.DATA_DIR + "bed/disconcontigs.bed");
-    final BEDCodec bedCodec = new BEDCodec();
-
-    @Test
-    public void testCreateLinearIndex() throws Exception {
-        Index index = IndexFactory.createLinearIndex(sortedBedFile, bedCodec);
-        String chr = "chr2";
-
-        Assert.assertTrue(index.getSequenceNames().contains(chr));
-        Assert.assertTrue(index.containsChromosome(chr));
-        Assert.assertEquals(1, index.getSequenceNames().size());
-        List<Block> blocks = index.getBlocks(chr, 1, 50);
-        Assert.assertEquals(1, blocks.size());
-
-        Block block = blocks.get(0);
-        Assert.assertEquals(78, block.getSize());
-    }
-
-    @Test(expectedExceptions = TribbleException.MalformedFeatureFile.class, dataProvider = "indexFactoryProvider")
-    public void testCreateIndexUnsorted(IndexFactory.IndexType type) throws Exception{
-        Index index = IndexFactory.createIndex(unsortedBedFile, bedCodec, type);
-    }
-
-    @Test(expectedExceptions = TribbleException.MalformedFeatureFile.class, dataProvider = "indexFactoryProvider")
-    public void testCreateIndexDiscontinuousContigs(IndexFactory.IndexType type) throws Exception{
-        Index index = IndexFactory.createIndex(discontinuousFile, bedCodec, type);
-    }
-
-    @DataProvider(name = "indexFactoryProvider")
-    public Object[][] getIndexFactoryTypes(){
-        return new Object[][] {
-                new Object[] { IndexFactory.IndexType.LINEAR },
-                new Object[] { IndexFactory.IndexType.INTERVAL_TREE }
-        };
-    }
-
-    @Test
-    public void testCreateTabixIndexOnBlockCompressed() {
-        // index a VCF
-        final File inputFileVcf = new File("testdata/htsjdk/tribble/tabix/testTabixIndex.vcf");
-        final VCFFileReader readerVcf = new VCFFileReader(inputFileVcf, false);
-        final SAMSequenceDictionary vcfDict = readerVcf.getFileHeader().getSequenceDictionary();
-        final TabixIndex tabixIndexVcf =
-                IndexFactory.createTabixIndex(inputFileVcf, new VCFCodec(), TabixFormat.VCF,
-                vcfDict);
-
-        // index the same bgzipped VCF
-        final File inputFileVcfGz = new File("testdata/htsjdk/tribble/tabix/testTabixIndex.vcf.gz");
-        final VCFFileReader readerVcfGz = new VCFFileReader(inputFileVcfGz, false);
-        final TabixIndex tabixIndexVcfGz =
-                IndexFactory.createTabixIndex(inputFileVcfGz, new VCFCodec(), TabixFormat.VCF,
-                        readerVcfGz.getFileHeader().getSequenceDictionary());
-
-        // assert that each sequence in the header that represents some VCF row ended up in the index
-        // for both the VCF and bgzipped VCF
-        for (SAMSequenceRecord samSequenceRecord : vcfDict.getSequences()) {
-            Assert.assertTrue(
-                    tabixIndexVcf.containsChromosome(samSequenceRecord.getSequenceName()),
-                    "Tabix indexed VCF does not contain sequence: " + samSequenceRecord.getSequenceName());
-
-            Assert.assertTrue(
-                    tabixIndexVcfGz.containsChromosome(samSequenceRecord.getSequenceName()),
-                    "Tabix indexed (bgzipped) VCF does not contain sequence: " + samSequenceRecord.getSequenceName());
-        }
-    }
-}
diff --git a/src/tests/java/htsjdk/tribble/index/tabix/TabixIndexTest.java b/src/tests/java/htsjdk/tribble/index/tabix/TabixIndexTest.java
deleted file mode 100644
index 340ff70..0000000
--- a/src/tests/java/htsjdk/tribble/index/tabix/TabixIndexTest.java
+++ /dev/null
@@ -1,143 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2014 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.tribble.index.tabix;
-
-import htsjdk.samtools.util.BlockCompressedOutputStream;
-import htsjdk.tribble.index.IndexFactory;
-import htsjdk.tribble.util.LittleEndianOutputStream;
-import htsjdk.tribble.util.TabixUtils;
-import htsjdk.variant.variantcontext.VariantContext;
-import htsjdk.variant.variantcontext.writer.VariantContextWriter;
-import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder;
-import htsjdk.variant.vcf.VCFCodec;
-import htsjdk.variant.vcf.VCFFileReader;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.util.Iterator;
-
-public class TabixIndexTest {
-    private static final File SMALL_TABIX_FILE = new File("testdata/htsjdk/tribble/tabix/trioDup.vcf.gz.tbi");
-    private static final File BIGGER_TABIX_FILE = new File("testdata/htsjdk/tribble/tabix/bigger.vcf.gz.tbi");
-
-    /**
-     * Read an existing index from disk, write it to a temp file, read that in, and assert that both in-memory
-     * representations are identical.  Disk representations may not be identical due to arbitrary bin order and
-     * compression differences.
-     */
-    @Test(dataProvider = "readWriteTestDataProvider")
-    public void readWriteTest(final File tabixFile) throws Exception {
-        final TabixIndex index = new TabixIndex(tabixFile);
-        final File indexFile = File.createTempFile("TabixIndexTest.", TabixUtils.STANDARD_INDEX_EXTENSION);
-        indexFile.deleteOnExit();
-        final LittleEndianOutputStream los = new LittleEndianOutputStream(new BlockCompressedOutputStream(indexFile));
-        index.write(los);
-        los.close();
-        final TabixIndex index2 = new TabixIndex(indexFile);
-        Assert.assertEquals(index, index2);
-        // Unfortunately, can't do byte comparison of original file and temp file, because 1) different compression
-        // levels; and more importantly, arbitrary order of bins in bin list.
-    }
-
-    @DataProvider(name = "readWriteTestDataProvider")
-    public Object[][] readWriteTestDataProvider() {
-        return new Object[][]{
-                {SMALL_TABIX_FILE},
-                {BIGGER_TABIX_FILE}
-        };
-    }
-
-    @Test
-    public void testQueryProvidedItemsAmount() {
-        final String VCF = "testdata/htsjdk/tribble/tabix/YRI.trio.2010_07.indel.sites.vcf";
-        // Note that we store only compressed files
-        final File plainTextVcfInputFile = new File(VCF);
-        plainTextVcfInputFile.deleteOnExit();
-        final File plainTextVcfIndexFile = new File(VCF + ".tbi");
-        plainTextVcfIndexFile.deleteOnExit();
-        final File compressedVcfInputFile = new File(VCF + ".gz");
-        final File compressedTbiIndexFile = new File(VCF + ".gz.tbi");
-        final VCFFileReader compressedVcfReader = new VCFFileReader(compressedVcfInputFile, compressedTbiIndexFile);
-
-        //create plain text VCF without "index on the fly" option
-        final VariantContextWriter plainTextVcfWriter = new VariantContextWriterBuilder()
-                .setOptions(VariantContextWriterBuilder.NO_OPTIONS)
-                .setOutputFile(VCF)
-                .build();
-        plainTextVcfWriter.writeHeader(compressedVcfReader.getFileHeader());
-        for (VariantContext vc : compressedVcfReader) {
-            if (vc != null) plainTextVcfWriter.add(vc);
-        }
-        plainTextVcfWriter.close();
-
-        IndexFactory.createTabixIndex(plainTextVcfInputFile,
-                new VCFCodec(),
-                TabixFormat.VCF,
-                new VCFFileReader(plainTextVcfInputFile, false).getFileHeader().getSequenceDictionary()
-        ) // create TabixIndex straight from plaintext VCF
-                .write(plainTextVcfIndexFile); // write it
-
-        final VCFFileReader plainTextVcfReader = new VCFFileReader(plainTextVcfInputFile, plainTextVcfIndexFile);
-        // Now we have both plaintext and compressed VCFs with provided TabixIndex-es and could test their "queryability"
-
-        // magic numbers chosen from just looking in provided VCF file
-        try {
-            // just somewhere in middle of chromosome
-            Assert.assertEquals(42, countIteratedElements(compressedVcfReader.query("1", 868379 - 1, 1006891 + 1)));
-            Assert.assertEquals(42, countIteratedElements(plainTextVcfReader.query("1", 868379 - 1, 1006891 + 1)));
-            // chromosome start
-            Assert.assertEquals(13, countIteratedElements(compressedVcfReader.query("1", 1, 836463 + 1)));
-            Assert.assertEquals(13, countIteratedElements(plainTextVcfReader.query("1", 1, 836463 + 1)));
-            // chromosome end
-            Assert.assertEquals(36, countIteratedElements(compressedVcfReader.query("1", 76690833 - 1, 76837502 + 11111111)));
-            Assert.assertEquals(36, countIteratedElements(plainTextVcfReader.query("1", 76690833 - 1, 76837502 + 11111111)));
-            // a region in the middle of the chromosome with no features
-            Assert.assertEquals(0, countIteratedElements(compressedVcfReader.query("1", 36606472 + 1, 36623523 - 1)));
-            Assert.assertEquals(0, countIteratedElements(plainTextVcfReader.query("1", 36606472 + 1, 36623523 - 1)));
-            // before chromosome
-            Assert.assertEquals(0, countIteratedElements(compressedVcfReader.query("1", 1, 10)));
-            Assert.assertEquals(0, countIteratedElements(plainTextVcfReader.query("1", 1, 10)));
-            // after chromosome
-            Assert.assertEquals(0, countIteratedElements(compressedVcfReader.query("1", 76837502 * 15, 76837502 * 16)));
-            Assert.assertEquals(0, countIteratedElements(plainTextVcfReader.query("1", 76837502 * 15, 76837502 * 16)));
-        } catch (NullPointerException e) {
-            Assert.fail("Exception caught on querying: ", e);
-            // before fix exception was thrown from 'TabixIndex.getBlocks()' on 'chunks.size()' while 'chunks == null' for plain files
-        } finally {
-            plainTextVcfReader.close();
-            compressedVcfReader.close();
-        }
-    }
-
-    private static int countIteratedElements(Iterator iterator) {
-        int counter = 0;
-        while (iterator.hasNext()) {
-            iterator.next();
-            counter++;
-        }
-        return counter;
-    }
-}
diff --git a/src/tests/java/htsjdk/tribble/readers/AsynchronousLineReaderTest.java b/src/tests/java/htsjdk/tribble/readers/AsynchronousLineReaderTest.java
deleted file mode 100644
index 78a0e3b..0000000
--- a/src/tests/java/htsjdk/tribble/readers/AsynchronousLineReaderTest.java
+++ /dev/null
@@ -1,34 +0,0 @@
-package htsjdk.tribble.readers;
-
-import htsjdk.tribble.TestUtils;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.InputStreamReader;
-
-/**
- * @author mccowan
- */
-public class AsynchronousLineReaderTest {
-
-        /**
-         * Test that we read the correct number of lines
-         * from a file
-         * @throws Exception
-         */
-        @Test
-        public void testReadLines() throws Exception {
-            final File filePath = new File(TestUtils.DATA_DIR + "large.txt");
-            final AsynchronousLineReader reader = new AsynchronousLineReader(new InputStreamReader( new FileInputStream(filePath)));
-            final BufferedReader br = new BufferedReader(new InputStreamReader( new FileInputStream(filePath)));
-
-            String nextLine;
-            while((nextLine = br.readLine()) != null){
-                Assert.assertEquals(nextLine, reader.readLine());
-            }
-            Assert.assertNull(reader.readLine());
-        }
-}
diff --git a/src/tests/java/htsjdk/tribble/readers/LineReaderUtilTest.java b/src/tests/java/htsjdk/tribble/readers/LineReaderUtilTest.java
deleted file mode 100644
index c3d9bb4..0000000
--- a/src/tests/java/htsjdk/tribble/readers/LineReaderUtilTest.java
+++ /dev/null
@@ -1,27 +0,0 @@
-package htsjdk.tribble.readers;
-
-import htsjdk.tribble.TestUtils;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.InputStreamReader;
-
-/**
- * @author mccowan
- */
-public class LineReaderUtilTest {
-    @Test
-    public void testLineReaderIterator() throws Exception {
-        final File filePath = new File(TestUtils.DATA_DIR + "gwas/smallp.gwas");
-        final LineIterator lineIterator = new LineIteratorImpl(LineReaderUtil.fromBufferedStream(new PositionalBufferedStream(new FileInputStream(filePath))));
-        final BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(filePath)));
-
-        while (lineIterator.hasNext()) {
-            Assert.assertEquals(lineIterator.next(), br.readLine());
-        }
-        Assert.assertNull(br.readLine());
-    }
-}
diff --git a/src/tests/java/htsjdk/variant/PrintVariantsExampleTest.java b/src/tests/java/htsjdk/variant/PrintVariantsExampleTest.java
deleted file mode 100644
index 8bb95c5..0000000
--- a/src/tests/java/htsjdk/variant/PrintVariantsExampleTest.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-*
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-*
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-*
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package htsjdk.variant;
-
-import htsjdk.samtools.util.IOUtil;
-import htsjdk.variant.example.PrintVariantsExample;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.util.List;
-import java.util.OptionalInt;
-import java.util.stream.IntStream;
-
-public class PrintVariantsExampleTest {
-    @Test
-    public void testExampleWriteFile() throws IOException {
-        final File tempFile = File.createTempFile("example", ".vcf");
-        tempFile.deleteOnExit();
-        File f1 = new File("testdata/htsjdk/variant/ILLUMINA.wex.broad_phase2_baseline.20111114.both.exome.genotypes.1000.vcf");
-        final String[] args = {
-                f1.getAbsolutePath(),
-                tempFile.getAbsolutePath()
-        };
-        Assert.assertEquals(tempFile.length(), 0);
-        PrintVariantsExample.main(args);
-        Assert.assertNotEquals(tempFile.length(), 0);
-
-        assertFilesEqualSkipHeaders(tempFile, f1);
-    }
-
-    private void assertFilesEqualSkipHeaders(File tempFile, File f1) throws FileNotFoundException {
-        final List<String> lines1 = IOUtil.slurpLines(f1);
-        final List<String> lines2 = IOUtil.slurpLines(tempFile);
-        final int firstNonComment1 = IntStream.range(0, lines1.size()).filter(i -> !lines1.get(i).startsWith("#")).findFirst().getAsInt();
-        final int firstNonComment2 = IntStream.range(0, lines2.size()).filter(i -> !lines2.get(i).startsWith("#")).findFirst().getAsInt();
-        Assert.assertEquals(lines1.subList(firstNonComment1, lines1.size()), lines2.subList(firstNonComment2,lines2.size()));
-    }
-}
diff --git a/src/tests/java/htsjdk/variant/VariantBaseTest.java b/src/tests/java/htsjdk/variant/VariantBaseTest.java
deleted file mode 100644
index f70bc27..0000000
--- a/src/tests/java/htsjdk/variant/VariantBaseTest.java
+++ /dev/null
@@ -1,278 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package htsjdk.variant;
-
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.SAMSequenceRecord;
-import htsjdk.variant.variantcontext.Genotype;
-import htsjdk.variant.variantcontext.VariantContext;
-import htsjdk.variant.vcf.VCFConstants;
-import org.testng.Assert;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-/**
- * Base class for test classes within org.broadinstitute.variant
- */
-public class VariantBaseTest {
-
-    public static final String variantTestDataRoot = new File("testdata/htsjdk/variant/").getAbsolutePath() + "/";
-
-    /**
-     * Creates a temp file that will be deleted on exit after tests are complete.
-     * @param name Prefix of the file.
-     * @param extension Extension to concat to the end of the file.
-     * @return A file in the temporary directory starting with name, ending with extension, which will be deleted after the program exits.
-     */
-    public static File createTempFile(String name, String extension) {
-        try {
-            File file = File.createTempFile(name, extension);
-            file.deleteOnExit();
-            return file;
-        } catch (IOException ex) {
-            throw new RuntimeException("Cannot create temp file: " + ex.getMessage(), ex);
-        }
-    }
-
-    private static final double DEFAULT_FLOAT_TOLERANCE = 1e-1;
-
-    public static final void assertEqualsDoubleSmart(final Object actual, final Double expected) {
-        Assert.assertTrue(actual instanceof Double, "Not a double");
-        assertEqualsDoubleSmart((double)(Double)actual, (double)expected);
-    }
-
-    public static final void assertEqualsDoubleSmart(final Object actual, final Double expected, final double tolerance) {
-        Assert.assertTrue(actual instanceof Double, "Not a double");
-        assertEqualsDoubleSmart((double)(Double)actual, (double)expected, tolerance);
-    }
-
-    public static final void assertEqualsDoubleSmart(final double actual, final double expected) {
-        assertEqualsDoubleSmart(actual, expected, DEFAULT_FLOAT_TOLERANCE);
-    }
-
-    public static final <T> void assertEqualsSet(final Set<T> actual, final Set<T> expected, final String info) {
-        final Set<T> actualSet = new HashSet<T>(actual);
-        final Set<T> expectedSet = new HashSet<T>(expected);
-        Assert.assertTrue(actualSet.equals(expectedSet), info); // note this is necessary due to testng bug for set comps
-    }
-
-    public static void assertEqualsDoubleSmart(final double actual, final double expected, final double tolerance) {
-        assertEqualsDoubleSmart(actual, expected, tolerance, null);
-    }
-
-    public static void assertEqualsDoubleSmart(final double actual, final double expected, final double tolerance, final String message) {
-        if ( Double.isNaN(expected) ) // NaN == NaN => false unfortunately
-            Assert.assertTrue(Double.isNaN(actual), "expected is nan, actual is not");
-        else if ( Double.isInfinite(expected) ) // likewise, infinities must be compared explicitly
-            Assert.assertTrue(Double.isInfinite(actual), "expected is infinite, actual is not");
-        else {
-            final double delta = Math.abs(actual - expected);
-            final double ratio = Math.abs(actual / expected - 1.0);
-            Assert.assertTrue(delta < tolerance || ratio < tolerance, "expected = " + expected + " actual = " + actual
-                    + " not within tolerance " + tolerance
-                    + (message == null ? "" : "message: " + message));
-        }
-    }
-
-    public static SAMSequenceDictionary createArtificialSequenceDictionary() {
-        final int[] contigLengths = { 249250621, 243199373, 198022430, 191154276, 180915260, 171115067, 159138663, 146364022,
-                                      141213431, 135534747, 135006516, 133851895, 115169878, 107349540, 102531392, 90354753,
-                                      81195210, 78077248, 59128983, 63025520, 48129895, 51304566, 155270560, 59373566, 16569 };
-        List<SAMSequenceRecord> contigs = new ArrayList<SAMSequenceRecord>();
-
-        for ( int contig = 1; contig <= 22; contig++ ) {
-            contigs.add(new SAMSequenceRecord(Integer.toString(contig), contigLengths[contig - 1]));
-        }
-
-        int position = 22;
-        for ( String contigName : Arrays.asList("X", "Y", "MT") ) {
-            contigs.add(new SAMSequenceRecord(contigName, contigLengths[position]));
-            position++;
-        }
-
-        return new SAMSequenceDictionary(contigs);
-    }
-
-    /**
-     * Asserts that the two provided VariantContext objects are equal.
-     *
-     * @param actual actual VariantContext object
-     * @param expected expected VariantContext to compare against
-     */
-    public static void assertVariantContextsAreEqual( final VariantContext actual, final VariantContext expected ) {
-        Assert.assertNotNull(actual, "VariantContext expected not null");
-        Assert.assertEquals(actual.getContig(), expected.getContig(), "chr");
-        Assert.assertEquals(actual.getStart(), expected.getStart(), "start");
-        Assert.assertEquals(actual.getEnd(), expected.getEnd(), "end");
-        Assert.assertEquals(actual.getID(), expected.getID(), "id");
-        Assert.assertEquals(actual.getAlleles(), expected.getAlleles(), "alleles for " + expected + " vs " + actual);
-
-        assertAttributesEquals(actual.getAttributes(), expected.getAttributes());
-        Assert.assertEquals(actual.filtersWereApplied(), expected.filtersWereApplied(), "filtersWereApplied");
-        Assert.assertEquals(actual.isFiltered(), expected.isFiltered(), "isFiltered");
-        assertEqualsSet(actual.getFilters(), expected.getFilters(), "filters");
-        assertEqualsDoubleSmart(actual.getPhredScaledQual(), expected.getPhredScaledQual());
-
-        Assert.assertEquals(actual.hasGenotypes(), expected.hasGenotypes(), "hasGenotypes");
-        if ( expected.hasGenotypes() ) {
-            assertEqualsSet(actual.getSampleNames(), expected.getSampleNames(), "sample names set");
-            Assert.assertEquals(actual.getSampleNamesOrderedByName(), expected.getSampleNamesOrderedByName(), "sample names");
-            final Set<String> samples = expected.getSampleNames();
-            for ( final String sample : samples ) {
-                assertGenotypesAreEqual(actual.getGenotype(sample), expected.getGenotype(sample));
-            }
-        }
-    }
-
-    /**
-     * Asserts that the two provided Genotype objects are equal.
-     *
-     * @param actual actual Genotype object
-     * @param expected expected Genotype object to compare against
-     */
-    public static void assertGenotypesAreEqual(final Genotype actual, final Genotype expected) {
-        Assert.assertEquals(actual.getSampleName(), expected.getSampleName(), "Genotype names");
-        Assert.assertEquals(actual.getAlleles(), expected.getAlleles(), "Genotype alleles");
-        Assert.assertEquals(actual.getGenotypeString(), expected.getGenotypeString(), "Genotype string");
-        Assert.assertEquals(actual.getType(), expected.getType(), "Genotype type");
-
-        // filters are the same
-        Assert.assertEquals(actual.getFilters(), expected.getFilters(), "Genotype filters");
-        Assert.assertEquals(actual.isFiltered(), expected.isFiltered(), "Genotype isFiltered");
-
-        // inline attributes
-        Assert.assertEquals(actual.getDP(), expected.getDP(), "Genotype dp");
-        Assert.assertTrue(Arrays.equals(actual.getAD(), expected.getAD()));
-        Assert.assertEquals(actual.getGQ(), expected.getGQ(), "Genotype gq");
-        Assert.assertEquals(actual.hasPL(), expected.hasPL(), "Genotype hasPL");
-        Assert.assertEquals(actual.hasAD(), expected.hasAD(), "Genotype hasAD");
-        Assert.assertEquals(actual.hasGQ(), expected.hasGQ(), "Genotype hasGQ");
-        Assert.assertEquals(actual.hasDP(), expected.hasDP(), "Genotype hasDP");
-
-        Assert.assertEquals(actual.hasLikelihoods(), expected.hasLikelihoods(), "Genotype haslikelihoods");
-        Assert.assertEquals(actual.getLikelihoodsString(), expected.getLikelihoodsString(), "Genotype getlikelihoodsString");
-        Assert.assertEquals(actual.getLikelihoods(), expected.getLikelihoods(), "Genotype getLikelihoods");
-        Assert.assertTrue(Arrays.equals(actual.getPL(), expected.getPL()));
-
-        Assert.assertEquals(actual.getGQ(), expected.getGQ(), "Genotype phredScaledQual");
-        assertAttributesEquals(actual.getExtendedAttributes(), expected.getExtendedAttributes());
-        Assert.assertEquals(actual.isPhased(), expected.isPhased(), "Genotype isPhased");
-        Assert.assertEquals(actual.getPloidy(), expected.getPloidy(), "Genotype getPloidy");
-    }
-
-    /**
-     * Asserts that the two sets of attribute mappings are equal. Ignores null-valued attributes in
-     * "actual" that are not present in "expected" while performing the comparison.
-     *
-     * @param actual actual mapping of attributes
-     * @param expected expected mapping of attributes
-     */
-    private static void assertAttributesEquals(final Map<String, Object> actual, Map<String, Object> expected) {
-        final Set<String> expectedKeys = new HashSet<String>(expected.keySet());
-
-        for ( final Map.Entry<String, Object> act : actual.entrySet() ) {
-            final Object actualValue = act.getValue();
-            if ( expected.containsKey(act.getKey()) && expected.get(act.getKey()) != null ) {
-                final Object expectedValue = expected.get(act.getKey());
-                if ( expectedValue instanceof List ) {
-                    final List<Object> expectedList = (List<Object>)expectedValue;
-                    Assert.assertTrue(actualValue instanceof List, act.getKey() + " should be a list but isn't");
-                    final List<Object> actualList = (List<Object>)actualValue;
-                    Assert.assertEquals(actualList.size(), expectedList.size(), act.getKey() + " size");
-                    for ( int i = 0; i < expectedList.size(); i++ ) {
-                        assertAttributeEquals(act.getKey(), actualList.get(i), expectedList.get(i));
-                    }
-                }
-                else {
-                    assertAttributeEquals(act.getKey(), actualValue, expectedValue);
-                }
-            }
-            else {
-                // it's ok to have a binding in x -> null that's absent in y
-                Assert.assertNull(actualValue, act.getKey() + " present in one but not in the other");
-            }
-            expectedKeys.remove(act.getKey());
-        }
-
-        // now expectedKeys contains only the keys found in expected but not in actual,
-        // and they must all be null
-        for ( final String missingExpected : expectedKeys ) {
-            final Object value = expected.get(missingExpected);
-            Assert.assertTrue(isMissingAttribute(value), "Attribute " + missingExpected + " missing in one but not in other" );
-        }
-    }
-
-    /**
-     * Asserts that the two provided attribute values are equal. If the values are Doubles, uses a
-     * more lenient comparison with a tolerance of 1e-2.
-     *
-     * @param key key for the attribute values
-     * @param actual actual attribute value
-     * @param expected expected attribute value against which to compare
-     */
-    private static void assertAttributeEquals(final String key, final Object actual, final Object expected) {
-        if ( expected instanceof Double ) {
-            // must be very tolerant because doubles are being rounded to 2 sig figs
-            assertEqualsDoubleSmart(actual, (Double) expected, 1e-2);
-        }
-        else {
-            Assert.assertEquals(actual, expected, "Attribute " + key);
-        }
-    }
-
-    /**
-     * Determines whether the provided attribute value is missing according to the VCF spec.
-     * An attribute value is missing if it's null, is equal to {@link VCFConstants#MISSING_VALUE_v4},
-     * or if it's a List that is either empty or contains only null values.
-     *
-     * @param value attribute value to test
-     * @return true if value is a missing VCF attribute value, otherwise false
-     */
-    private static boolean isMissingAttribute(final Object value) {
-        if ( value == null || value.equals(VCFConstants.MISSING_VALUE_v4) ) {
-            return true;
-        }
-        else if ( value instanceof List ) {
-            // handles the case where all elements are null or the list is empty
-            for ( final Object elt : (List)value) {
-                if (elt != null) {
-                    return false;
-                }
-            }
-            return true;
-        }
-
-        return false;
-    }
-
-}
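
For orientation only (not part of the patch): a minimal sketch of how a TestNG class built on the deleted VariantBaseTest helpers is typically written. VariantBaseTest, createTempFile and assertEqualsDoubleSmart come from the file above; the example class and its values are hypothetical.

    import htsjdk.variant.VariantBaseTest;
    import org.testng.annotations.Test;
    import java.io.File;

    public class ExampleVariantTest extends VariantBaseTest {
        @Test
        public void testTolerantDoubleComparison() {
            // passes: |30.0001 - 30.0| is well within the default tolerance of 1e-1
            assertEqualsDoubleSmart(30.0001, 30.0);
        }

        @Test
        public void testTempFileCreation() {
            // the helper registers the file with deleteOnExit(), so no manual cleanup is needed
            final File tmp = createTempFile("example", ".vcf");
        }
    }
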
diff --git a/src/tests/java/htsjdk/variant/utils/SAMSequenceDictionaryExtractorTest.java b/src/tests/java/htsjdk/variant/utils/SAMSequenceDictionaryExtractorTest.java
deleted file mode 100644
index 5e56587..0000000
--- a/src/tests/java/htsjdk/variant/utils/SAMSequenceDictionaryExtractorTest.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2014 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.variant.utils;
-
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.util.SequenceUtil;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-import org.testng.Assert;
-
-import java.io.File;
-
-/**
- * @author farjoun on 4/9/14.
- */
-public class SAMSequenceDictionaryExtractorTest {
-    String path = "testdata/htsjdk/variant/utils/SamSequenceDictionaryExtractor/";
-
-    @DataProvider(name = "testExtractDictionaries")
-    public Object[][] dictionaries() {
-        return new Object[][]{
-                new Object[]{"test1_comp.interval_list", "test1.dict"},
-                new Object[]{"test1.vcf", "test1.dict"},
-                new Object[]{"test1.dict", "test1.dict"},
-                new Object[]{"empty.interval_list", "test1.dict"},
-                new Object[]{"Homo_sapiens_assembly18.trimmed.fasta", "Homo_sapiens_assembly18.trimmed.dict"},
-                new Object[]{"test2_comp.interval_list", "Homo_sapiens_assembly18.trimmed.dict"},
-                new Object[]{"ScreenSamReads.100.input.sam", "test3_comp.interval_list"},
-                new Object[]{"ScreenSamReads.100.input.sam", "test4_comp.interval_list"},
-        };
-    }
-
-    @Test(dataProvider = "testExtractDictionaries")
-    public void testExtractDictionary(final String dictSource, final String dictExpected) throws Exception {
-        final File dictSourceFile = new File(path, dictSource);
-        final File dictExpectedFile = new File(path, dictExpected);
-        final SAMSequenceDictionary dict1 = SAMSequenceDictionaryExtractor.extractDictionary(dictSourceFile);
-        final SAMSequenceDictionary dict2 = SAMSequenceDictionaryExtractor.extractDictionary(dictExpectedFile);
-
-        Assert.assertTrue(SequenceUtil.areSequenceDictionariesEqual(dict1,
-                dict2));
-        Assert.assertTrue(dict1.md5().equals(dict2.md5()));
-    }
-}
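
The deleted test above exercises SAMSequenceDictionaryExtractor.extractDictionary(File) against several input types (interval_list, VCF, dict, FASTA, SAM). A small stand-alone sketch of the same comparison, using only calls that appear in the test; the file paths are placeholders.

    import htsjdk.samtools.SAMSequenceDictionary;
    import htsjdk.samtools.util.SequenceUtil;
    import htsjdk.variant.utils.SAMSequenceDictionaryExtractor;
    import java.io.File;

    public class DictionaryCompare {
        public static void main(final String[] args) {
            // extract a sequence dictionary from two differently typed sources, e.g. a VCF and a .dict
            final SAMSequenceDictionary a = SAMSequenceDictionaryExtractor.extractDictionary(new File(args[0]));
            final SAMSequenceDictionary b = SAMSequenceDictionaryExtractor.extractDictionary(new File(args[1]));
            // same equality check the test asserts on
            System.out.println(SequenceUtil.areSequenceDictionariesEqual(a, b));
        }
    }
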
diff --git a/src/tests/java/htsjdk/variant/variantcontext/GenotypeLikelihoodsUnitTest.java b/src/tests/java/htsjdk/variant/variantcontext/GenotypeLikelihoodsUnitTest.java
deleted file mode 100644
index b513396..0000000
--- a/src/tests/java/htsjdk/variant/variantcontext/GenotypeLikelihoodsUnitTest.java
+++ /dev/null
@@ -1,336 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package htsjdk.variant.variantcontext;
-
-
-// the imports for unit testing.
-
-
-import htsjdk.tribble.TribbleException;
-import htsjdk.variant.VariantBaseTest;
-import htsjdk.variant.utils.GeneralUtils;
-import org.testng.Assert;
-import org.testng.annotations.BeforeTest;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.util.Arrays;
-import java.util.EnumMap;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-
-/**
- * Basic unit test for Genotype likelihoods objects
- */
-public class GenotypeLikelihoodsUnitTest extends VariantBaseTest {
-    double [] v = new double[]{-10.5, -1.25, -5.11};
-    final static String vGLString = "-10.50,-1.25,-5.11";
-    final static String vPLString = "93,0,39";
-    double[] triAllelic = new double[]{-4.2,-2.0,-3.0,-1.6,0.0,-4.0}; //AA,AB,AC,BB,BC,CC
-
-    @BeforeTest
-    public void initializeAnyploidPLIndexToAlleleIndices() {
-        GenotypeLikelihoods.initializeAnyploidPLIndexToAlleleIndices(1, 1);
-        GenotypeLikelihoods.initializeAnyploidPLIndexToAlleleIndices(2, 2);
-        GenotypeLikelihoods.initializeAnyploidPLIndexToAlleleIndices(2, 3);
-    }
-
-    @Test
-    public void testFromVector2() {
-        GenotypeLikelihoods gl = GenotypeLikelihoods.fromLog10Likelihoods(v);
-        assertDoubleArraysAreEqual(gl.getAsVector(), v);
-        Assert.assertEquals(gl.getAsString(), vPLString);
-    }
-
-    @Test
-    public void testFromString1() {
-        GenotypeLikelihoods gl = GenotypeLikelihoods.fromPLField(vPLString);
-        assertDoubleArraysAreEqual(gl.getAsVector(), new double[]{-9.3, 0, -3.9});
-        Assert.assertEquals(gl.getAsString(), vPLString);
-    }
-
-    @Test
-    public void testFromString2() {
-        GenotypeLikelihoods gl = GenotypeLikelihoods.fromGLField(vGLString);
-        assertDoubleArraysAreEqual(gl.getAsVector(), v);
-        Assert.assertEquals(gl.getAsString(), vPLString);
-    }
-
-    @Test (expectedExceptions = TribbleException.class)
-    public void testErrorBadFormat() {
-        GenotypeLikelihoods gl = GenotypeLikelihoods.fromPLField("adf,b,c");
-        gl.getAsVector();
-    }
-
-    @Test
-    public void testGetAsMap(){
-        GenotypeLikelihoods gl = GenotypeLikelihoods.fromLog10Likelihoods(v);
-        //Log scale
-        EnumMap<GenotypeType,Double> glMap = gl.getAsMap(false);
-        Assert.assertEquals(v[GenotypeType.HOM_REF.ordinal()-1],glMap.get(GenotypeType.HOM_REF));
-        Assert.assertEquals(v[GenotypeType.HET.ordinal()-1],glMap.get(GenotypeType.HET));
-        Assert.assertEquals(v[GenotypeType.HOM_VAR.ordinal()-1],glMap.get(GenotypeType.HOM_VAR));
-
-        //Linear scale
-        glMap = gl.getAsMap(true);
-        double [] vl = GeneralUtils.normalizeFromLog10(v);
-        Assert.assertEquals(vl[GenotypeType.HOM_REF.ordinal()-1],glMap.get(GenotypeType.HOM_REF));
-        Assert.assertEquals(vl[GenotypeType.HET.ordinal()-1],glMap.get(GenotypeType.HET));
-        Assert.assertEquals(vl[GenotypeType.HOM_VAR.ordinal()-1],glMap.get(GenotypeType.HOM_VAR));
-
-        //Test missing likelihoods
-        gl = GenotypeLikelihoods.fromPLField(".");
-        glMap = gl.getAsMap(false);
-        Assert.assertNull(glMap);
-
-    }
-
-    @Test
-    public void testCalculateNumLikelihoods() {    
-        
-        for (int nAlleles=2; nAlleles<=5; nAlleles++)
-            // simplest case: diploid
-            Assert.assertEquals(GenotypeLikelihoods.numLikelihoods(nAlleles, 2), nAlleles*(nAlleles+1)/2);
-
-        // some special cases: ploidy = 20, #alleles = 4
-        Assert.assertEquals(GenotypeLikelihoods.numLikelihoods(4, 20), 1771);
-    }
-    
-    @Test
-    public void testGetLog10GQ(){
-        GenotypeLikelihoods gl = GenotypeLikelihoods.fromPLField(vPLString);
-
-        //GQ for the best guess genotype
-        Assert.assertEquals(gl.getLog10GQ(GenotypeType.HET),-3.9);
-
-        double[] test = GeneralUtils.normalizeFromLog10(gl.getAsVector());
-
-        //GQ for the other genotypes
-        Assert.assertEquals(gl.getLog10GQ(GenotypeType.HOM_REF), Math.log10(1.0 - test[GenotypeType.HOM_REF.ordinal()-1]));
-        Assert.assertEquals(gl.getLog10GQ(GenotypeType.HOM_VAR), Math.log10(1.0 - test[GenotypeType.HOM_VAR.ordinal()-1]));
-
-       //Test missing likelihoods
-        gl = GenotypeLikelihoods.fromPLField(".");
-        Assert.assertEquals(gl.getLog10GQ(GenotypeType.HOM_REF),Double.NEGATIVE_INFINITY);
-        Assert.assertEquals(gl.getLog10GQ(GenotypeType.HET),Double.NEGATIVE_INFINITY);
-        Assert.assertEquals(gl.getLog10GQ(GenotypeType.HOM_VAR),Double.NEGATIVE_INFINITY);
-
-    }
-
-    @Test
-    public void testgetQualFromLikelihoods() {
-        double[] likelihoods = new double[]{-1, 0, -2};
-        // qual values we expect for each possible "best" genotype
-        double[] expectedQuals = new double[]{-0.04100161, -1, -0.003930294};
-
-        for ( int i = 0; i < likelihoods.length; i++ ) {
-            Assert.assertEquals(GenotypeLikelihoods.getGQLog10FromLikelihoods(i, likelihoods), expectedQuals[i], 1e-6,
-                    "GQ value for genotype " + i + " was not calculated correctly");
-        }
-    }
-
-    // this test is completely broken, the method is wrong.
-    public void testGetQualFromLikelihoodsMultiAllelicBroken() {
-        GenotypeLikelihoods gl = GenotypeLikelihoods.fromLog10Likelihoods(triAllelic);
-        double actualGQ = gl.getLog10GQ(GenotypeType.HET);
-        double expectedGQ = 1.6;
-        Assert.assertEquals(actualGQ,expectedGQ);
-    }
-
-    public void testGetQualFromLikelihoodsMultiAllelic() {
-        GenotypeLikelihoods gl = GenotypeLikelihoods.fromLog10Likelihoods(triAllelic);
-        Allele ref = Allele.create((byte)'A',true);
-        Allele alt1 = Allele.create((byte)'C');
-        Allele alt2 = Allele.create((byte)'T');
-        List<Allele> allAlleles = Arrays.asList(ref,alt1,alt2);
-        List<Allele> gtAlleles = Arrays.asList(alt1,alt2);
-        GenotypeBuilder gtBuilder = new GenotypeBuilder();
-        gtBuilder.alleles(gtAlleles);
-        double actualGQ = gl.getLog10GQ(gtBuilder.make(),allAlleles);
-        double expectedGQ = 1.6;
-        Assert.assertEquals(actualGQ,expectedGQ);
-    }
-
-    private void assertDoubleArraysAreEqual(double[] v1, double[] v2) {
-        Assert.assertEquals(v1.length, v2.length);
-        for ( int i = 0; i < v1.length; i++ ) {
-            Assert.assertEquals(v1[i], v2[i], 1e-6);
-        }
-    }
-
-    @Test
-    public void testCalculatePLindex(){
-        int counter = 0;
-        for ( int i = 0; i <= 3; i++ ) {
-            for ( int j = i; j <= 3; j++ ) {
-                Assert.assertEquals(GenotypeLikelihoods.calculatePLindex(i, j), GenotypeLikelihoods.PLindexConversion[counter++], "PL index of alleles " + i + "," + j + " was not calculated correctly");
-            }
-        }
-    }
-
-    @DataProvider
-    public Object[][] testGetAllelePairData() {
-        return new Object[][]{
-                {0, 0, 0},
-                {1, 0, 1},
-                {2, 1, 1},
-                {3, 0, 2},
-                {4, 1, 2},
-                {5, 2, 2},
-                {6, 0, 3},
-                {7, 1, 3},
-                {8, 2, 3},
-                {9, 3, 3}
-        };
-    }
-
-    @Test(dataProvider = "testGetAllelePairData")
-    public void testGetAllelePair(final int PLindex, final int allele1, final int allele2) {
-        Assert.assertEquals(GenotypeLikelihoods.getAllelePair(PLindex).alleleIndex1, allele1, "allele index " + allele1 + " from PL index " + PLindex + " was not calculated correctly");
-        Assert.assertEquals(GenotypeLikelihoods.getAllelePair(PLindex).alleleIndex2, allele2, "allele index " + allele2 + " from PL index " + PLindex + " was not calculated correctly");
-    }
-
-    @DataProvider
-    public Object[][] testCalculateAnyploidPLcacheData() {
-        return new Object[][]{
-                {
-                        1, 1,
-                        Arrays.asList(Arrays.asList(0),
-                                Arrays.asList(1)
-                        )
-                },
-                {
-                        2, 2,
-                        Arrays.asList(Arrays.asList(0, 0),
-                                Arrays.asList(0, 1),
-                                Arrays.asList(1, 1),
-                                Arrays.asList(0, 2),
-                                Arrays.asList(1, 2),
-                                Arrays.asList(2, 2)
-                        )
-                },
-                {
-                        2, 3,
-                        Arrays.asList(Arrays.asList(0, 0, 0),
-                                Arrays.asList(0, 0, 1),
-                                Arrays.asList(0, 1, 1),
-                                Arrays.asList(1, 1, 1),
-                                Arrays.asList(0, 0, 2),
-                                Arrays.asList(0, 1, 2),
-                                Arrays.asList(1, 1, 2),
-                                Arrays.asList(0, 2, 2),
-                                Arrays.asList(1, 2, 2),
-                                Arrays.asList(2, 2, 2)
-                        )
-                },
-                {
-                        2, -1,
-                        Arrays.asList(Arrays.asList())
-                },
-                {
-                        -1, 2,
-                        Arrays.asList(Arrays.asList())
-                }
-        };
-    }
-
-    @Test(dataProvider = "testCalculateAnyploidPLcacheData")
-    public void testCalculateAnyploidPLcache(final int altAlleles, final int ploidy, final List<List<Integer>> expected) {
-        List<List<Integer>> anyploidPLIndexToAlleleIndices = GenotypeLikelihoods.calculateAnyploidPLcache(altAlleles, ploidy);
-        for ( int i=0; i < anyploidPLIndexToAlleleIndices.size(); i++ )
-            Assert.assertEquals(anyploidPLIndexToAlleleIndices.get(i), expected.get(i));
-    }
-
-    @Test(dataProvider = "testCalculateAnyploidPLcacheData")
-    public void testInitializeAnyploidPLIndexToAlleleIndices(final int altAlleles, final int ploidy, final List<List<Integer>> expected) {
-        if ( altAlleles >= 1 && ploidy >= 1 ) { // Bypass test with bad data
-            Map<Integer, List<List<Integer>>> expectedMap = new HashMap<Integer, List<List<Integer>>>();
-            expectedMap.put(ploidy, expected);
-            for (Map.Entry<Integer, List<List<Integer>>> entry : GenotypeLikelihoods.anyploidPloidyToPLIndexToAlleleIndices.entrySet()) {
-                if (expectedMap.containsKey(entry.getKey()))
-                    Assert.assertEquals(entry.getValue(), expectedMap.get(entry.getKey()));
-            }
-        }
-    }
-
-    @DataProvider
-    public Object[][] testInitializeAnyploidPLIndexToAlleleIndiceseBadData() {
-        return new Object[][]{
-                { 2, -1 },
-                { -1, 2 }
-        };
-    }
-
-    @Test(dataProvider = "testInitializeAnyploidPLIndexToAlleleIndiceseBadData", expectedExceptions = IllegalArgumentException.class)
-    public void testInitializeAnyploidPLIndexToAlleleIndicesBadData(final int altAlleles, final int ploidy) {
-        GenotypeLikelihoods.initializeAnyploidPLIndexToAlleleIndices(altAlleles, ploidy);
-    }
-
-    @DataProvider
-    public Object[][] testGetAllelesData() {
-        return new Object[][]{
-                {0, 2, 3, Arrays.asList(0,0,0)},
-                {1, 2, 3, Arrays.asList(0,0,1)},
-                {2, 2, 3, Arrays.asList(0,1,1)},
-                {3, 2, 3, Arrays.asList(1,1,1)},
-                {4, 2, 3, Arrays.asList(0,0,2)},
-                {5, 2, 3, Arrays.asList(0,1,2)},
-                {6, 2, 3, Arrays.asList(1,1,2)},
-                {7, 2, 3, Arrays.asList(0,2,2)},
-                {8, 2, 3, Arrays.asList(1,2,2)},
-                {9, 2, 3, Arrays.asList(2,2,2)},
-                {1, 2, 1, Arrays.asList(1)}
-        };
-    }
-
-    @Test(dataProvider = "testGetAllelesData")
-    public void testGetAlleles(final int PLindex, final int altAlleles, final int ploidy, final List<Integer> expected ) {
-        Assert.assertEquals(GenotypeLikelihoods.getAlleles(PLindex, ploidy), expected);
-    }
-
-    @DataProvider
-    public Object[][] testGetAllelesIndexOutOfBoundsData() {
-        return new Object[][]{
-                {-1, 3},  // PL index too small, non-diploid
-                {10, 3},  // PL index too large, non-diploid
-                {-1, 2},  // PL index too small, diploid
-                {GenotypeLikelihoods.numLikelihoods(GenotypeLikelihoods.MAX_DIPLOID_ALT_ALLELES_THAT_CAN_BE_GENOTYPED+1,2), 2} // PL index too large, diploid
-        };
-    }
-
-    @Test(dataProvider = "testGetAllelesIndexOutOfBoundsData", expectedExceptions = IllegalStateException.class)
-    public void testGetAllelesOutOfBounds(final int PLindex, final int ploidy) {
-        final List<Integer> alleles = GenotypeLikelihoods.getAlleles(PLindex, ploidy);
-    }
-
-    @Test(expectedExceptions = IllegalStateException.class)
-    public void testGetAllelesUninitialized() {
-        GenotypeLikelihoods.anyploidPloidyToPLIndexToAlleleIndices.clear();
-        final List<Integer> alleles = GenotypeLikelihoods.getAlleles(0, 3);
-    }
-}
\ No newline at end of file
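
The constants vGLString ("-10.50,-1.25,-5.11") and vPLString ("93,0,39") in the deleted test follow the usual GL-to-PL normalization, PL[i] = round(-10 * (GL[i] - max(GL))). A stand-alone sketch (illustrative, not htsjdk code) that reproduces those numbers:

    import java.util.Arrays;

    public class GlToPl {
        public static void main(final String[] args) {
            // log10 genotype likelihoods for AA, AB, BB from the test above
            final double[] gl = { -10.50, -1.25, -5.11 };
            final double max = Arrays.stream(gl).max().getAsDouble();
            final StringBuilder pl = new StringBuilder();
            for (int i = 0; i < gl.length; i++) {
                if (i > 0) pl.append(',');
                // shift so the most likely genotype gets PL 0, then scale to phred
                pl.append(Math.round(-10.0 * (gl[i] - max)));
            }
            System.out.println(pl); // prints 93,0,39
        }
    }
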
diff --git a/src/tests/java/htsjdk/variant/variantcontext/VariantContextTestProvider.java b/src/tests/java/htsjdk/variant/variantcontext/VariantContextTestProvider.java
deleted file mode 100644
index 720c383..0000000
--- a/src/tests/java/htsjdk/variant/variantcontext/VariantContextTestProvider.java
+++ /dev/null
@@ -1,1017 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package htsjdk.variant.variantcontext;
-
-import htsjdk.tribble.FeatureCodec;
-import htsjdk.tribble.FeatureCodecHeader;
-import htsjdk.tribble.Tribble;
-import htsjdk.tribble.readers.LineIterator;
-import htsjdk.tribble.readers.LineIteratorImpl;
-import htsjdk.tribble.readers.LineReaderUtil;
-import htsjdk.tribble.readers.PositionalBufferedStream;
-import htsjdk.variant.VariantBaseTest;
-import htsjdk.variant.bcf2.BCF2Codec;
-import htsjdk.variant.utils.GeneralUtils;
-import htsjdk.variant.variantcontext.writer.Options;
-import htsjdk.variant.variantcontext.writer.VariantContextWriter;
-import htsjdk.variant.vcf.VCFCodec;
-import htsjdk.variant.vcf.VCFConstants;
-import htsjdk.variant.vcf.VCFContigHeaderLine;
-import htsjdk.variant.vcf.VCFFilterHeaderLine;
-import htsjdk.variant.vcf.VCFFormatHeaderLine;
-import htsjdk.variant.vcf.VCFHeader;
-import htsjdk.variant.vcf.VCFHeaderLine;
-import htsjdk.variant.vcf.VCFHeaderLineCount;
-import htsjdk.variant.vcf.VCFHeaderLineType;
-import htsjdk.variant.vcf.VCFInfoHeaderLine;
-
-import org.testng.Assert;
-
-import java.io.BufferedInputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.EnumSet;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeSet;
-
-/**
- * Routines for generating all sorts of VCs for testing
- *
- * @author Your Name
- * @since Date created
- */
-public class VariantContextTestProvider {
-    final private static boolean ENABLE_GENOTYPE_TESTS = true;
-    final private static boolean ENABLE_A_AND_G_TESTS = true;
-    final private static boolean ENABLE_VARARRAY_TESTS = true;
-    final private static boolean ENABLE_PLOIDY_TESTS = true;
-    final private static boolean ENABLE_PL_TESTS = true;
-    final private static boolean ENABLE_SYMBOLIC_ALLELE_TESTS = true;
-    final private static boolean ENABLE_SOURCE_VCF_TESTS = true;
-    final private static boolean ENABLE_VARIABLE_LENGTH_GENOTYPE_STRING_TESTS = true;
-    final private static List<Integer> TWENTY_INTS = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20);
-
-    private static VCFHeader syntheticHeader;
-    final static List<VariantContextTestData> TEST_DATAs = new ArrayList<VariantContextTestData>();
-    private static VariantContext ROOT;
-
-    private final static List<File> testSourceVCFs = new ArrayList<File>();
-    static {
-        testSourceVCFs.add(new File(VariantBaseTest.variantTestDataRoot + "ILLUMINA.wex.broad_phase2_baseline.20111114.both.exome.genotypes.1000.vcf"));
-        testSourceVCFs.add(new File(VariantBaseTest.variantTestDataRoot + "ex2.vcf"));
-        testSourceVCFs.add(new File(VariantBaseTest.variantTestDataRoot + "dbsnp_135.b37.1000.vcf"));
-        if ( ENABLE_SYMBOLIC_ALLELE_TESTS ) {
-            testSourceVCFs.add(new File(VariantBaseTest.variantTestDataRoot + "diagnosis_targets_testfile.vcf"));
-            testSourceVCFs.add(new File(VariantBaseTest.variantTestDataRoot + "VQSR.mixedTest.recal"));
-            testSourceVCFs.add(new File(VariantBaseTest.variantTestDataRoot + "breakpoint.vcf"));
-        }
-    }
-
-    public static class VariantContextContainer {
-        private VCFHeader header;
-        private Iterable<VariantContext> vcs;
-
-        public VariantContextContainer( VCFHeader header, Iterable<VariantContext> vcs ) {
-            this.header = header;
-            this.vcs = vcs;
-        }
-
-        public VCFHeader getHeader() {
-            return header;
-        }
-
-        public Iterable<VariantContext> getVCs() {
-            return vcs;
-        }
-    }
-
-    public abstract static class VariantContextIOTest<CODECTYPE> {
-        public String toString() {
-            return "VariantContextIOTest:" + getExtension();
-        }
-        public abstract String getExtension();
-        public abstract CODECTYPE makeCodec();
-        public abstract VariantContextWriter makeWriter(final File outputFile, final EnumSet<Options> baseOptions);
-
-        public abstract VariantContextContainer readAllVCs(final File input) throws IOException;
-        
-        public List<VariantContext> preprocess(final VCFHeader header, List<VariantContext> vcsBeforeIO) {
-            return vcsBeforeIO;
-        }
-
-        public List<VariantContext> postprocess(final VCFHeader header, List<VariantContext> vcsAfterIO) {
-            return vcsAfterIO;
-        }
-    }
-
-    public static class VariantContextTestData {
-        public final VCFHeader header;
-        public List<VariantContext> vcs;
-
-        public VariantContextTestData(final VCFHeader header, final VariantContextBuilder builder) {
-            this(header, Collections.singletonList(builder.fullyDecoded(true).make()));
-        }
-
-        public VariantContextTestData(final VCFHeader header, final List<VariantContext> vcs) {
-            final Set<String> samples = new HashSet<String>();
-            for ( final VariantContext vc : vcs )
-                if ( vc.hasGenotypes() )
-                    samples.addAll(vc.getSampleNames());
-            this.header = samples.isEmpty() ? header : new VCFHeader(header.getMetaDataInSortedOrder(), samples);
-            this.vcs = vcs;
-        }
-
-        public boolean hasGenotypes() {
-            return vcs.get(0).hasGenotypes();
-        }
-
-        public String toString() {
-            StringBuilder b = new StringBuilder();
-            b.append("VariantContextTestData: [");
-            final VariantContext vc = vcs.get(0);
-            final VariantContextBuilder builder = new VariantContextBuilder(vc);
-            builder.noGenotypes();
-            b.append(builder.make().toString());
-            if ( vc.getNSamples() < 5 ) {
-                for ( final Genotype g : vc.getGenotypes() )
-                    b.append(g.toString());
-            } else {
-                b.append(" nGenotypes = ").append(vc.getNSamples());
-            }
-
-            if ( vcs.size() > 1 ) b.append(" ----- with another ").append(vcs.size() - 1).append(" VariantContext records");
-            b.append("]");
-            return b.toString();
-        }
-    }
-
-    private final static VariantContextBuilder builder() {
-        return new VariantContextBuilder(ROOT);
-    }
-
-    private final static void add(VariantContextBuilder builder) {
-        TEST_DATAs.add(new VariantContextTestData(syntheticHeader, builder));
-    }
-
-    public static void initializeTests() throws IOException {
-        createSyntheticHeader();
-        makeSyntheticTests();
-        makeEmpiricalTests();
-    }
-
-    private static void makeEmpiricalTests() throws IOException {
-        if ( ENABLE_SOURCE_VCF_TESTS ) {
-            for ( final File file : testSourceVCFs ) {
-                VCFCodec codec = new VCFCodec();
-                VariantContextContainer x = readAllVCs( file, codec );
-                List<VariantContext> fullyDecoded = new ArrayList<VariantContext>();
-
-                for ( final VariantContext raw : x.getVCs() ) {
-                    if ( raw != null )
-                        fullyDecoded.add(raw.fullyDecode(x.getHeader(), false));
-                }
-
-                TEST_DATAs.add(new VariantContextTestData(x.getHeader(), fullyDecoded));
-            }
-        }
-    }
-
-    private final static void addHeaderLine(final Set<VCFHeaderLine> metaData, final String id, final int count, final VCFHeaderLineType type) {
-        metaData.add(new VCFInfoHeaderLine(id, count, type, "x"));
-        if ( type != VCFHeaderLineType.Flag )
-            metaData.add(new VCFFormatHeaderLine(id, count, type, "x"));
-    }
-
-    private final static void addHeaderLine(final Set<VCFHeaderLine> metaData, final String id, final VCFHeaderLineCount count, final VCFHeaderLineType type) {
-        metaData.add(new VCFInfoHeaderLine(id, count, type, "x"));
-        if ( type != VCFHeaderLineType.Flag )
-            metaData.add(new VCFFormatHeaderLine(id, count, type, "x"));
-    }
-
-    private static void createSyntheticHeader() {
-        Set<VCFHeaderLine> metaData = new TreeSet<VCFHeaderLine>();
-
-        addHeaderLine(metaData, "STRING1", 1, VCFHeaderLineType.String);
-        addHeaderLine(metaData, "END", 1, VCFHeaderLineType.Integer);
-        addHeaderLine(metaData, "STRING3", 3, VCFHeaderLineType.String);
-        addHeaderLine(metaData, "STRING20", 20, VCFHeaderLineType.String);
-        addHeaderLine(metaData, "VAR.INFO.STRING", VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String);
-
-        addHeaderLine(metaData, "GT", 1, VCFHeaderLineType.String);
-        addHeaderLine(metaData, "GQ", 1, VCFHeaderLineType.Integer);
-        addHeaderLine(metaData, "ADA", VCFHeaderLineCount.A, VCFHeaderLineType.Integer);
-        addHeaderLine(metaData, "PL", VCFHeaderLineCount.G, VCFHeaderLineType.Integer);
-        addHeaderLine(metaData, "GS", 2, VCFHeaderLineType.String);
-        addHeaderLine(metaData, "GV", VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String);
-        addHeaderLine(metaData, "FT", 1, VCFHeaderLineType.String);
-
-        // prep the header
-        metaData.add(new VCFContigHeaderLine(Collections.singletonMap("ID", "1"), 0));
-
-        metaData.add(new VCFFilterHeaderLine("FILTER1"));
-        metaData.add(new VCFFilterHeaderLine("FILTER2"));
-
-        addHeaderLine(metaData, "INT1", 1, VCFHeaderLineType.Integer);
-        addHeaderLine(metaData, "INT3", 3, VCFHeaderLineType.Integer);
-        addHeaderLine(metaData, "INT20", 20, VCFHeaderLineType.Integer);
-        addHeaderLine(metaData, "INT.VAR", VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.Integer);
-        addHeaderLine(metaData, "FLOAT1", 1, VCFHeaderLineType.Float);
-        addHeaderLine(metaData, "FLOAT3", 3, VCFHeaderLineType.Float);
-        addHeaderLine(metaData, "FLAG", 0, VCFHeaderLineType.Flag);
-
-        syntheticHeader = new VCFHeader(metaData);
-    }
-
-
-    private static void makeSyntheticTests() {
-        VariantContextBuilder rootBuilder = new VariantContextBuilder();
-        rootBuilder.source("test");
-        rootBuilder.loc("1", 10, 10);
-        rootBuilder.alleles("A", "C");
-        rootBuilder.unfiltered();
-        ROOT = rootBuilder.make();
-
-        add(builder());
-        add(builder().alleles("A"));
-        add(builder().alleles("A", "C", "T"));
-        add(builder().alleles("A", "AC"));
-        add(builder().alleles("A", "ACAGT"));
-        add(builder().loc("1", 10, 11).alleles("AC", "A"));
-        add(builder().loc("1", 10, 13).alleles("ACGT", "A"));
-
-        // make sure filters work
-        add(builder().unfiltered());
-        add(builder().passFilters());
-        add(builder().filters("FILTER1"));
-        add(builder().filters("FILTER1", "FILTER2"));
-
-        add(builder().log10PError(VariantContext.NO_LOG10_PERROR));
-        add(builder().log10PError(-1));
-        add(builder().log10PError(-1.234e6));
-
-        add(builder().noID());
-        add(builder().id("rsID12345"));
-
-
-        add(builder().attribute("INT1", 1));
-        add(builder().attribute("INT1", 100));
-        add(builder().attribute("INT1", 1000));
-        add(builder().attribute("INT1", 100000));
-        add(builder().attribute("INT1", null));
-        add(builder().attribute("INT3", Arrays.asList(1, 2, 3)));
-        add(builder().attribute("INT3", Arrays.asList(1000, 2000, 3000)));
-        add(builder().attribute("INT3", Arrays.asList(100000, 200000, 300000)));
-        add(builder().attribute("INT3", null));
-        add(builder().attribute("INT20", TWENTY_INTS));
-
-        add(builder().attribute("FLOAT1", 1.0));
-        add(builder().attribute("FLOAT1", 100.0));
-        add(builder().attribute("FLOAT1", 1000.0));
-        add(builder().attribute("FLOAT1", 100000.0));
-        add(builder().attribute("FLOAT1", null));
-        add(builder().attribute("FLOAT3", Arrays.asList(1.0, 2.0, 3.0)));
-        add(builder().attribute("FLOAT3", Arrays.asList(1000.0, 2000.0, 3000.0)));
-        add(builder().attribute("FLOAT3", Arrays.asList(100000.0, 200000.0, 300000.0)));
-        add(builder().attribute("FLOAT3", null));
-
-        add(builder().attribute("FLAG", true));
-        //add(builder().attribute("FLAG", false)); // NOTE -- VCF doesn't allow false flags
-
-        add(builder().attribute("STRING1", "s1"));
-        add(builder().attribute("STRING1", null));
-        add(builder().attribute("STRING3", Arrays.asList("s1", "s2", "s3")));
-        add(builder().attribute("STRING3", null));
-        add(builder().attribute("STRING20", Arrays.asList("s1", "s2", "s3", "s4", "s5", "s6", "s7", "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15", "s16", "s17", "s18", "s19", "s20")));
-
-        add(builder().attribute("VAR.INFO.STRING", "s1"));
-        add(builder().attribute("VAR.INFO.STRING", Arrays.asList("s1", "s2")));
-        add(builder().attribute("VAR.INFO.STRING", Arrays.asList("s1", "s2", "s3")));
-        add(builder().attribute("VAR.INFO.STRING", null));
-
-        if ( ENABLE_GENOTYPE_TESTS ) {
-            addGenotypesToTestData();
-            addComplexGenotypesTest();
-        }
-
-        if ( ENABLE_A_AND_G_TESTS )
-            addGenotypesAndGTests();
-
-        if ( ENABLE_SYMBOLIC_ALLELE_TESTS )
-            addSymbolicAlleleTests();
-    }
-
-    private static void addSymbolicAlleleTests() {
-        // two tests to ensure that the end is computed correctly when there's (and not) an END field present
-        add(builder().alleles("N", "<VQSR>").start(10).stop(11).attribute("END", 11));
-        add(builder().alleles("N", "<VQSR>").start(10).stop(10));
-    }
-
-    private static void addGenotypesToTestData() {
-        final ArrayList<VariantContext> sites = new ArrayList<VariantContext>();
-
-        sites.add(builder().alleles("A").make());
-        sites.add(builder().alleles("A", "C", "T").make());
-        sites.add(builder().alleles("A", "AC").make());
-        sites.add(builder().alleles("A", "ACAGT").make());
-
-        for ( VariantContext site : sites ) {
-            addGenotypes(site);
-        }
-    }
-
-    private static void addGenotypeTests( final VariantContext site, Genotype ... genotypes ) {
-        // for each site's VC, we create two root genotypes.
-        // The first is the primary genotype and is added to each new test.
-        // The second is variable: in some tests it is absent (testing a single genotype), in others it is
-        // duplicated 10, 100, or 1000 times to test scaling.
-
-        final VariantContextBuilder builder = new VariantContextBuilder(site);
-
-        // add a single context
-        builder.genotypes(genotypes[0]);
-        add(builder);
-
-        if ( genotypes.length > 1 ) {
-            // add all
-            add(builder.genotypes(Arrays.asList(genotypes)));
-
-            // add all with the last replicated 10x and 100x times
-            for ( int nCopiesOfLast : Arrays.asList(10, 100, 1000) ) {
-                final GenotypesContext gc = new GenotypesContext();
-                final Genotype last = genotypes[genotypes.length-1];
-                for ( int i = 0; i < genotypes.length - 1; i++ )
-                    gc.add(genotypes[i]);
-                for ( int i = 0; i < nCopiesOfLast; i++ )
-                    gc.add(new GenotypeBuilder(last).name("copy" + i).make());
-                add(builder.genotypes(gc));
-            }
-        }
-    }
-
-    private static void addGenotypes( final VariantContext site) {
-        // test ref/ref
-        final Allele ref = site.getReference();
-        final Allele alt1 = site.getNAlleles() > 1 ? site.getAlternateAllele(0) : null;
-        final Genotype homRef = GenotypeBuilder.create("homRef", Arrays.asList(ref, ref));
-        addGenotypeTests(site, homRef);
-
-        if ( alt1 != null ) {
-            final Genotype het = GenotypeBuilder.create("het", Arrays.asList(ref, alt1));
-            final Genotype homVar = GenotypeBuilder.create("homVar", Arrays.asList(alt1, alt1));
-            addGenotypeTests(site, homRef, het);
-            addGenotypeTests(site, homRef, het, homVar);
-
-            // test no GT at all
-            addGenotypeTests(site, new GenotypeBuilder("noGT", new ArrayList<Allele>(0)).attribute("INT1", 10).make());
-
-            final List<Allele> noCall = Arrays.asList(Allele.NO_CALL, Allele.NO_CALL);
-
-            // ploidy
-            if ( ENABLE_PLOIDY_TESTS ) {
-                addGenotypeTests(site,
-                        GenotypeBuilder.create("dip", Arrays.asList(ref, alt1)),
-                        GenotypeBuilder.create("hap", Arrays.asList(ref)));
-
-                addGenotypeTests(site,
-                        GenotypeBuilder.create("noCall", noCall),
-                        GenotypeBuilder.create("dip", Arrays.asList(ref, alt1)),
-                        GenotypeBuilder.create("hap", Arrays.asList(ref)));
-
-                addGenotypeTests(site,
-                        GenotypeBuilder.create("noCall",  noCall),
-                        GenotypeBuilder.create("noCall2", noCall),
-                        GenotypeBuilder.create("dip", Arrays.asList(ref, alt1)),
-                        GenotypeBuilder.create("hap", Arrays.asList(ref)));
-
-                addGenotypeTests(site,
-                        GenotypeBuilder.create("dip", Arrays.asList(ref, alt1)),
-                        GenotypeBuilder.create("tet", Arrays.asList(ref, alt1, alt1)));
-
-                addGenotypeTests(site,
-                        GenotypeBuilder.create("noCall", noCall),
-                        GenotypeBuilder.create("dip", Arrays.asList(ref, alt1)),
-                        GenotypeBuilder.create("tet", Arrays.asList(ref, alt1, alt1)));
-
-                addGenotypeTests(site,
-                        GenotypeBuilder.create("noCall", noCall),
-                        GenotypeBuilder.create("noCall2", noCall),
-                        GenotypeBuilder.create("dip", Arrays.asList(ref, alt1)),
-                        GenotypeBuilder.create("tet", Arrays.asList(ref, alt1, alt1)));
-
-                addGenotypeTests(site,
-                        GenotypeBuilder.create("nocall", noCall),
-                        GenotypeBuilder.create("dip", Arrays.asList(ref, alt1)),
-                        GenotypeBuilder.create("tet", Arrays.asList(ref, alt1, alt1)));
-            }
-
-
-            //
-            //
-            // TESTING PHASE
-            //
-            //
-            final Genotype gUnphased = new GenotypeBuilder("gUnphased", Arrays.asList(ref, alt1)).make();
-            final Genotype gPhased = new GenotypeBuilder("gPhased", Arrays.asList(ref, alt1)).phased(true).make();
-            final Genotype gPhased2 = new GenotypeBuilder("gPhased2", Arrays.asList(alt1, alt1)).phased(true).make();
-            final Genotype gPhased3 = new GenotypeBuilder("gPhased3", Arrays.asList(ref, ref)).phased(true).make();
-            final Genotype haploidNoPhase = new GenotypeBuilder("haploidNoPhase", Arrays.asList(ref)).make();
-            addGenotypeTests(site, gUnphased, gPhased);
-            addGenotypeTests(site, gUnphased, gPhased2);
-            addGenotypeTests(site, gUnphased, gPhased3);
-            addGenotypeTests(site, gPhased, gPhased2);
-            addGenotypeTests(site, gPhased, gPhased3);
-            addGenotypeTests(site, gPhased2, gPhased3);
-            addGenotypeTests(site, haploidNoPhase, gPhased);
-            addGenotypeTests(site, haploidNoPhase, gPhased2);
-            addGenotypeTests(site, haploidNoPhase, gPhased3);
-            addGenotypeTests(site, haploidNoPhase, gPhased, gPhased2);
-            addGenotypeTests(site, haploidNoPhase, gPhased, gPhased3);
-            addGenotypeTests(site, haploidNoPhase, gPhased2, gPhased3);
-            addGenotypeTests(site, haploidNoPhase, gPhased, gPhased2, gPhased3);
-
-            final Genotype gUnphasedTet = new GenotypeBuilder("gUnphasedTet", Arrays.asList(ref, alt1, ref, alt1)).make();
-            final Genotype gPhasedTet = new GenotypeBuilder("gPhasedTet", Arrays.asList(ref, alt1, alt1, alt1)).phased(true).make();
-            addGenotypeTests(site, gUnphasedTet, gPhasedTet);
-        }
-
-        if ( ENABLE_PL_TESTS ) {
-            if ( site.getNAlleles() == 2 ) {
-                // testing PLs
-                addGenotypeTests(site,
-                        GenotypeBuilder.create("g1", Arrays.asList(ref, ref), new double[]{0, -1, -2}),
-                        GenotypeBuilder.create("g2", Arrays.asList(ref, ref), new double[]{0, -2, -3}));
-
-                addGenotypeTests(site,
-                        GenotypeBuilder.create("g1", Arrays.asList(ref, ref), new double[]{-1, 0, -2}),
-                        GenotypeBuilder.create("g2", Arrays.asList(ref, ref), new double[]{0, -2, -3}));
-
-                addGenotypeTests(site,
-                        GenotypeBuilder.create("g1", Arrays.asList(ref, ref), new double[]{-1, 0, -2}),
-                        GenotypeBuilder.create("g2", Arrays.asList(ref, ref), new double[]{0, -2000, -1000}));
-
-                addGenotypeTests(site, // missing PLs
-                        GenotypeBuilder.create("g1", Arrays.asList(ref, ref), new double[]{-1, 0, -2}),
-                        GenotypeBuilder.create("g2", Arrays.asList(ref, ref)));
-            }
-            else if ( site.getNAlleles() == 3 ) {
-                // testing PLs
-                addGenotypeTests(site,
-                        GenotypeBuilder.create("g1", Arrays.asList(ref, ref), new double[]{0, -1, -2, -3, -4, -5}),
-                        GenotypeBuilder.create("g2", Arrays.asList(ref, ref), new double[]{0, -2, -3, -4, -5, -6}));
-            }
-        }
-
-        // test attributes
-        addGenotypeTests(site,
-                attr("g1", ref, "INT1", 1),
-                attr("g2", ref, "INT1", 2));
-        addGenotypeTests(site,
-                attr("g1", ref, "INT1", 1),
-                attr("g2", ref, "INT1"));
-        addGenotypeTests(site,
-                attr("g1", ref, "INT3", 1, 2, 3),
-                attr("g2", ref, "INT3", 4, 5, 6));
-        addGenotypeTests(site,
-                attr("g1", ref, "INT3", 1, 2, 3),
-                attr("g2", ref, "INT3"));
-
-        addGenotypeTests(site,
-                attr("g1", ref, "INT20", TWENTY_INTS),
-                attr("g2", ref, "INT20", TWENTY_INTS));
-
-
-        if (ENABLE_VARARRAY_TESTS) {
-            addGenotypeTests(site,
-                    attr("g1", ref, "INT.VAR", 1, 2, 3),
-                    attr("g2", ref, "INT.VAR", 4, 5),
-                    attr("g3", ref, "INT.VAR", 6));
-            addGenotypeTests(site,
-                    attr("g1", ref, "INT.VAR", 1, 2, 3),
-                    attr("g2", ref, "INT.VAR"),
-                    attr("g3", ref, "INT.VAR", 5));
-        }
-
-        addGenotypeTests(site,
-                attr("g1", ref, "FLOAT1", 1.0),
-                attr("g2", ref, "FLOAT1", 2.0));
-        addGenotypeTests(site,
-                attr("g1", ref, "FLOAT1", 1.0),
-                attr("g2", ref, "FLOAT1"));
-        addGenotypeTests(site,
-                attr("g1", ref, "FLOAT3", 1.0, 2.0, 3.0),
-                attr("g2", ref, "FLOAT3", 4.0, 5.0, 6.0));
-        addGenotypeTests(site,
-                attr("g1", ref, "FLOAT3", 1.0, 2.0, 3.0),
-                attr("g2", ref, "FLOAT3"));
-
-        if (ENABLE_VARIABLE_LENGTH_GENOTYPE_STRING_TESTS) {
-            //
-            //
-            // TESTING MULTIPLE SIZED LISTS IN THE GENOTYPE FIELD
-            //
-            //
-            addGenotypeTests(site,
-                    attr("g1", ref, "GS", Arrays.asList("S1", "S2")),
-                    attr("g2", ref, "GS", Arrays.asList("S3", "S4")));
-
-            addGenotypeTests(site, // g1 is missing the string, and g2 is missing FLOAT1
-                    attr("g1", ref, "FLOAT1", 1.0),
-                    attr("g2", ref, "GS", Arrays.asList("S3", "S4")));
-
-            // variable sized lists
-            addGenotypeTests(site,
-                    attr("g1", ref, "GV", "S1"),
-                    attr("g2", ref, "GV", Arrays.asList("S3", "S4")));
-
-            addGenotypeTests(site,
-                    attr("g1", ref, "GV", Arrays.asList("S1", "S2")),
-                    attr("g2", ref, "GV", Arrays.asList("S3", "S4", "S5")));
-
-            addGenotypeTests(site, // missing value in varlist of string
-                    attr("g1", ref, "FLOAT1", 1.0),
-                    attr("g2", ref, "GV", Arrays.asList("S3", "S4", "S5")));
-        }
-
-        //
-        //
-        // TESTING GENOTYPE FILTERS
-        //
-        //
-        addGenotypeTests(site,
-                new GenotypeBuilder("g1-x", Arrays.asList(ref, ref)).filters("X").make(),
-                new GenotypeBuilder("g2-x", Arrays.asList(ref, ref)).filters("X").make());
-        addGenotypeTests(site,
-                new GenotypeBuilder("g1-unft", Arrays.asList(ref, ref)).unfiltered().make(),
-                new GenotypeBuilder("g2-x", Arrays.asList(ref, ref)).filters("X").make());
-        addGenotypeTests(site,
-                new GenotypeBuilder("g1-unft", Arrays.asList(ref, ref)).unfiltered().make(),
-                new GenotypeBuilder("g2-xy", Arrays.asList(ref, ref)).filters("X", "Y").make());
-        addGenotypeTests(site,
-                new GenotypeBuilder("g1-unft", Arrays.asList(ref, ref)).unfiltered().make(),
-                new GenotypeBuilder("g2-x", Arrays.asList(ref, ref)).filters("X").make(),
-                new GenotypeBuilder("g3-xy", Arrays.asList(ref, ref)).filters("X", "Y").make());
-    }
-
-    private static void addGenotypesAndGTests() {
-//        for ( final int ploidy : Arrays.asList(2)) {
-        for ( final int ploidy : Arrays.asList(1, 2, 3, 4, 5)) {
-            final List<List<String>> alleleCombinations =
-                    Arrays.asList(
-                            Arrays.asList("A"),
-                            Arrays.asList("A", "C"),
-                            Arrays.asList("A", "C", "G"),
-                            Arrays.asList("A", "C", "G", "T"));
-
-            for ( final List<String> alleles : alleleCombinations ) {
-                final VariantContextBuilder vcb = builder().alleles(alleles);
-                final VariantContext site = vcb.make();
-                final int nAlleles = site.getNAlleles();
-                final Allele ref = site.getReference();
-
-                // base genotype is ref/.../ref up to ploidy
-                final List<Allele> baseGenotype = new ArrayList<Allele>(ploidy);
-                for ( int i = 0; i < ploidy; i++) baseGenotype.add(ref);
-                final int nPLs = GenotypeLikelihoods.numLikelihoods(nAlleles, ploidy);
-
-                // ada is 0, 1, ..., nAlleles - 2 (one entry per alt allele)
-                final List<Integer> ada = new ArrayList<Integer>(nAlleles);
-                for ( int i = 0; i < nAlleles - 1; i++ ) ada.add(i);
-
-                // pl is 0, 1, ..., nPLs - 1, where nPLs depends on nAlleles and ploidy
-                final int[] pl = new int[nPLs];
-                for ( int i = 0; i < pl.length; i++ ) pl[i] = i;
-
-                final GenotypeBuilder gb = new GenotypeBuilder("ADA_PL_SAMPLE");
-                gb.alleles(baseGenotype);
-                gb.PL(pl);
-                gb.attribute("ADA", nAlleles == 2 ? ada.get(0) : ada);
-                vcb.genotypes(gb.make());
-
-                add(vcb);
-            }
-        }
-    }
-
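-    // Helper: builds a diploid ref/ref genotype for 'name'; with no values the genotype gets no extended
-    // attribute, with a single value 'key' is set to that scalar, otherwise to the list of values.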
-    private static Genotype attr(final String name, final Allele ref, final String key, final Object ... value) {
-        if ( value.length == 0 )
-            return GenotypeBuilder.create(name, Arrays.asList(ref, ref));
-        else {
-            final Object toAdd = value.length == 1 ? value[0] : Arrays.asList(value);
-            return new GenotypeBuilder(name, Arrays.asList(ref, ref)).attribute(key, toAdd).make();
-        }
-    }
-
-    public static List<VariantContextTestData> generateSiteTests() {
-        return TEST_DATAs;
-    }
-
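-    // Round-trip test: writes the expected VariantContexts under a header that adds two samples absent
-    // from the data, then checks that the reader reports those extra samples as no-call genotypes.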
-    public static void testReaderWriterWithMissingGenotypes(final VariantContextIOTest tester, final VariantContextTestData data) throws IOException {
-        final int nSamples = data.header.getNGenotypeSamples();
-        if ( nSamples > 2 ) {
-            for ( final VariantContext vc : data.vcs )
-                if ( vc.isSymbolic() )
-                    // cannot handle symbolic alleles because they may be weird non-call VCFs
-                    return;
-
-            final File tmpFile = File.createTempFile("testReaderWriter", tester.getExtension());
-            tmpFile.deleteOnExit();
-            Tribble.indexFile(tmpFile).deleteOnExit();
-
-            // write expected to disk
-            final EnumSet<Options> options = EnumSet.of(Options.INDEX_ON_THE_FLY);
-            final VariantContextWriter writer = tester.makeWriter(tmpFile, options);
-
-            final Set<String> samplesInVCF = new HashSet<String>(data.header.getGenotypeSamples());
-            final List<String> missingSamples = Arrays.asList("MISSING1", "MISSING2");
-            final List<String> allSamples = new ArrayList<String>(missingSamples);
-            allSamples.addAll(samplesInVCF);
-
-            final VCFHeader header = new VCFHeader(data.header.getMetaDataInInputOrder(), allSamples);
-            writeVCsToFile(writer, header, data.vcs);
-
-            // ensure writing of expected == actual
-            final VariantContextContainer p = tester.readAllVCs(tmpFile);
-            final Iterable<VariantContext> actual = p.getVCs();
-
-            int i = 0;
-            for ( final VariantContext readVC : actual ) {
-                if ( readVC == null ) continue; // sometimes we read null records...
-                final VariantContext expected = data.vcs.get(i++);
-                for ( final Genotype g : readVC.getGenotypes() ) {
-                    Assert.assertTrue(allSamples.contains(g.getSampleName()));
-                    if ( samplesInVCF.contains(g.getSampleName()) ) {
-                        assertEquals(g, expected.getGenotype(g.getSampleName()));
-                    } else {
-                        // missing
-                        Assert.assertTrue(g.isNoCall());
-                    }
-                }
-            }
-
-        }
-    }
-
-    public static void testReaderWriter(final VariantContextIOTest tester, final VariantContextTestData data) throws IOException {
-        testReaderWriter(tester, data.header, data.vcs, data.vcs, true);
-    }
-
-    public static void testReaderWriter(final VariantContextIOTest tester,
-                                        final VCFHeader header,
-                                        final List<VariantContext> expected,
-                                        final Iterable<VariantContext> vcs,
-                                        final boolean recurse) throws IOException {
-        final File tmpFile = File.createTempFile("testReaderWriter", tester.getExtension());
-        tmpFile.deleteOnExit();
-        Tribble.indexFile(tmpFile).deleteOnExit();
-
-        // write expected to disk
-        final EnumSet<Options> options = EnumSet.of(Options.INDEX_ON_THE_FLY);
-        final VariantContextWriter writer = tester.makeWriter(tmpFile, options);
-        writeVCsToFile(writer, header, vcs);
-
-        // ensure writing of expected == actual
-        final VariantContextContainer p = tester.readAllVCs(tmpFile);
-        final Iterable<VariantContext> actual = p.getVCs();
-        assertEquals(actual, expected);
-
-        if ( recurse ) {
-            // if we are doing a recursive test, grab a fresh iterator over the written values
-            final Iterable<VariantContext> read = tester.readAllVCs(tmpFile).getVCs();
-            testReaderWriter(tester, p.getHeader(), expected, read, false);
-        }
-    }
-
-    private static void writeVCsToFile(final VariantContextWriter writer, final VCFHeader header, final Iterable<VariantContext> vcs) {
-        // write
-        writer.writeHeader(header);
-        for ( VariantContext vc : vcs )
-            if (vc != null)
-                writer.add(vc);
-        writer.close();
-    }
-
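-    // Minimal Iterable/Iterator adapter around a FeatureCodec: each next() decodes one record from
-    // nextSource() and fully decodes it against the supplied VCFHeader.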
-    public static abstract class VCIterable<SOURCE> implements Iterable<VariantContext>, Iterator<VariantContext> {
-        final FeatureCodec<VariantContext, SOURCE> codec;
-        final VCFHeader header;
-
-        public VCIterable(final FeatureCodec<VariantContext, SOURCE> codec, final VCFHeader header) {
-            this.codec = codec;
-            this.header = header;
-        }
-
-        @Override
-        public Iterator<VariantContext> iterator() {
-            return this;
-        }
-
-        @Override
-        public abstract boolean hasNext();
-
-        public abstract SOURCE nextSource();
-        
-        @Override
-        public VariantContext next() {
-            try {
-                final VariantContext vc = codec.decode(nextSource());
-                return vc == null ? null : vc.fullyDecode(header, false);
-            } catch ( IOException e ) {
-                throw new RuntimeException(e);
-            }
-        }
-
-        @Override
-        public void remove() { }
-    }
-
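-    // Reads all records from a BCF file: the header is read from one stream, then the file is reopened
-    // and the header bytes are skipped before the codec iterates over the remaining records.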
-    public static VariantContextContainer readAllVCs(final File input, final BCF2Codec codec) throws IOException {
-        PositionalBufferedStream headerPbs = new PositionalBufferedStream(new FileInputStream(input));
-        FeatureCodecHeader header = codec.readHeader(headerPbs);
-        headerPbs.close();
-
-        final PositionalBufferedStream pbs = new PositionalBufferedStream(new FileInputStream(input));
-        pbs.skip(header.getHeaderEnd());
-
-        final VCFHeader vcfHeader = (VCFHeader)header.getHeaderValue();
-        return new VariantContextTestProvider.VariantContextContainer(vcfHeader, new VariantContextTestProvider.VCIterable<PositionalBufferedStream>(codec, vcfHeader) {
-            @Override
-            public boolean hasNext() {
-                try {
-                    return !pbs.isDone();
-                } catch (IOException e) {
-                    throw new RuntimeException(e);
-                }
-            }
-
-            @Override
-            public PositionalBufferedStream nextSource() {
-                return pbs;
-            }
-        });
-    }
-
-    public static VariantContextContainer readAllVCs(final File input, final VCFCodec codec) throws FileNotFoundException {
-        final LineIterator lineIterator = new LineIteratorImpl(LineReaderUtil.fromBufferedStream(new BufferedInputStream(new FileInputStream(input))));
-        final VCFHeader vcfHeader = (VCFHeader) codec.readActualHeader(lineIterator);
-        return new VariantContextTestProvider.VariantContextContainer(vcfHeader, new VariantContextTestProvider.VCIterable<LineIterator>(codec, vcfHeader) {
-            @Override
-            public boolean hasNext() {
-                return lineIterator.hasNext();
-            }
-
-            @Override
-            public LineIterator nextSource() {
-                return lineIterator;
-            }
-        });
-    }
-    
-    public static void assertVCFandBCFFilesAreTheSame(final File vcfFile, final File bcfFile) throws IOException {
-        final VariantContextContainer vcfData = readAllVCs(vcfFile, new VCFCodec());
-        final VariantContextContainer bcfData = readAllVCs(bcfFile, new BCF2Codec());
-        assertEquals(bcfData.getHeader(), vcfData.getHeader());
-        assertEquals(bcfData.getVCs(), vcfData.getVCs());
-    }
-
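-    // Record-by-record comparison of two VariantContext streams, skipping null entries on either side
-    // and failing if the two streams contain different numbers of non-null records.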
-    public static void assertEquals(final Iterable<VariantContext> actual, final Iterable<VariantContext> expected) {
-        final Iterator<VariantContext> actualIT = actual.iterator();
-        final Iterator<VariantContext> expectedIT = expected.iterator();
-
-        while ( expectedIT.hasNext() ) {
-            final VariantContext expectedVC = expectedIT.next();
-            if ( expectedVC == null )
-                continue;
-
-            VariantContext actualVC;
-            do {
-                Assert.assertTrue(actualIT.hasNext(), "Too few records found in actual");
-                actualVC = actualIT.next();
-            } while ( actualIT.hasNext() && actualVC == null );
-
-            if ( actualVC == null )
-                Assert.fail("Too few records in actual");
-
-            assertEquals(actualVC, expectedVC);
-        }
-        Assert.assertTrue(! actualIT.hasNext(), "Too many records found in actual");
-    }
-
-    /**
-     * Assert that two variant contexts are actually equal
-     * @param actual   the VariantContext produced by the code under test
-     * @param expected the VariantContext it is required to equal
-     */
-    public static void assertEquals( final VariantContext actual, final VariantContext expected ) {
-        Assert.assertNotNull(actual, "VariantContext expected not null");
-        Assert.assertEquals(actual.getChr(), expected.getChr(), "chr");
-        Assert.assertEquals(actual.getStart(), expected.getStart(), "start");
-        Assert.assertEquals(actual.getEnd(), expected.getEnd(), "end");
-        Assert.assertEquals(actual.getID(), expected.getID(), "id");
-        Assert.assertEquals(actual.getAlleles(), expected.getAlleles(), "alleles for " + expected + " vs " + actual);
-
-        assertAttributesEquals(actual.getAttributes(), expected.getAttributes());
-        Assert.assertEquals(actual.filtersWereApplied(), expected.filtersWereApplied(), "filtersWereApplied");
-        Assert.assertEquals(actual.isFiltered(), expected.isFiltered(), "isFiltered");
-        VariantBaseTest.assertEqualsSet(actual.getFilters(), expected.getFilters(), "filters");
-        VariantBaseTest.assertEqualsDoubleSmart(actual.getPhredScaledQual(), expected.getPhredScaledQual());
-
-        Assert.assertEquals(actual.hasGenotypes(), expected.hasGenotypes(), "hasGenotypes");
-        if ( expected.hasGenotypes() ) {
-            VariantBaseTest.assertEqualsSet(actual.getSampleNames(), expected.getSampleNames(), "sample names set");
-            Assert.assertEquals(actual.getSampleNamesOrderedByName(), expected.getSampleNamesOrderedByName(), "sample names");
-            final Set<String> samples = expected.getSampleNames();
-            for ( final String sample : samples ) {
-                assertEquals(actual.getGenotype(sample), expected.getGenotype(sample));
-            }
-        }
-    }
-
-    public static void assertEquals(final Genotype actual, final Genotype expected) {
-        Assert.assertEquals(actual.getSampleName(), expected.getSampleName(), "Genotype names");
-        Assert.assertEquals(actual.getAlleles(), expected.getAlleles(), "Genotype alleles");
-        Assert.assertEquals(actual.getGenotypeString(), expected.getGenotypeString(), "Genotype string");
-        Assert.assertEquals(actual.getType(), expected.getType(), "Genotype type");
-
-        // filters are the same
-        Assert.assertEquals(actual.getFilters(), expected.getFilters(), "Genotype filters");
-        Assert.assertEquals(actual.isFiltered(), expected.isFiltered(), "Genotype isFiltered");
-
-        // inline attributes
-        Assert.assertEquals(actual.getDP(), expected.getDP(), "Genotype dp");
-        Assert.assertTrue(Arrays.equals(actual.getAD(), expected.getAD()));
-        Assert.assertEquals(actual.getGQ(), expected.getGQ(), "Genotype gq");
-        Assert.assertEquals(actual.hasPL(), expected.hasPL(), "Genotype hasPL");
-        Assert.assertEquals(actual.hasAD(), expected.hasAD(), "Genotype hasAD");
-        Assert.assertEquals(actual.hasGQ(), expected.hasGQ(), "Genotype hasGQ");
-        Assert.assertEquals(actual.hasDP(), expected.hasDP(), "Genotype hasDP");
-
-        Assert.assertEquals(actual.hasLikelihoods(), expected.hasLikelihoods(), "Genotype haslikelihoods");
-        Assert.assertEquals(actual.getLikelihoodsString(), expected.getLikelihoodsString(), "Genotype getlikelihoodsString");
-        Assert.assertEquals(actual.getLikelihoods(), expected.getLikelihoods(), "Genotype getLikelihoods");
-        Assert.assertTrue(Arrays.equals(actual.getPL(), expected.getPL()));
-
-        Assert.assertEquals(actual.getGQ(), expected.getGQ(), "Genotype phredScaledQual");
-        assertAttributesEquals(actual.getExtendedAttributes(), expected.getExtendedAttributes());
-        Assert.assertEquals(actual.isPhased(), expected.isPhased(), "Genotype isPhased");
-        Assert.assertEquals(actual.getPloidy(), expected.getPloidy(), "Genotype getPloidy");
-    }
-
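-    // Attribute maps are compared key by key: List values are compared element-wise, and a key bound
-    // to null (or a VCF missing value) on one side may legitimately be absent on the other.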
-    private static void assertAttributesEquals(final Map<String, Object> actual, Map<String, Object> expected) {
-        final Set<String> expectedKeys = new HashSet<String>(expected.keySet());
-
-        for ( final Map.Entry<String, Object> act : actual.entrySet() ) {
-            final Object actualValue = act.getValue();
-            if ( expected.containsKey(act.getKey()) && expected.get(act.getKey()) != null ) {
-                final Object expectedValue = expected.get(act.getKey());
-                if ( expectedValue instanceof List ) {
-                    final List<Object> expectedList = (List<Object>)expectedValue;
-                    Assert.assertTrue(actualValue instanceof List, act.getKey() + " should be a list but isn't");
-                    final List<Object> actualList = (List<Object>)actualValue;
-                    Assert.assertEquals(actualList.size(), expectedList.size(), act.getKey() + " size");
-                    for ( int i = 0; i < expectedList.size(); i++ )
-                        assertAttributeEquals(act.getKey(), actualList.get(i), expectedList.get(i));
-                } else
-                    assertAttributeEquals(act.getKey(), actualValue, expectedValue);
-            } else {
-                // it's ok to have a binding in x -> null that's absent in y
-                Assert.assertNull(actualValue, act.getKey() + " present in one but not in the other");
-            }
-            expectedKeys.remove(act.getKey());
-        }
-
-        // now expectedKeys contains only the keys found in expected but not in actual,
-        // and they must all be null
-        for ( final String missingExpected : expectedKeys ) {
-            final Object value = expected.get(missingExpected);
-            Assert.assertTrue(isMissing(value), "Attribute " + missingExpected + " missing in one but not in other" );
-        }
-    }
-
-    private static boolean isMissing(final Object value) {
-        if ( value == null ) return true;
-        else if ( value.equals(VCFConstants.MISSING_VALUE_v4) ) return true;
-        else if ( value instanceof List ) {
-            // handles the case where all elements are null or the list is empty
-            for ( final Object elt : (List)value)
-                if ( elt != null )
-                    return false;
-            return true;
-        } else
-            return false;
-    }
-
-    private static void assertAttributeEquals(final String key, final Object actual, final Object expected) {
-        if ( expected instanceof Double ) {
-            // must be very tolerant because doubles are being rounded to 2 sig figs
-            VariantBaseTest.assertEqualsDoubleSmart(actual, (Double)expected, 1e-2);
-        } else
-            Assert.assertEquals(actual, expected, "Attribute " + key);
-    }
-
-    public static void addComplexGenotypesTest() {
-        final List<Allele> allAlleles = Arrays.asList(
-                Allele.create("A", true),
-                Allele.create("C", false),
-                Allele.create("G", false));
-
-        for ( int nAlleles : Arrays.asList(2, 3) ) {
-            for ( int highestPloidy : Arrays.asList(1, 2, 3) ) {
-                // site alleles
-                final List<Allele> siteAlleles = allAlleles.subList(0, nAlleles);
-
-                // possible alleles for genotypes
-                final List<Allele> possibleGenotypeAlleles = new ArrayList<Allele>(siteAlleles);
-                possibleGenotypeAlleles.add(Allele.NO_CALL);
-
-                // there are n^ploidy possible genotypes
-                final List<List<Allele>> possibleGenotypes = makeAllGenotypes(possibleGenotypeAlleles, highestPloidy);
-                final int nPossibleGenotypes = possibleGenotypes.size();
-
-                VariantContextBuilder vb = new VariantContextBuilder("unittest", "1", 1, 1, siteAlleles);
-
-                // first test -- create n copies of each genotype
-                for ( int i = 0; i < nPossibleGenotypes; i++ ) {
-                    final List<Genotype> samples = new ArrayList<Genotype>(3);
-                    samples.add(GenotypeBuilder.create("sample" + i, possibleGenotypes.get(i)));
-                    add(vb.genotypes(samples));
-                }
-
-                // second test -- create one sample with each genotype
-                {
-                    final List<Genotype> samples = new ArrayList<Genotype>(nPossibleGenotypes);
-                    for ( int i = 0; i < nPossibleGenotypes; i++ ) {
-                        samples.add(GenotypeBuilder.create("sample" + i, possibleGenotypes.get(i)));
-                    }
-                    add(vb.genotypes(samples));
-                }
-
-                // test mixed ploidy
-                for ( int i = 0; i < nPossibleGenotypes; i++ ) {
-                    for ( int ploidy = 1; ploidy < highestPloidy; ploidy++ ) {
-                        final List<Genotype> samples = new ArrayList<Genotype>(highestPloidy);
-                        final List<Allele> genotype = possibleGenotypes.get(i).subList(0, ploidy);
-                        samples.add(GenotypeBuilder.create("sample" + i, genotype));
-                        add(vb.genotypes(samples));
-                    }
-                }
-            }
-        }
-    }
-
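-    // All genotypes of the given ploidy, enumerated as ordered permutations of the alleles with repetition.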
-    private static List<List<Allele>> makeAllGenotypes(final List<Allele> alleles, final int highestPloidy) {
-        return GeneralUtils.makePermutations(alleles, highestPloidy, true);
-    }
-
-    public static void assertEquals(final VCFHeader actual, final VCFHeader expected) {
-        Assert.assertEquals(actual.getMetaDataInSortedOrder().size(), expected.getMetaDataInSortedOrder().size(), "Number of VCF header lines");
-
-        // for some reason set.equals() is returning false even though all paired elements are .equals(). Perhaps compareTo() is busted?
-        //Assert.assertEquals(actual.getMetaDataInInputOrder(), expected.getMetaDataInInputOrder());
-        final List<VCFHeaderLine> actualLines = new ArrayList<VCFHeaderLine>(actual.getMetaDataInSortedOrder());
-        final List<VCFHeaderLine> expectedLines = new ArrayList<VCFHeaderLine>(expected.getMetaDataInSortedOrder());
-        for ( int i = 0; i < actualLines.size(); i++ ) {
-            Assert.assertEquals(actualLines.get(i), expectedLines.get(i), "VCF header lines");
-        }
-    }
-
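-    // Command-line entry point: asserts that the VCF file (first argument) and the BCF file (second
-    // argument) contain the same header and records.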
-    public static void main(final String[] argv) {
-        final File variants1 = new File(argv[0]);
-        final File variants2 = new File(argv[1]);
-        try {
-            VariantContextTestProvider.assertVCFandBCFFilesAreTheSame(variants1, variants2);
-        } catch ( IOException e ) {
-            throw new RuntimeException(e);
-        }
-    }
-}
\ No newline at end of file
diff --git a/src/tests/java/htsjdk/variant/variantcontext/VariantContextUnitTest.java b/src/tests/java/htsjdk/variant/variantcontext/VariantContextUnitTest.java
deleted file mode 100644
index 3e20932..0000000
--- a/src/tests/java/htsjdk/variant/variantcontext/VariantContextUnitTest.java
+++ /dev/null
@@ -1,1450 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package htsjdk.variant.variantcontext;
-
-
-// the imports for unit testing.
-
-import htsjdk.samtools.util.TestUtil;
-import htsjdk.tribble.AbstractFeatureReader;
-import htsjdk.tribble.FeatureCodec;
-import htsjdk.variant.VariantBaseTest;
-import htsjdk.variant.bcf2.BCF2Codec;
-import htsjdk.variant.vcf.VCFCodec;
-import htsjdk.tribble.TribbleException;
-import htsjdk.variant.vcf.VCFConstants;
-import org.testng.Assert;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.BeforeSuite;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-import java.io.File;
-import java.util.*;
-
-
-public class VariantContextUnitTest extends VariantBaseTest {
-    Allele A, Aref, C, T, Tref;
-    Allele del, delRef, ATC, ATCref;
-
-    // A [ref] / T at 10
-    String snpLoc = "chr1";
-    int snpLocStart = 10;
-    int snpLocStop = 10;
-
-    // - / ATC [ref] from 20-22
-    String delLoc = "chr1";
-    int delLocStart = 20;
-    int delLocStop = 22;
-
-    // - [ref] / ATC from 20-20
-    String insLoc = "chr1";
-    int insLocStart = 20;
-    int insLocStop = 20;
-
-    VariantContextBuilder basicBuilder, snpBuilder, insBuilder;
-
-    @BeforeSuite
-    public void before() {
-        del = Allele.create("A");
-        delRef = Allele.create("A", true);
-
-        A = Allele.create("A");
-        C = Allele.create("C");
-        Aref = Allele.create("A", true);
-        T = Allele.create("T");
-        Tref = Allele.create("T", true);
-
-        ATC = Allele.create("ATC");
-        ATCref = Allele.create("ATC", true);
-    }
-
-    @BeforeMethod
-    public void beforeTest() {
-        basicBuilder = new VariantContextBuilder("test", snpLoc,snpLocStart, snpLocStop, Arrays.asList(Aref, T));
-        snpBuilder = new VariantContextBuilder("test", snpLoc,snpLocStart, snpLocStop, Arrays.asList(Aref, T));
-        insBuilder = new VariantContextBuilder("test", insLoc, insLocStart, insLocStop, Arrays.asList(delRef, ATC));
-    }
-
-    @Test
-    public void testDetermineTypes() {
-        Allele ACref = Allele.create("AC", true);
-        Allele AC = Allele.create("AC");
-        Allele AT = Allele.create("AT");
-        Allele C = Allele.create("C");
-        Allele CAT = Allele.create("CAT");
-        Allele TAref = Allele.create("TA", true);
-        Allele TA = Allele.create("TA");
-        Allele TC = Allele.create("TC");
-        Allele symbolic = Allele.create("<FOO>");
-
-        // test REF
-        List<Allele> alleles = Arrays.asList(Tref);
-        VariantContext vc = snpBuilder.alleles(alleles).stop(snpLocStop).make();
-        Assert.assertEquals(vc.getType(), VariantContext.Type.NO_VARIATION);
-
-        // test SNPs
-        alleles = Arrays.asList(Tref, A);
-        vc = snpBuilder.alleles(alleles).stop(snpLocStop).make();
-        Assert.assertEquals(vc.getType(), VariantContext.Type.SNP);
-
-        alleles = Arrays.asList(Tref, A, C);
-        vc = snpBuilder.alleles(alleles).stop(snpLocStop).make();
-        Assert.assertEquals(vc.getType(), VariantContext.Type.SNP);
-
-        // test MNPs
-        alleles = Arrays.asList(ACref, TA);
-        vc = snpBuilder.alleles(alleles).stop(snpLocStop+1).make();
-        Assert.assertEquals(vc.getType(), VariantContext.Type.MNP);
-
-        alleles = Arrays.asList(ATCref, CAT, Allele.create("GGG"));
-        vc = basicBuilder.alleles(alleles).stop(snpLocStop+2).make();
-        Assert.assertEquals(vc.getType(), VariantContext.Type.MNP);
-
-        // test INDELs
-        alleles = Arrays.asList(Aref, ATC);
-        vc = basicBuilder.alleles(alleles).stop(snpLocStop).make();
-        Assert.assertEquals(vc.getType(), VariantContext.Type.INDEL);
-
-        alleles = Arrays.asList(ATCref, A);
-        vc = basicBuilder.alleles(alleles).stop(snpLocStop+2).make();
-        Assert.assertEquals(vc.getType(), VariantContext.Type.INDEL);
-
-        alleles = Arrays.asList(Tref, TA, TC);
-        vc = basicBuilder.alleles(alleles).stop(snpLocStop).make();
-        Assert.assertEquals(vc.getType(), VariantContext.Type.INDEL);
-
-        alleles = Arrays.asList(ATCref, A, AC);
-        vc = basicBuilder.alleles(alleles).stop(snpLocStop+2).make();
-        Assert.assertEquals(vc.getType(), VariantContext.Type.INDEL);
-
-        alleles = Arrays.asList(ATCref, A, Allele.create("ATCTC"));
-        vc = basicBuilder.alleles(alleles).stop(snpLocStop+2).make();
-        Assert.assertEquals(vc.getType(), VariantContext.Type.INDEL);
-
-        // test MIXED
-        alleles = Arrays.asList(TAref, T, TC);
-        vc = basicBuilder.alleles(alleles).stop(snpLocStop+1).make();
-        Assert.assertEquals(vc.getType(), VariantContext.Type.MIXED);
-
-        alleles = Arrays.asList(TAref, T, AC);
-        vc = basicBuilder.alleles(alleles).stop(snpLocStop+1).make();
-        Assert.assertEquals(vc.getType(), VariantContext.Type.MIXED);
-
-        alleles = Arrays.asList(ACref, ATC, AT);
-        vc = basicBuilder.alleles(alleles).stop(snpLocStop+1).make();
-        Assert.assertEquals(vc.getType(), VariantContext.Type.MIXED);
-
-        alleles = Arrays.asList(Aref, T, symbolic);
-        vc = basicBuilder.alleles(alleles).stop(snpLocStop).make();
-        Assert.assertEquals(vc.getType(), VariantContext.Type.MIXED);
-
-        // test SYMBOLIC
-        alleles = Arrays.asList(Tref, symbolic);
-        vc = basicBuilder.alleles(alleles).stop(snpLocStop).make();
-        Assert.assertEquals(vc.getType(), VariantContext.Type.SYMBOLIC);
-    }
-
-    @Test
-    public void testMultipleSNPAlleleOrdering() {
-        final List<Allele> allelesNaturalOrder = Arrays.asList(Aref, C, T);
-        final List<Allele> allelesUnnaturalOrder = Arrays.asList(Aref, T, C);
-        VariantContext naturalVC = snpBuilder.alleles(allelesNaturalOrder).make();
-        VariantContext unnaturalVC = snpBuilder.alleles(allelesUnnaturalOrder).make();
-        Assert.assertEquals(new ArrayList<Allele>(naturalVC.getAlleles()), allelesNaturalOrder);
-        Assert.assertEquals(new ArrayList<Allele>(unnaturalVC.getAlleles()), allelesUnnaturalOrder);
-    }
-
-    @Test
-    public void testCreatingSNPVariantContext() {
-
-        List<Allele> alleles = Arrays.asList(Aref, T);
-        VariantContext vc = snpBuilder.alleles(alleles).make();
-
-        Assert.assertEquals(vc.getChr(), snpLoc);
-        Assert.assertEquals(vc.getStart(), snpLocStart);
-        Assert.assertEquals(vc.getEnd(), snpLocStop);
-        Assert.assertEquals(vc.getType(), VariantContext.Type.SNP);
-        Assert.assertTrue(vc.isSNP());
-        Assert.assertFalse(vc.isIndel());
-        Assert.assertFalse(vc.isSimpleInsertion());
-        Assert.assertFalse(vc.isSimpleDeletion());
-        Assert.assertFalse(vc.isSimpleIndel());
-        Assert.assertFalse(vc.isMixed());
-        Assert.assertTrue(vc.isBiallelic());
-        Assert.assertEquals(vc.getNAlleles(), 2);
-
-        Assert.assertEquals(vc.getReference(), Aref);
-        Assert.assertEquals(vc.getAlleles().size(), 2);
-        Assert.assertEquals(vc.getAlternateAlleles().size(), 1);
-        Assert.assertEquals(vc.getAlternateAllele(0), T);
-
-        Assert.assertFalse(vc.hasGenotypes());
-
-        Assert.assertEquals(vc.getSampleNames().size(), 0);
-    }
-
-    @Test
-    public void testCreatingRefVariantContext() {
-        List<Allele> alleles = Arrays.asList(Aref);
-        VariantContext vc = snpBuilder.alleles(alleles).make();
-
-        Assert.assertEquals(vc.getChr(), snpLoc);
-        Assert.assertEquals(vc.getStart(), snpLocStart);
-        Assert.assertEquals(vc.getEnd(), snpLocStop);
-        Assert.assertEquals(VariantContext.Type.NO_VARIATION, vc.getType());
-        Assert.assertFalse(vc.isSNP());
-        Assert.assertFalse(vc.isIndel());
-        Assert.assertFalse(vc.isSimpleInsertion());
-        Assert.assertFalse(vc.isSimpleDeletion());
-        Assert.assertFalse(vc.isSimpleIndel());
-        Assert.assertFalse(vc.isMixed());
-        Assert.assertFalse(vc.isBiallelic());
-        Assert.assertEquals(vc.getNAlleles(), 1);
-
-        Assert.assertEquals(vc.getReference(), Aref);
-        Assert.assertEquals(vc.getAlleles().size(), 1);
-        Assert.assertEquals(vc.getAlternateAlleles().size(), 0);
-        //Assert.assertEquals(vc.getAlternateAllele(0), T);
-
-        Assert.assertFalse(vc.hasGenotypes());
-        Assert.assertEquals(vc.getSampleNames().size(), 0);
-    }
-
-    @Test
-    public void testCreatingDeletionVariantContext() {
-        List<Allele> alleles = Arrays.asList(ATCref, del);
-        VariantContext vc = new VariantContextBuilder("test", delLoc, delLocStart, delLocStop, alleles).make();
-
-        Assert.assertEquals(vc.getChr(), delLoc);
-        Assert.assertEquals(vc.getStart(), delLocStart);
-        Assert.assertEquals(vc.getEnd(), delLocStop);
-        Assert.assertEquals(vc.getType(), VariantContext.Type.INDEL);
-        Assert.assertFalse(vc.isSNP());
-        Assert.assertTrue(vc.isIndel());
-        Assert.assertFalse(vc.isSimpleInsertion());
-        Assert.assertTrue(vc.isSimpleDeletion());
-        Assert.assertTrue(vc.isSimpleIndel());
-        Assert.assertFalse(vc.isMixed());
-        Assert.assertTrue(vc.isBiallelic());
-        Assert.assertEquals(vc.getNAlleles(), 2);
-
-        Assert.assertEquals(vc.getReference(), ATCref);
-        Assert.assertEquals(vc.getAlleles().size(), 2);
-        Assert.assertEquals(vc.getAlternateAlleles().size(), 1);
-        Assert.assertEquals(vc.getAlternateAllele(0), del);
-
-        Assert.assertFalse(vc.hasGenotypes());
-
-        Assert.assertEquals(vc.getSampleNames().size(), 0);
-    }
-
-    @Test
-    public void testCreatingComplexSubstitutionVariantContext() {
-        List<Allele> alleles = Arrays.asList(Tref, ATC);
-        VariantContext vc = new VariantContextBuilder("test", insLoc, insLocStart, insLocStop, alleles).make();
-
-        Assert.assertEquals(vc.getChr(), insLoc);
-        Assert.assertEquals(vc.getStart(), insLocStart);
-        Assert.assertEquals(vc.getEnd(), insLocStop);
-        Assert.assertEquals(vc.getType(), VariantContext.Type.INDEL);
-        Assert.assertFalse(vc.isSNP());
-        Assert.assertTrue(vc.isIndel());
-        Assert.assertFalse(vc.isSimpleInsertion());
-        Assert.assertFalse(vc.isSimpleDeletion());
-        Assert.assertFalse(vc.isSimpleIndel());
-        Assert.assertFalse(vc.isMixed());
-        Assert.assertTrue(vc.isBiallelic());
-        Assert.assertEquals(vc.getNAlleles(), 2);
-
-        Assert.assertEquals(vc.getReference(), Tref);
-        Assert.assertEquals(vc.getAlleles().size(), 2);
-        Assert.assertEquals(vc.getAlternateAlleles().size(), 1);
-        Assert.assertEquals(vc.getAlternateAllele(0), ATC);
-
-        Assert.assertFalse(vc.hasGenotypes());
-
-        Assert.assertEquals(vc.getSampleNames().size(), 0);
-    }
-
-    @Test
-    public void testMatchingAlleles() {
-        List<Allele> alleles = Arrays.asList(ATCref, del);
-        VariantContext vc = new VariantContextBuilder("test", delLoc, delLocStart, delLocStop, alleles).make();
-        VariantContext vc2 = new VariantContextBuilder("test2", delLoc, delLocStart+12, delLocStop+12, alleles).make();
-
-        Assert.assertTrue(vc.hasSameAllelesAs(vc2));
-        Assert.assertTrue(vc.hasSameAlternateAllelesAs(vc2));
-    }
-
-    @Test
-    public void testCreatingInsertionVariantContext() {
-        List<Allele> alleles = Arrays.asList(delRef, ATC);
-        VariantContext vc = insBuilder.alleles(alleles).make();
-
-        Assert.assertEquals(vc.getChr(), insLoc);
-        Assert.assertEquals(vc.getStart(), insLocStart);
-        Assert.assertEquals(vc.getEnd(), insLocStop);
-        Assert.assertEquals(vc.getType(), VariantContext.Type.INDEL);
-        Assert.assertFalse(vc.isSNP());
-        Assert.assertTrue(vc.isIndel());
-        Assert.assertTrue(vc.isSimpleInsertion());
-        Assert.assertFalse(vc.isSimpleDeletion());
-        Assert.assertTrue(vc.isSimpleIndel());
-        Assert.assertFalse(vc.isMixed());
-        Assert.assertTrue(vc.isBiallelic());
-        Assert.assertEquals(vc.getNAlleles(), 2);
-
-        Assert.assertEquals(vc.getReference(), delRef);
-        Assert.assertEquals(vc.getAlleles().size(), 2);
-        Assert.assertEquals(vc.getAlternateAlleles().size(), 1);
-        Assert.assertEquals(vc.getAlternateAllele(0), ATC);
-        Assert.assertFalse(vc.hasGenotypes());
-
-        Assert.assertEquals(vc.getSampleNames().size(), 0);
-    }
-
-    @Test
-    public void testCreatingPartiallyCalledGenotype() {
-        List<Allele> alleles = Arrays.asList(Aref, C);
-        Genotype g = GenotypeBuilder.create("foo", Arrays.asList(C, Allele.NO_CALL));
-        VariantContext vc = new VariantContextBuilder("test", snpLoc, snpLocStart, snpLocStop, alleles).genotypes(g).make();
-
-        Assert.assertTrue(vc.isSNP());
-        Assert.assertEquals(vc.getNAlleles(), 2);
-        Assert.assertTrue(vc.hasGenotypes());
-        Assert.assertFalse(vc.isMonomorphicInSamples());
-        Assert.assertTrue(vc.isPolymorphicInSamples());
-        Assert.assertEquals(vc.getGenotype("foo"), g);
-        Assert.assertEquals(vc.getCalledChrCount(), 1); // only 1 called chromosome; the NO_CALL allele is excluded because it isn't called
-        Assert.assertEquals(vc.getCalledChrCount(Aref), 0);
-        Assert.assertEquals(vc.getCalledChrCount(C), 1);
-        Assert.assertFalse(vc.getGenotype("foo").isHet());
-        Assert.assertFalse(vc.getGenotype("foo").isHom());
-        Assert.assertFalse(vc.getGenotype("foo").isNoCall());
-        Assert.assertFalse(vc.getGenotype("foo").isHom());
-        Assert.assertTrue(vc.getGenotype("foo").isMixed());
-        Assert.assertEquals(vc.getGenotype("foo").getType(), GenotypeType.MIXED);
-    }
-
-    @Test (expectedExceptions = Exception.class)
-    public void testBadConstructorArgs1() {
-        new VariantContextBuilder("test", insLoc, insLocStart, insLocStop, Arrays.asList(delRef, ATCref)).make();
-    }
-
-    @Test (expectedExceptions = Exception.class)
-    public void testBadConstructorArgs2() {
-        new VariantContextBuilder("test", insLoc, insLocStart, insLocStop, Arrays.asList(delRef, del)).make();
-    }
-
-    @Test (expectedExceptions = Exception.class)
-    public void testBadConstructorArgs3() {
-        new VariantContextBuilder("test", insLoc, insLocStart, insLocStop, Arrays.asList(del)).make();
-    }
-
-    @Test (expectedExceptions = Throwable.class)
-    public void testBadConstructorArgs4() {
-        new VariantContextBuilder("test", insLoc, insLocStart, insLocStop, Collections.<Allele>emptyList()).make();
-    }
-
-    @Test (expectedExceptions = Exception.class)
-    public void testBadConstructorArgsDuplicateAlleles1() {
-        new VariantContextBuilder("test", insLoc, insLocStart, insLocStop, Arrays.asList(Aref, T, T)).make();
-    }
-
-    @Test (expectedExceptions = Exception.class)
-    public void testBadConstructorArgsDuplicateAlleles2() {
-        new VariantContextBuilder("test", insLoc, insLocStart, insLocStop, Arrays.asList(Aref, A)).make();
-    }
-
-    @Test (expectedExceptions = Throwable.class)
-    public void testBadLoc1() {
-        List<Allele> alleles = Arrays.asList(Aref, T, del);
-        new VariantContextBuilder("test", delLoc, delLocStart, delLocStop, alleles).make();
-    }
-
-    @Test (expectedExceptions = Throwable.class)
-    public void testBadID1() {
-        new VariantContextBuilder("test", delLoc, delLocStart, delLocStop, Arrays.asList(Aref, T)).id(null).make();
-    }
-
-    @Test (expectedExceptions = Exception.class)
-    public void testBadID2() {
-        new VariantContextBuilder("test", delLoc, delLocStart, delLocStop, Arrays.asList(Aref, T)).id("").make();
-    }
-
-    @Test (expectedExceptions = Throwable.class)
-    public void testBadPError() {
-        new VariantContextBuilder("test", insLoc, insLocStart, insLocStop, Arrays.asList(delRef, ATCref)).log10PError(0.5).make();
-    }
-
-    @Test
-    public void testAccessingSimpleSNPGenotypes() {
-        List<Allele> alleles = Arrays.asList(Aref, T);
-
-        Genotype g1 = GenotypeBuilder.create("AA", Arrays.asList(Aref, Aref));
-        Genotype g2 = GenotypeBuilder.create("AT", Arrays.asList(Aref, T));
-        Genotype g3 = GenotypeBuilder.create("TT", Arrays.asList(T, T));
-
-        VariantContext vc = new VariantContextBuilder("test", snpLoc, snpLocStart, snpLocStop, alleles)
-                .genotypes(g1, g2, g3).make();
-
-        Assert.assertTrue(vc.hasGenotypes());
-        Assert.assertFalse(vc.isMonomorphicInSamples());
-        Assert.assertTrue(vc.isPolymorphicInSamples());
-        Assert.assertEquals(vc.getSampleNames().size(), 3);
-
-        Assert.assertEquals(vc.getGenotypes().size(), 3);
-        Assert.assertEquals(vc.getGenotypes().get("AA"), g1);
-        Assert.assertEquals(vc.getGenotype("AA"), g1);
-        Assert.assertEquals(vc.getGenotypes().get("AT"), g2);
-        Assert.assertEquals(vc.getGenotype("AT"), g2);
-        Assert.assertEquals(vc.getGenotypes().get("TT"), g3);
-        Assert.assertEquals(vc.getGenotype("TT"), g3);
-
-        Assert.assertTrue(vc.hasGenotype("AA"));
-        Assert.assertTrue(vc.hasGenotype("AT"));
-        Assert.assertTrue(vc.hasGenotype("TT"));
-        Assert.assertFalse(vc.hasGenotype("foo"));
-        Assert.assertFalse(vc.hasGenotype("TTT"));
-        Assert.assertFalse(vc.hasGenotype("at"));
-        Assert.assertFalse(vc.hasGenotype("tt"));
-
-        Assert.assertEquals(vc.getCalledChrCount(), 6);
-        Assert.assertEquals(vc.getCalledChrCount(Aref), 3);
-        Assert.assertEquals(vc.getCalledChrCount(T), 3);
-    }
-
-    @Test
-    public void testAccessingCompleteGenotypes() {
-        List<Allele> alleles = Arrays.asList(Aref, T, ATC);
-
-        Genotype g1 = GenotypeBuilder.create("AA", Arrays.asList(Aref, Aref));
-        Genotype g2 = GenotypeBuilder.create("AT", Arrays.asList(Aref, T));
-        Genotype g3 = GenotypeBuilder.create("TT", Arrays.asList(T, T));
-        Genotype g4 = GenotypeBuilder.create("Td", Arrays.asList(T, ATC));
-        Genotype g5 = GenotypeBuilder.create("dd", Arrays.asList(ATC, ATC));
-        Genotype g6 = GenotypeBuilder.create("..", Arrays.asList(Allele.NO_CALL, Allele.NO_CALL));
-
-        VariantContext vc = new VariantContextBuilder("test", snpLoc, snpLocStart, snpLocStop, alleles)
-                .genotypes(g1, g2, g3, g4, g5, g6).make();
-
-        Assert.assertTrue(vc.hasGenotypes());
-        Assert.assertFalse(vc.isMonomorphicInSamples());
-        Assert.assertTrue(vc.isPolymorphicInSamples());
-        Assert.assertEquals(vc.getGenotypes().size(), 6);
-
-        Assert.assertEquals(3, vc.getGenotypes(Arrays.asList("AA", "Td", "dd")).size());
-
-        Assert.assertEquals(10, vc.getCalledChrCount());
-        Assert.assertEquals(3, vc.getCalledChrCount(Aref));
-        Assert.assertEquals(4, vc.getCalledChrCount(T));
-        Assert.assertEquals(3, vc.getCalledChrCount(ATC));
-        Assert.assertEquals(2, vc.getCalledChrCount(Allele.NO_CALL));
-    }
-
-    @Test
-    public void testAccessingRefGenotypes() {
-        List<Allele> alleles1 = Arrays.asList(Aref, T);
-        List<Allele> alleles2 = Arrays.asList(Aref);
-        List<Allele> alleles3 = Arrays.asList(Aref, T);
-        for ( List<Allele> alleles : Arrays.asList(alleles1, alleles2, alleles3)) {
-            Genotype g1 = GenotypeBuilder.create("AA1", Arrays.asList(Aref, Aref));
-            Genotype g2 = GenotypeBuilder.create("AA2", Arrays.asList(Aref, Aref));
-            Genotype g3 = GenotypeBuilder.create("..", Arrays.asList(Allele.NO_CALL, Allele.NO_CALL));
-            VariantContext vc = new VariantContextBuilder("test", snpLoc, snpLocStart, snpLocStop, alleles)
-                    .genotypes(g1, g2, g3).make();
-
-            Assert.assertTrue(vc.hasGenotypes());
-            Assert.assertTrue(vc.isMonomorphicInSamples());
-            Assert.assertFalse(vc.isPolymorphicInSamples());
-            Assert.assertEquals(vc.getGenotypes().size(), 3);
-
-            Assert.assertEquals(4, vc.getCalledChrCount());
-            Assert.assertEquals(4, vc.getCalledChrCount(Aref));
-            Assert.assertEquals(0, vc.getCalledChrCount(T));
-            Assert.assertEquals(2, vc.getCalledChrCount(Allele.NO_CALL));
-        }
-    }
-
-    @Test
-    public void testFilters() {
-        List<Allele> alleles = Arrays.asList(Aref, T);
-        Genotype g1 = GenotypeBuilder.create("AA", Arrays.asList(Aref, Aref));
-        Genotype g2 = GenotypeBuilder.create("AT", Arrays.asList(Aref, T));
-
-        VariantContext vc = new VariantContextBuilder("test", snpLoc, snpLocStart, snpLocStop, alleles).genotypes(g1, g2).make();
-
-        Assert.assertTrue(vc.isNotFiltered());
-        Assert.assertFalse(vc.isFiltered());
-        Assert.assertEquals(0, vc.getFilters().size());
-        Assert.assertFalse(vc.filtersWereApplied());
-        Assert.assertNull(vc.getFiltersMaybeNull());
-
-        vc = new VariantContextBuilder(vc).filters("BAD_SNP_BAD!").make();
-
-        Assert.assertFalse(vc.isNotFiltered());
-        Assert.assertTrue(vc.isFiltered());
-        Assert.assertEquals(1, vc.getFilters().size());
-        Assert.assertTrue(vc.filtersWereApplied());
-        Assert.assertNotNull(vc.getFiltersMaybeNull());
-
-        Set<String> filters = new HashSet<String>(Arrays.asList("BAD_SNP_BAD!", "REALLY_BAD_SNP", "CHRIST_THIS_IS_TERRIBLE"));
-        vc = new VariantContextBuilder(vc).filters(filters).make();
-
-        Assert.assertFalse(vc.isNotFiltered());
-        Assert.assertTrue(vc.isFiltered());
-        Assert.assertEquals(3, vc.getFilters().size());
-        Assert.assertTrue(vc.filtersWereApplied());
-        Assert.assertNotNull(vc.getFiltersMaybeNull());
-    }
-
-    @Test
-    public void testGetGenotypeCounts() {
-        List<Allele> alleles = Arrays.asList(Aref, T);
-        Genotype g1 = GenotypeBuilder.create("AA", Arrays.asList(Aref, Aref));
-        Genotype g2 = GenotypeBuilder.create("AT", Arrays.asList(Aref, T));
-        Genotype g3 = GenotypeBuilder.create("TT", Arrays.asList(T, T));
-        Genotype g4 = GenotypeBuilder.create("A.", Arrays.asList(Aref, Allele.NO_CALL));
-        Genotype g5 = GenotypeBuilder.create("..", Arrays.asList(Allele.NO_CALL, Allele.NO_CALL));
-
-        // we need to create a new VariantContext each time
-        VariantContext vc = new VariantContextBuilder("foo", snpLoc, snpLocStart, snpLocStop, alleles).genotypes(g1,g2,g3,g4,g5).make();
-        Assert.assertEquals(1, vc.getHetCount());
-        vc = new VariantContextBuilder("foo", snpLoc, snpLocStart, snpLocStop, alleles).genotypes(g1,g2,g3,g4,g5).make();
-        Assert.assertEquals(1, vc.getHomRefCount());
-        vc = new VariantContextBuilder("foo", snpLoc, snpLocStart, snpLocStop, alleles).genotypes(g1,g2,g3,g4,g5).make();
-        Assert.assertEquals(1, vc.getHomVarCount());
-        vc = new VariantContextBuilder("foo", snpLoc, snpLocStart, snpLocStop, alleles).genotypes(g1,g2,g3,g4,g5).make();
-        Assert.assertEquals(1, vc.getMixedCount());
-        vc = new VariantContextBuilder("foo", snpLoc, snpLocStart, snpLocStop, alleles).genotypes(g1,g2,g3,g4,g5).make();
-        Assert.assertEquals(1, vc.getNoCallCount());
-    }
-
-    @Test
-    public void testVCFfromGenotypes() {
-        List<Allele> alleles = Arrays.asList(Aref, C, T);
-        Genotype g1 = GenotypeBuilder.create("AA", Arrays.asList(Aref, Aref));
-        Genotype g2 = GenotypeBuilder.create("AT", Arrays.asList(Aref, T));
-        Genotype g3 = GenotypeBuilder.create("TT", Arrays.asList(T, T));
-        Genotype g4 = GenotypeBuilder.create("..", Arrays.asList(Allele.NO_CALL, Allele.NO_CALL));
-        Genotype g5 = GenotypeBuilder.create("AC", Arrays.asList(Aref, C));
-        VariantContext vc = new VariantContextBuilder("genotypes", snpLoc, snpLocStart, snpLocStop, alleles).genotypes(g1,g2,g3,g4,g5).make();
-
-        VariantContext vc12 = vc.subContextFromSamples(new HashSet<String>(Arrays.asList(g1.getSampleName(), g2.getSampleName())), true);
-        VariantContext vc1 = vc.subContextFromSamples(new HashSet<String>(Arrays.asList(g1.getSampleName())), true);
-        VariantContext vc23 = vc.subContextFromSamples(new HashSet<String>(Arrays.asList(g2.getSampleName(), g3.getSampleName())), true);
-        VariantContext vc4 = vc.subContextFromSamples(new HashSet<String>(Arrays.asList(g4.getSampleName())), true);
-        VariantContext vc14 = vc.subContextFromSamples(new HashSet<String>(Arrays.asList(g1.getSampleName(), g4.getSampleName())), true);
-        VariantContext vc125 = vc.subContextFromSamples(new HashSet<String>(Arrays.asList(g1.getSampleName(), g2.getSampleName(), g5.getSampleName())), true);
-
-        Assert.assertTrue(vc12.isPolymorphicInSamples());
-        Assert.assertTrue(vc23.isPolymorphicInSamples());
-        Assert.assertTrue(vc1.isMonomorphicInSamples());
-        Assert.assertTrue(vc4.isMonomorphicInSamples());
-        Assert.assertTrue(vc14.isMonomorphicInSamples());
-        Assert.assertTrue(vc125.isPolymorphicInSamples());
-
-        Assert.assertTrue(vc12.isSNP());
-        Assert.assertTrue(vc12.isVariant());
-        Assert.assertTrue(vc12.isBiallelic());
-
-        Assert.assertFalse(vc1.isSNP());
-        Assert.assertFalse(vc1.isVariant());
-        Assert.assertFalse(vc1.isBiallelic());
-
-        Assert.assertTrue(vc23.isSNP());
-        Assert.assertTrue(vc23.isVariant());
-        Assert.assertTrue(vc23.isBiallelic());
-
-        Assert.assertFalse(vc4.isSNP());
-        Assert.assertFalse(vc4.isVariant());
-        Assert.assertFalse(vc4.isBiallelic());
-
-        Assert.assertFalse(vc14.isSNP());
-        Assert.assertFalse(vc14.isVariant());
-        Assert.assertFalse(vc14.isBiallelic());
-
-        Assert.assertTrue(vc125.isSNP());
-        Assert.assertTrue(vc125.isVariant());
-        Assert.assertFalse(vc125.isBiallelic());
-
-        Assert.assertEquals(3, vc12.getCalledChrCount(Aref));
-        Assert.assertEquals(1, vc23.getCalledChrCount(Aref));
-        Assert.assertEquals(2, vc1.getCalledChrCount(Aref));
-        Assert.assertEquals(0, vc4.getCalledChrCount(Aref));
-        Assert.assertEquals(2, vc14.getCalledChrCount(Aref));
-        Assert.assertEquals(4, vc125.getCalledChrCount(Aref));
-    }
-
-    @Test
-    public void testGetGenotypeMethods() {
-        Genotype g1 = GenotypeBuilder.create("AA", Arrays.asList(Aref, Aref));
-        Genotype g2 = GenotypeBuilder.create("AT", Arrays.asList(Aref, T));
-        Genotype g3 = GenotypeBuilder.create("TT", Arrays.asList(T, T));
-        GenotypesContext gc = GenotypesContext.create(g1, g2, g3);
-        VariantContext vc = new VariantContextBuilder("genotypes", snpLoc, snpLocStart, snpLocStop, Arrays.asList(Aref, T)).genotypes(gc).make();
-
-        Assert.assertEquals(vc.getGenotype("AA"), g1);
-        Assert.assertEquals(vc.getGenotype("AT"), g2);
-        Assert.assertEquals(vc.getGenotype("TT"), g3);
-        Assert.assertEquals(vc.getGenotype("CC"), null);
-
-        Assert.assertEquals(vc.getGenotypes(), gc);
-        Assert.assertEquals(vc.getGenotypes(Arrays.asList("AA", "AT")), Arrays.asList(g1, g2));
-        Assert.assertEquals(vc.getGenotypes(Arrays.asList("AA", "TT")), Arrays.asList(g1, g3));
-        Assert.assertEquals(vc.getGenotypes(Arrays.asList("AA", "AT", "TT")), Arrays.asList(g1, g2, g3));
-        Assert.assertEquals(vc.getGenotypes(Arrays.asList("AA", "AT", "CC")), Arrays.asList(g1, g2));
-
-        Assert.assertEquals(vc.getGenotype(0), g1);
-        Assert.assertEquals(vc.getGenotype(1), g2);
-        Assert.assertEquals(vc.getGenotype(2), g3);
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Test allele merging
-    //
-    // --------------------------------------------------------------------------------
-
-    private class GetAllelesTest {
-        List<Allele> alleles;
-        String name;
-
-        private GetAllelesTest(String name, Allele... arg) {
-            this.name = name;
-            this.alleles = Arrays.asList(arg);
-        }
-
-        public String toString() {
-            return String.format("%s input=%s", name, alleles);
-        }
-    }
-
-    @DataProvider(name = "getAlleles")
-    public Object[][] mergeAllelesData() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        tests.add(new Object[]{new GetAllelesTest("A*",   Aref)});
-        tests.add(new Object[]{new GetAllelesTest("A*/C", Aref, C)});
-        tests.add(new Object[]{new GetAllelesTest("A*/C/T", Aref, C, T)});
-        tests.add(new Object[]{new GetAllelesTest("A*/T/C", Aref, T, C)});
-        tests.add(new Object[]{new GetAllelesTest("A*/C/T/ATC", Aref, C, T, ATC)});
-        tests.add(new Object[]{new GetAllelesTest("A*/T/C/ATC", Aref, T, C, ATC)});
-        tests.add(new Object[]{new GetAllelesTest("A*/ATC/T/C", Aref, ATC, T, C)});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "getAlleles")
-    public void testMergeAlleles(GetAllelesTest cfg) {
-        final List<Allele> altAlleles = cfg.alleles.subList(1, cfg.alleles.size());
-        final VariantContext vc = new VariantContextBuilder("test", snpLoc, snpLocStart, snpLocStop, cfg.alleles).make();
-
-        Assert.assertEquals(vc.getAlleles(), cfg.alleles, "VC alleles not the same as input alleles");
-        Assert.assertEquals(vc.getNAlleles(), cfg.alleles.size(), "VC getNAlleles not the same as input alleles size");
-        Assert.assertEquals(vc.getAlternateAlleles(), altAlleles, "VC alt alleles not the same as input alt alleles");
-
-
-        for ( int i = 0; i < cfg.alleles.size(); i++ ) {
-            final Allele inputAllele = cfg.alleles.get(i);
-
-            Assert.assertTrue(vc.hasAllele(inputAllele));
-            if ( inputAllele.isReference() ) {
-                final Allele nonRefVersion = Allele.create(inputAllele.getBases(), false);
-                Assert.assertTrue(vc.hasAllele(nonRefVersion, true));
-                Assert.assertFalse(vc.hasAllele(nonRefVersion, false));
-            }
-
-            Assert.assertEquals(inputAllele, vc.getAllele(inputAllele.getBaseString()));
-            Assert.assertEquals(inputAllele, vc.getAllele(inputAllele.getBases()));
-
-            if ( i > 0 ) { // it's an alt allele
-                Assert.assertEquals(inputAllele, vc.getAlternateAllele(i-1));
-            }
-        }
-
-        final Allele missingAllele = Allele.create("AACCGGTT"); // does not exist
-        Assert.assertNull(vc.getAllele(missingAllele.getBases()));
-        Assert.assertFalse(vc.hasAllele(missingAllele));
-        Assert.assertFalse(vc.hasAllele(missingAllele, true));
-    }
-
-    private class SitesAndGenotypesVC {
-        VariantContext vc, copy;
-        String name;
-
-        private SitesAndGenotypesVC(String name, VariantContext original) {
-            this.name = name;
-            this.vc = original;
-            this.copy = new VariantContextBuilder(original).make();
-        }
-
-        public String toString() {
-            return String.format("%s input=%s", name, vc);
-        }
-    }
-
-    @DataProvider(name = "SitesAndGenotypesVC")
-    public Object[][] MakeSitesAndGenotypesVCs() {
-        Genotype g1 = GenotypeBuilder.create("AA", Arrays.asList(Aref, Aref));
-        Genotype g2 = GenotypeBuilder.create("AT", Arrays.asList(Aref, T));
-        Genotype g3 = GenotypeBuilder.create("TT", Arrays.asList(T, T));
-
-        VariantContext sites = new VariantContextBuilder("sites", snpLoc, snpLocStart, snpLocStop, Arrays.asList(Aref, T)).make();
-        VariantContext genotypes = new VariantContextBuilder(sites).source("genotypes").genotypes(g1, g2, g3).make();
-
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        tests.add(new Object[]{new SitesAndGenotypesVC("sites", sites)});
-        tests.add(new Object[]{new SitesAndGenotypesVC("genotypes", genotypes)});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Test modifying routines
-    //
-    // --------------------------------------------------------------------------------
-    @Test(dataProvider = "SitesAndGenotypesVC")
-    public void runModifyVCTests(SitesAndGenotypesVC cfg) {
-        VariantContext modified = new VariantContextBuilder(cfg.vc).loc("chr2", 123, 123).make();
-        Assert.assertEquals(modified.getChr(), "chr2");
-        Assert.assertEquals(modified.getStart(), 123);
-        Assert.assertEquals(modified.getEnd(), 123);
-
-        modified = new VariantContextBuilder(cfg.vc).id("newID").make();
-        Assert.assertEquals(modified.getID(), "newID");
-
-        Set<String> newFilters = Collections.singleton("newFilter");
-        modified = new VariantContextBuilder(cfg.vc).filters(newFilters).make();
-        Assert.assertEquals(modified.getFilters(), newFilters);
-
-        // test the behavior when the builder's attribute object is null
-        modified = new VariantContextBuilder(modified).attributes(null).make();
-        Assert.assertTrue(modified.getAttributes().isEmpty());
-        modified = new VariantContextBuilder(modified).attributes(null).rmAttribute("AC").make();
-        Assert.assertTrue(modified.getAttributes().isEmpty());
-        modified = new VariantContextBuilder(modified).attributes(null).attribute("AC", 1).make();
-        Assert.assertEquals(modified.getAttribute("AC"), 1);
-
-        // test the behavior when the builder's attribute object is not initialized
-        modified = new VariantContextBuilder(modified.getSource(), modified.getChr(), modified.getStart(), modified.getEnd(), modified.getAlleles()).attribute("AC", 1).make();
-
-        // test normal attribute modification
-        modified = new VariantContextBuilder(cfg.vc).attribute("AC", 1).make();
-        Assert.assertEquals(modified.getAttribute("AC"), 1);
-        modified = new VariantContextBuilder(modified).attribute("AC", 2).make();
-        Assert.assertEquals(modified.getAttribute("AC"), 2);
-
-        Genotype g1 = GenotypeBuilder.create("AA2", Arrays.asList(Aref, Aref));
-        Genotype g2 = GenotypeBuilder.create("AT2", Arrays.asList(Aref, T));
-        Genotype g3 = GenotypeBuilder.create("TT2", Arrays.asList(T, T));
-        GenotypesContext gc = GenotypesContext.create(g1,g2,g3);
-        modified = new VariantContextBuilder(cfg.vc).genotypes(gc).make();
-        Assert.assertEquals(modified.getGenotypes(), gc);
-        modified = new VariantContextBuilder(cfg.vc).noGenotypes().make();
-        Assert.assertTrue(modified.getGenotypes().isEmpty());
-
-        // test that original hasn't changed
-        Assert.assertEquals(cfg.vc.getChr(), cfg.copy.getChr());
-        Assert.assertEquals(cfg.vc.getStart(), cfg.copy.getStart());
-        Assert.assertEquals(cfg.vc.getEnd(), cfg.copy.getEnd());
-        Assert.assertEquals(cfg.vc.getAlleles(), cfg.copy.getAlleles());
-        Assert.assertEquals(cfg.vc.getAttributes(), cfg.copy.getAttributes());
-        Assert.assertEquals(cfg.vc.getID(), cfg.copy.getID());
-        Assert.assertEquals(cfg.vc.getGenotypes(), cfg.copy.getGenotypes());
-        Assert.assertEquals(cfg.vc.getLog10PError(), cfg.copy.getLog10PError());
-        Assert.assertEquals(cfg.vc.getFilters(), cfg.copy.getFilters());
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Test subcontext
-    //
-    // --------------------------------------------------------------------------------
-    private class SubContextTest {
-        Set<String> samples;
-        boolean updateAlleles;
-
-        private SubContextTest(Collection<String> samples, boolean updateAlleles) {
-            this.samples = new HashSet<String>(samples);
-            this.updateAlleles = updateAlleles;
-        }
-
-        public String toString() {
-            return String.format("%s samples=%s updateAlleles=%b", "SubContextTest", samples, updateAlleles);
-        }
-    }
-
-    @DataProvider(name = "SubContextTest")
-    public Object[][] MakeSubContextTest() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        for ( boolean updateAlleles : Arrays.asList(true, false)) {
-            tests.add(new Object[]{new SubContextTest(Collections.<String>emptySet(), updateAlleles)});
-            tests.add(new Object[]{new SubContextTest(Collections.singleton("MISSING"), updateAlleles)});
-            tests.add(new Object[]{new SubContextTest(Collections.singleton("AA"), updateAlleles)});
-            tests.add(new Object[]{new SubContextTest(Collections.singleton("AT"), updateAlleles)});
-            tests.add(new Object[]{new SubContextTest(Collections.singleton("TT"), updateAlleles)});
-            tests.add(new Object[]{new SubContextTest(Arrays.asList("AA", "AT"), updateAlleles)});
-            tests.add(new Object[]{new SubContextTest(Arrays.asList("AA", "AT", "TT"), updateAlleles)});
-            tests.add(new Object[]{new SubContextTest(Arrays.asList("AA", "AT", "MISSING"), updateAlleles)});
-            tests.add(new Object[]{new SubContextTest(Arrays.asList("AA", "AT", "TT", "MISSING"), updateAlleles)});
-            tests.add(new Object[]{new SubContextTest(Arrays.asList("AA", "AT", "AC"), updateAlleles)});
-        }
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    @Test(dataProvider = "SubContextTest")
-    public void runSubContextTest(SubContextTest cfg) {
-        Genotype g1 = GenotypeBuilder.create("AA", Arrays.asList(Aref, Aref));
-        Genotype g2 = GenotypeBuilder.create("AT", Arrays.asList(Aref, T));
-        Genotype g3 = GenotypeBuilder.create("TT", Arrays.asList(T, T));
-        Genotype g4 = GenotypeBuilder.create("AC", Arrays.asList(Aref, C));
-
-        GenotypesContext gc = GenotypesContext.create(g1, g2, g3, g4);
-        VariantContext vc = new VariantContextBuilder("genotypes", snpLoc, snpLocStart, snpLocStop, Arrays.asList(Aref, C, T)).genotypes(gc).make();
-        VariantContext sub = vc.subContextFromSamples(cfg.samples, cfg.updateAlleles);
-
-        // unchanged attributes should be the same
-        Assert.assertEquals(sub.getChr(), vc.getChr());
-        Assert.assertEquals(sub.getStart(), vc.getStart());
-        Assert.assertEquals(sub.getEnd(), vc.getEnd());
-        Assert.assertEquals(sub.getLog10PError(), vc.getLog10PError());
-        Assert.assertEquals(sub.getFilters(), vc.getFilters());
-        Assert.assertEquals(sub.getID(), vc.getID());
-        Assert.assertEquals(sub.getAttributes(), vc.getAttributes());
-
-        Set<Genotype> expectedGenotypes = new HashSet<Genotype>();
-        if ( cfg.samples.contains(g1.getSampleName()) ) expectedGenotypes.add(g1);
-        if ( cfg.samples.contains(g2.getSampleName()) ) expectedGenotypes.add(g2);
-        if ( cfg.samples.contains(g3.getSampleName()) ) expectedGenotypes.add(g3);
-        if ( cfg.samples.contains(g4.getSampleName()) ) expectedGenotypes.add(g4);
-        GenotypesContext expectedGC = GenotypesContext.copy(expectedGenotypes);
-
-        // these values depend on the results of sub
-        if ( cfg.updateAlleles ) {
-            // do the work to see what alleles should be here, and which not
-            List<Allele> expectedAlleles = new ArrayList<Allele>();
-            expectedAlleles.add(Aref);
-
-            Set<Allele> genotypeAlleles = new HashSet<Allele>();
-            for ( final Genotype g : expectedGC )
-                genotypeAlleles.addAll(g.getAlleles());
-            genotypeAlleles.remove(Aref);
-
-            // ensure original allele order
-            for (Allele allele: vc.getAlleles())
-                if (genotypeAlleles.contains(allele))
-                    expectedAlleles.add(allele);
-
-            Assert.assertEquals(sub.getAlleles(), expectedAlleles);
-        } else {
-            // not updating alleles -- should be the same
-            Assert.assertEquals(sub.getAlleles(), vc.getAlleles());
-        }
-
-        // same sample names => success
-        Assert.assertTrue(sub.getGenotypes().getSampleNames().equals(expectedGC.getSampleNames()));
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Test sample name functions
-    //
-    // --------------------------------------------------------------------------------
-    private class SampleNamesTest {
-        List<String> sampleNames;
-        List<String> sampleNamesInOrder;
-
-        private SampleNamesTest(List<String> sampleNames, List<String> sampleNamesInOrder) {
-            this.sampleNamesInOrder = sampleNamesInOrder;
-            this.sampleNames = sampleNames;
-        }
-
-        public String toString() {
-            return String.format("%s samples=%s order=%s", "SampleNamesTest", sampleNames, sampleNamesInOrder);
-        }
-    }
-
-    @DataProvider(name = "SampleNamesTest")
-    public Object[][] MakeSampleNamesTest() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-        tests.add(new Object[]{new SampleNamesTest(Arrays.asList("1"), Arrays.asList("1"))});
-        tests.add(new Object[]{new SampleNamesTest(Arrays.asList("2", "1"), Arrays.asList("1", "2"))});
-        tests.add(new Object[]{new SampleNamesTest(Arrays.asList("1", "2"), Arrays.asList("1", "2"))});
-        tests.add(new Object[]{new SampleNamesTest(Arrays.asList("1", "2", "3"), Arrays.asList("1", "2", "3"))});
-        tests.add(new Object[]{new SampleNamesTest(Arrays.asList("2", "1", "3"), Arrays.asList("1", "2", "3"))});
-        tests.add(new Object[]{new SampleNamesTest(Arrays.asList("2", "3", "1"), Arrays.asList("1", "2", "3"))});
-        tests.add(new Object[]{new SampleNamesTest(Arrays.asList("3", "1", "2"), Arrays.asList("1", "2", "3"))});
-        tests.add(new Object[]{new SampleNamesTest(Arrays.asList("3", "2", "1"), Arrays.asList("1", "2", "3"))});
-        tests.add(new Object[]{new SampleNamesTest(Arrays.asList("NA2", "NA1"), Arrays.asList("NA1", "NA2"))});
-
-        return tests.toArray(new Object[][]{});
-    }
-
-    private final static void assertGenotypesAreInOrder(Iterable<Genotype> gIt, List<String> names) {
-        int i = 0;
-        for ( final Genotype g : gIt ) {
-            Assert.assertEquals(g.getSampleName(), names.get(i), "Unexpected genotype ordering");
-            i++;
-        }
-    }
-
-
-    @Test(dataProvider = "SampleNamesTest")
-    public void runSampleNamesTest(SampleNamesTest cfg) {
-        GenotypesContext gc = GenotypesContext.create(cfg.sampleNames.size());
-        for ( final String name : cfg.sampleNames ) {
-            gc.add(GenotypeBuilder.create(name, Arrays.asList(Aref, T)));
-        }
-
-        VariantContext vc = new VariantContextBuilder("genotypes", snpLoc, snpLocStart, snpLocStop, Arrays.asList(Aref, T)).genotypes(gc).make();
-
-        // same sample names => success
-        Assert.assertTrue(vc.getSampleNames().equals(new HashSet<String>(cfg.sampleNames)), "vc.getSampleNames() = " + vc.getSampleNames());
-        Assert.assertEquals(vc.getSampleNamesOrderedByName(), cfg.sampleNamesInOrder, "vc.getSampleNamesOrderedByName() = " + vc.getSampleNamesOrderedByName());
-
-        assertGenotypesAreInOrder(vc.getGenotypesOrderedByName(), cfg.sampleNamesInOrder);
-        assertGenotypesAreInOrder(vc.getGenotypesOrderedBy(cfg.sampleNames), cfg.sampleNames);
-    }
-
-    @Test
-    public void testGenotypeCounting() {
-        Genotype noCall = GenotypeBuilder.create("nocall", Arrays.asList(Allele.NO_CALL));
-        Genotype mixed  = GenotypeBuilder.create("mixed", Arrays.asList(Aref, Allele.NO_CALL));
-        Genotype homRef = GenotypeBuilder.create("homRef", Arrays.asList(Aref, Aref));
-        Genotype het    = GenotypeBuilder.create("het", Arrays.asList(Aref, T));
-        Genotype homVar = GenotypeBuilder.create("homVar", Arrays.asList(T, T));
-
-        List<Genotype> allGenotypes = Arrays.asList(noCall, mixed, homRef, het, homVar);
-        final int nCycles = allGenotypes.size() * 10;
-
-        for ( int i = 0; i < nCycles; i++ ) {
-            int nNoCall = 0, nNoCallAlleles = 0, nA = 0, nT = 0, nMixed = 0, nHomRef = 0, nHet = 0, nHomVar = 0;
-            int nSamples = 0;
-            GenotypesContext gc = GenotypesContext.create();
-            for ( int j = 0; j < i; j++ ) {
-                nSamples++;
-                Genotype g = allGenotypes.get(j % allGenotypes.size());
-                final String name = String.format("%s_%d%d", g.getSampleName(), i, j);
-                gc.add(GenotypeBuilder.create(name, g.getAlleles()));
-                switch ( g.getType() ) {
-                    case NO_CALL: nNoCall++; nNoCallAlleles++; break;
-                    case HOM_REF: nA += 2; nHomRef++; break;
-                    case HET: nA++; nT++; nHet++; break;
-                    case HOM_VAR: nT += 2; nHomVar++; break;
-                    case MIXED: nA++; nNoCallAlleles++; nMixed++; break;
-                    default: throw new RuntimeException("Unexpected genotype type " + g.getType());
-                }
-
-            }
-
-            VariantContext vc = new VariantContextBuilder("genotypes", snpLoc, snpLocStart, snpLocStop, Arrays.asList(Aref, T)).genotypes(gc).make();
-            Assert.assertEquals(vc.getNSamples(), nSamples);
-            if ( nSamples > 0 ) {
-                Assert.assertEquals(vc.isPolymorphicInSamples(), nT > 0);
-                Assert.assertEquals(vc.isMonomorphicInSamples(), nT == 0);
-            }
-            Assert.assertEquals(vc.getCalledChrCount(), nA + nT);
-
-            Assert.assertEquals(vc.getCalledChrCount(Allele.NO_CALL), nNoCallAlleles);
-            Assert.assertEquals(vc.getCalledChrCount(Aref), nA);
-            Assert.assertEquals(vc.getCalledChrCount(T), nT);
-
-            Assert.assertEquals(vc.getNoCallCount(), nNoCall);
-            Assert.assertEquals(vc.getHomRefCount(), nHomRef);
-            Assert.assertEquals(vc.getHetCount(), nHet);
-            Assert.assertEquals(vc.getHomVarCount(), nHomVar);
-            Assert.assertEquals(vc.getMixedCount(), nMixed);
-        }
-    }
-    @Test
-    public void testSetAttribute() {
-        VariantContextBuilder builder = new VariantContextBuilder();
-        builder.attribute("Test", "value");
-    }
-
-    // --------------------------------------------------------------------------------
-    //
-    // Test validation methods
-    //
-    // --------------------------------------------------------------------------------
-
-    // create a VariantContext object for various alleles and genotypes to test against
-    private VariantContext createTestVariantContext(final List<Allele> alleles, final Map<String, Object> attributes, final Genotype... genotypes) {
-        final EnumSet<VariantContext.Validation> toValidate = EnumSet.noneOf(VariantContext.Validation.class);
-        final Set<String> filters = null;
-        final boolean fullyDecoded = false;
-
-        // when no genotypes are supplied we need to use GenotypesContext.NO_GENOTYPES,
-        // otherwise we build up a GenotypesContext from the passed genotypes
-        final GenotypesContext gc;
-        if (genotypes == null || genotypes.length == 0) {
-            gc = GenotypesContext.NO_GENOTYPES;
-        } else {
-            gc = new GenotypesContext();
-            for (final Genotype genotype : genotypes) {
-                gc.add(genotype);
-            }
-        }
-        // most of the fields are not important to these tests; we just need the alleles and the GenotypesContext set properly
-        return new VariantContext("genotypes", VCFConstants.EMPTY_ID_FIELD, snpLoc, snpLocStart, snpLocStop, alleles,
-                gc, VariantContext.NO_LOG10_PERROR, filters, attributes,
-                fullyDecoded, toValidate);
-    }
-
-    // validateReferenceBases: PASS conditions
-    @DataProvider
-    public Object[][] testValidateReferencesBasesDataProvider() {
-        final VariantContext vc = createValidateReferencesContext(Arrays.asList(Aref, T));
-        return new Object[][]{
-                // null ref will pass validation
-                {vc, null, A},
-                // A vs A-ref will pass validation
-                {vc, Aref, A}
-        };
-    }
-    @Test(dataProvider = "testValidateReferencesBasesDataProvider")
-    public void testValidateReferenceBases(final VariantContext vc, final Allele allele1, final Allele allele2) {
-        // validateReferenceBases throws exceptions if it fails, so no Asserts here...
-        vc.validateReferenceBases(allele1, allele2);
-    }
-    // validateReferenceBases: FAIL conditions
-    @DataProvider
-    public Object[][] testValidateReferencesBasesFailureDataProvider() {
-        final VariantContext vc = createValidateReferencesContext(Arrays.asList(Aref, T));
-
-        final Allele symbolicAllele = Allele.create("<A>");
-
-        return new Object[][]{
-                // T vs A-ref will NOT pass validation
-                {vc, Aref, T},
-                // symbolic alleles will NOT pass validation
-                {vc, Aref, symbolicAllele}
-        };
-    }
-    @Test(dataProvider = "testValidateReferencesBasesFailureDataProvider", expectedExceptions = TribbleException.class)
-    public void testValidateReferenceBasesFailure(final VariantContext vc, final Allele allele1, final Allele allele2) {
-        // validateReferenceBases throws exceptions if it fails, so no Asserts here...
-        vc.validateReferenceBases(allele1, allele2);
-    }
-    private VariantContext createValidateReferencesContext(final List<Allele> alleles) {
-        return createTestVariantContext(alleles, null);
-    }
-
-
-    // validateRSIDs: PASS conditions
-    @DataProvider
-    public Object[][] testValidateRSIDsDataProvider() {
-        final VariantContext vcNoId = createTestVariantContextRsIds(VCFConstants.EMPTY_ID_FIELD);
-        final VariantContext vcNonRs = createTestVariantContextRsIds("abc456");
-        final VariantContext vc = createTestVariantContextRsIds("rs123");
-        final VariantContext vcMultipleRs = createTestVariantContextRsIds("rs123;rs456;rs789");
-
-        return new Object[][]{
-                // no ID will pass validation
-                {vcNoId, makeRsIDsSet("rs123")},
-                // non-rs ID will pass validation
-                {vcNonRs, makeRsIDsSet("rs123")},
-                // matching ID will pass validation
-                {vc, makeRsIDsSet("rs123")},
-                // null rsIDs to check will pass validation
-                {vc, null},
-                // context with multiple rsIDs that are contained within the rsID list will pass
-                {vcMultipleRs, makeRsIDsSet("rs123", "rs321", "rs456", "rs654", "rs789")}
-        };
-    }
-    @Test(dataProvider = "testValidateRSIDsDataProvider")
-    public void testValidateRSIDs(final VariantContext vc, final Set<String> rsIDs) {
-        // validateRSIDs throws exceptions if it fails, so no Asserts here...
-        vc.validateRSIDs(rsIDs);
-    }
-    // validateRSIDs: FAIL conditions
-    @DataProvider
-    public Object[][] testValidateRSIDsFailureDataProvider() {
-        final VariantContext vc = createTestVariantContextRsIds("rs123");
-        final VariantContext vcMultipleRs = createTestVariantContextRsIds("rs123;rs456;rs789");
-
-        return new Object[][]{
-                // mismatching ID will fail validation
-                {vc, makeRsIDsSet("rs123456")},
-                // a context with multiple rsIDs that are not all contained in the rsID set will fail validation
-                {vcMultipleRs, makeRsIDsSet("rs456")}
-        };
-    }
-    @Test(dataProvider = "testValidateRSIDsFailureDataProvider", expectedExceptions = TribbleException.class)
-    public void testValidateRSIDsFailure(final VariantContext vc, final Set<String> rsIDs) {
-        // validateRSIDs throws exceptions if it fails, so no Asserts here...
-        vc.validateRSIDs(rsIDs);
-    }
-    // create a VariantContext appropriate for testing rsIDs
-    private VariantContext createTestVariantContextRsIds(final String rsId) {
-        final EnumSet<VariantContext.Validation> toValidate = EnumSet.noneOf(VariantContext.Validation.class);
-        final Set<String> filters = null;
-        final Map<String, Object> attributes = null;
-        final boolean fullyDecoded = false;
-
-        return new VariantContext("genotypes", rsId, snpLoc, snpLocStart, snpLocStop, Arrays.asList(Aref, T),
-                GenotypesContext.NO_GENOTYPES, VariantContext.NO_LOG10_PERROR, filters, attributes,
-                fullyDecoded, toValidate);
-    }
-    private Set<String> makeRsIDsSet(final String... rsIds) {
-        return new HashSet<String>(Arrays.asList(rsIds));
-    }
-
-
-    // validateAlternateAlleles: PASS conditions
-    @DataProvider
-    public Object[][] testValidateAlternateAllelesDataProvider() {
-        final Genotype homVarT = GenotypeBuilder.create("homVarT", Arrays.asList(T, T));
-
-        // no genotypes passes validateAlternateAlleles
-        final VariantContext vcNoGenotypes =
-                // A-ref/T with no GT
-                createValidateAlternateAllelesContext(Arrays.asList(Aref, T));
-
-        // genotypes that match ALTs will pass
-        final VariantContext vcHasGenotypes =
-                // A-ref/T vs T/T
-                createValidateAlternateAllelesContext(Arrays.asList(Aref, T), homVarT);
-
-        return new Object[][]{
-                {vcNoGenotypes},
-                {vcHasGenotypes}
-        };
-    }
-    @Test(dataProvider = "testValidateAlternateAllelesDataProvider")
-    public void testValidateAlternateAlleles(final VariantContext vc) {
-        // validateAlternateAlleles throws exceptions if it fails, so no Asserts here...
-        vc.validateAlternateAlleles();
-    }
-    // validateAlternateAlleles: FAIL conditions
-    @DataProvider
-    public Object[][] testValidateAlternateAllelesFailureDataProvider() {
-        final Genotype homRef = GenotypeBuilder.create("homRef", Arrays.asList(Aref, Aref));
-        final Genotype homVarA = GenotypeBuilder.create("homVarA", Arrays.asList(A, A));
-
-        // alts not observed in the genotypes will fail validation
-        // this is the throw in VariantContext from: if ( reportedAlleles.size() != observedAlleles.size() )
-        final VariantContext vcHasAltNotObservedInGT =
-                // A-ref/T vs A-ref/A-ref
-                createValidateAlternateAllelesContext(Arrays.asList(Aref, T), homRef);
-
-        // alts not observed in the genotypes will fail validation
-        // but this time it is the second throw in VariantContext after: observedAlleles.retainAll(reportedAlleles);
-        final VariantContext vcHasAltNotObservedInGTIntersection =
-                // A-ref/T vs A/A
-                createValidateAlternateAllelesContext(Arrays.asList(Aref, T), homVarA);
-
-        return new Object[][]{
-                {vcHasAltNotObservedInGT},
-                {vcHasAltNotObservedInGTIntersection}
-        };
-    }
-    @Test(dataProvider = "testValidateAlternateAllelesFailureDataProvider", expectedExceptions = TribbleException.class)
-    public void testValidateAlternateAllelesFailure(final VariantContext vc) {
-        // validateAlternateAlleles throws exceptions if it fails, so no Asserts here...
-        vc.validateAlternateAlleles();
-    }
-    private VariantContext createValidateAlternateAllelesContext(final List<Allele> alleles, final Genotype... genotypes) {
-        return createTestVariantContext(alleles, null, genotypes);
-    }
-
-
-
-    // validateChromosomeCounts: PASS conditions
-    @DataProvider
-    public Object[][] testValidateChromosomeCountsDataProvider() {
-        final Genotype homRef = GenotypeBuilder.create("homRef", Arrays.asList(Aref, Aref));
-        final Genotype homVarT = GenotypeBuilder.create("homVarT", Arrays.asList(T, T));
-        final Genotype hetVarTC = GenotypeBuilder.create("hetVarTC", Arrays.asList(T, C));
-        final Genotype homRefNoCall = GenotypeBuilder.create("homRefNoCall", Arrays.asList(Aref, Allele.NO_CALL));
-
-
-        // no genotypes passes validateChromosomeCounts
-        final VariantContext vcNoGenotypes =
-                // A-ref/T with no GT
-                createValidateChromosomeCountsContext(Arrays.asList(Aref, T), null);
-
-        /** AN : total number of alleles in called genotypes **/
-        // with AN set and hom-ref, we expect AN to be 2 for Aref/Aref
-        final Map<String, Object> attributesAN = new HashMap<String, Object>();
-        attributesAN.put(VCFConstants.ALLELE_NUMBER_KEY, "2");
-        final VariantContext vcANSet =
-                createValidateChromosomeCountsContext(Arrays.asList(Aref), attributesAN, homRef);
-
-        // with AN set, one no-call (no-calls get ignored by getCalledChrCount() in VariantContext)
-        // we expect AN to be 1 for Aref/no-call
-        final Map<String, Object> attributesANNoCall = new HashMap<String, Object>();
-        attributesANNoCall.put(VCFConstants.ALLELE_NUMBER_KEY, "1");
-        final VariantContext vcANSetNoCall =
-                createValidateChromosomeCountsContext(Arrays.asList(Aref), attributesANNoCall, homRefNoCall);
-
-
-        /** AC : allele count in genotypes, for each ALT allele, in the same order as listed **/
-        // with AC set, and T/T, we expect AC to be 2 (for 2 counts of ALT T)
-        final Map<String, Object> attributesAC = new HashMap<String, Object>();
-        attributesAC.put(VCFConstants.ALLELE_COUNT_KEY, "2");
-        final VariantContext vcACSet =
-                createValidateChromosomeCountsContext(Arrays.asList(Aref, T), attributesAC, homVarT);
-
-        // with AC set and no ALT (GT is 0/0), we expect AC count to be 0
-        final Map<String, Object> attributesACNoAlts = new HashMap<String, Object>();
-        attributesACNoAlts.put(VCFConstants.ALLELE_COUNT_KEY, "0");
-        final VariantContext vcACSetNoAlts =
-                createValidateChromosomeCountsContext(Arrays.asList(Aref), attributesACNoAlts, homRef);
-
-        // with AC set, and two different ALTs (T and C), with GT of 1/2, we expect a count of 1 for each.
-        // With two ALTs, a list is expected, so we set the attribute as a list of 1,1
-        final Map<String, Object> attributesACTwoAlts = new HashMap<String, Object>();
-        attributesACTwoAlts.put(VCFConstants.ALLELE_COUNT_KEY, Arrays.asList("1", "1"));
-        final VariantContext vcACSetTwoAlts =
-                createValidateChromosomeCountsContext(Arrays.asList(Aref, T, C), attributesACTwoAlts, hetVarTC);
-
-        return new Object[][]{
-                {vcNoGenotypes},
-                {vcANSet},
-                {vcANSetNoCall},
-                {vcACSet},
-                {vcACSetNoAlts},
-                {vcACSetTwoAlts}
-        };
-    }
-    @Test(dataProvider = "testValidateChromosomeCountsDataProvider")
-    public void testValidateChromosomeCounts(final VariantContext vc) {
-        // validateChromosomeCounts throws exceptions if it fails, so no Asserts here...
-        vc.validateChromosomeCounts();
-    }
-    // validateChromosomeCounts: FAIL conditions
-    @DataProvider
-    public Object[][] testValidateChromosomeCountsFailureDataProvider() {
-        final Genotype homRef = GenotypeBuilder.create("homRef", Arrays.asList(Aref, Aref));
-        final Genotype hetVarTC = GenotypeBuilder.create("hetVarTC", Arrays.asList(T, C));
-        final Genotype homRefNoCall = GenotypeBuilder.create("homRefNoCall", Arrays.asList(Aref, Allele.NO_CALL));
-
-        /** AN : total number of alleles in called genotypes **/
-        // with AN set and hom-ref, we expect AN to be 2 for Aref/Aref, so 3 will fail
-        final Map<String, Object> attributesAN = new HashMap<String, Object>();
-        attributesAN.put(VCFConstants.ALLELE_NUMBER_KEY, "3");
-        final VariantContext vcANSet =
-                createValidateChromosomeCountsContext(Arrays.asList(Aref), attributesAN, homRef);
-
-        // with AN set, one no-call (no-calls get ignored by getCalledChrCount() in VariantContext)
-        // we expect AN to be 1 for Aref/no-call, so 2 will fail
-        final Map<String, Object> attributesANNoCall = new HashMap<String, Object>();
-        attributesANNoCall.put(VCFConstants.ALLELE_NUMBER_KEY, "2");
-        final VariantContext vcANSetNoCall =
-                createValidateChromosomeCountsContext(Arrays.asList(Aref), attributesANNoCall, homRefNoCall);
-
-        /** AC : allele count in genotypes, for each ALT allele, in the same order as listed **/
-        // with AC set but no ALTs, we expect a count of 0, so the wrong count will fail here
-        final Map<String, Object> attributesACWrongCount = new HashMap<String, Object>();
-        attributesACWrongCount.put(VCFConstants.ALLELE_COUNT_KEY, "2");
-        final VariantContext vcACWrongCount =
-                createValidateChromosomeCountsContext(Arrays.asList(Aref), attributesACWrongCount, homRef);
-
-        // with AC set, two ALTs, but AC is not a list with count for each ALT
-        final Map<String, Object> attributesACTwoAlts = new HashMap<String, Object>();
-        attributesACTwoAlts.put(VCFConstants.ALLELE_COUNT_KEY, "1");
-        final VariantContext vcACSetTwoAlts =
-                createValidateChromosomeCountsContext(Arrays.asList(Aref, T, C), attributesACTwoAlts, hetVarTC);
-
-        // with AC set, two ALTs, and a list is correctly used, but wrong counts (we expect counts to be 1,1)
-        final Map<String, Object> attributesACTwoAltsWrongCount = new HashMap<String, Object>();
-        attributesACTwoAltsWrongCount.put(VCFConstants.ALLELE_COUNT_KEY, Arrays.asList("1", "2"));
-        final VariantContext vcACSetTwoAltsWrongCount =
-                createValidateChromosomeCountsContext(Arrays.asList(Aref, T, C), attributesACTwoAltsWrongCount, hetVarTC);
-
-        // with AC set, two ALTs, but only count for one ALT (we expect two items in the list: 1,1)
-        final Map<String, Object> attributesACTwoAltsOneAltCount = new HashMap<String, Object>();
-        attributesACTwoAltsOneAltCount.put(VCFConstants.ALLELE_COUNT_KEY, Arrays.asList("1"));
-        final VariantContext vcACSetTwoAltsOneAltCount =
-                createValidateChromosomeCountsContext(Arrays.asList(Aref, T, C), attributesACTwoAltsOneAltCount, hetVarTC);
-
-        return new Object[][]{
-                {vcANSet},
-                {vcANSetNoCall},
-                {vcACWrongCount},
-                {vcACSetTwoAlts},
-                {vcACSetTwoAltsWrongCount},
-                {vcACSetTwoAltsOneAltCount}
-        };
-    }
-    @Test(dataProvider = "testValidateChromosomeCountsFailureDataProvider", expectedExceptions = TribbleException.class)
-    public void testValidateChromosomeCountsFailure(final VariantContext vc) {
-        // validateChromosomeCounts throws exceptions if it fails, so no Asserts here...
-        vc.validateChromosomeCounts();
-    }
-    private VariantContext createValidateChromosomeCountsContext(final List<Allele> alleles, final Map<String, Object> attributes, final Genotype... genotypes) {
-        return createTestVariantContext(alleles, attributes, genotypes);
-    }
-
-
-    // the extraStrictValidation method calls the other validation methods
-    @DataProvider
-    public Object[][] testExtraStrictValidationDataProvider() {
-        // get the data providers for each of the passing tests of the individual methods
-        final Object[][] passingValidateReferenceBasesData = testValidateReferencesBasesDataProvider();
-        final Object[][] passingValidateRSIDsData = testValidateRSIDsDataProvider();
-        final Object[][] passingValidateAlternateAllelesData = testValidateAlternateAllelesDataProvider();
-        final Object[][] passingValidateChromosomeCountsData = testValidateChromosomeCountsDataProvider();
-
-        // the total number of tests we will run here is the sum of each of the test cases
-        final int numDataPoints =
-                passingValidateReferenceBasesData.length +
-                        passingValidateRSIDsData.length +
-                        passingValidateAlternateAllelesData.length +
-                        passingValidateChromosomeCountsData.length;
-
-        // create the data provider structure for this extra strict test
-        final Object[][] extraStrictData = new Object[numDataPoints][];
-
-        int testNum = 0;
-        for (final Object[] testRefBases : passingValidateReferenceBasesData) {
-            final VariantContext vc = (VariantContext) testRefBases[0];
-            final Allele refAllele = (Allele) testRefBases[1];
-            final Allele allele = (Allele) testRefBases[2];
-
-            // for this test, rsIds does not matter, so we hold it constant
-            extraStrictData[testNum++] = new Object[]{vc, refAllele, allele, null};
-        }
-
-        for (final Object[] testRsIDs : passingValidateRSIDsData) {
-            final VariantContext vc = (VariantContext) testRsIDs[0];
-            final Set<String> rsIDs = (Set<String>) testRsIDs[1];
-
-            // for this test, reportedReference and observedReference do not matter,
-            // so we hold them constant
-            extraStrictData[testNum++] = new Object[]{vc, Tref, T, rsIDs};
-        }
-
-        for (final Object[] testAlternateAlleles : passingValidateAlternateAllelesData) {
-            final VariantContext vc = (VariantContext) testAlternateAlleles[0];
-
-            // for this test, only VariantContext is used, so we hold
-            // reportedReference, observedReference and rsIds constant
-            extraStrictData[testNum++] = new Object[]{vc, Tref, T, null};
-        }
-
-        for (final Object[] testChromosomeCounts : passingValidateChromosomeCountsData) {
-            final VariantContext vc = (VariantContext) testChromosomeCounts[0];
-
-            // for this test, only VariantContext is used, so we hold
-            // reportedReference, observedReference and rsIds constant
-            extraStrictData[testNum++] = new Object[]{vc, Tref, T, null};
-        }
-
-        return extraStrictData;
-    }
-
-    @DataProvider(name = "serializationTestData")
-    public Object[][] getSerializationTestData() {
-        return new Object[][] {
-                { new File("testdata/htsjdk/variant/HiSeq.10000.vcf"), new VCFCodec() },
-                { new File("testdata/htsjdk/variant/serialization_test.bcf"), new BCF2Codec() }
-        };
-    }
-
-    @Test(dataProvider = "serializationTestData")
-    public void testSerialization( final File testFile, final FeatureCodec<VariantContext, ?> codec ) throws Exception {
-        final AbstractFeatureReader<VariantContext, ?> featureReader = AbstractFeatureReader.getFeatureReader(testFile.getAbsolutePath(), codec, false);
-        final VariantContext initialVC = featureReader.iterator().next();
-
-        final VariantContext vcDeserialized = TestUtil.serializeAndDeserialize(initialVC);
-
-        assertVariantContextsAreEqual(vcDeserialized, initialVC);
-    }
-
-    @Test(dataProvider = "testExtraStrictValidationDataProvider")
-    public void testExtraStrictValidation(final VariantContext vc, final Allele reportedReference, final Allele observedReference, final Set<String> rsIDs) {
-        // extraStrictValidation throws exceptions if it fails, so no Asserts here...
-        vc.extraStrictValidation(reportedReference, observedReference, rsIDs);
-    }
-    @DataProvider
-    public Object[][] testExtraStrictValidationFailureDataProvider() {
-        // get the data providers for each of the failure tests of the individual methods
-        final Object[][] failingValidateReferenceBasesData = testValidateReferencesBasesFailureDataProvider();
-        final Object[][] failingValidateRSIDsData = testValidateRSIDsFailureDataProvider();
-        final Object[][] failingValidateAlternateAllelesData = testValidateAlternateAllelesFailureDataProvider();
-        final Object[][] failingValidateChromosomeCountsData = testValidateChromosomeCountsFailureDataProvider();
-
-        // the total number of tests we will run here is the sum of each of the test cases
-        final int numDataPoints =
-                failingValidateReferenceBasesData.length +
-                        failingValidateRSIDsData.length +
-                        failingValidateAlternateAllelesData.length +
-                        failingValidateChromosomeCountsData.length;
-
-        // create the data provider structure for this extra strict test
-        final Object[][] extraStrictData = new Object[numDataPoints][];
-
-        int testNum = 0;
-        for (final Object[] testRefBases : failingValidateReferenceBasesData) {
-            final VariantContext vc = (VariantContext) testRefBases[0];
-            final Allele refAllele = (Allele) testRefBases[1];
-            final Allele allele = (Allele) testRefBases[2];
-
-            // for this test, rsIds does not matter, so we hold it constant
-            extraStrictData[testNum++] = new Object[]{vc, refAllele, allele, null};
-        }
-
-        for (final Object[] testRsIDs : failingValidateRSIDsData) {
-            final VariantContext vc = (VariantContext) testRsIDs[0];
-            final Set<String> rsIDs = (Set<String>) testRsIDs[1];
-
-            // for this test, reportedReference and observedReference do not matter,
-            // so we hold them constant
-            extraStrictData[testNum++] = new Object[]{vc, Tref, T, rsIDs};
-        }
-
-        for (final Object[] testAlternateAlleles : failingValidateAlternateAllelesData) {
-            final VariantContext vc = (VariantContext) testAlternateAlleles[0];
-
-            // for this test, only VariantContext is used, so we hold
-            // reportedReference, observedReference and rsIds constant
-            extraStrictData[testNum++] = new Object[]{vc, Tref, T, null};
-        }
-
-        for (final Object[] testChromosomeCounts : failingValidateChromosomeCountsData) {
-            final VariantContext vc = (VariantContext) testChromosomeCounts[0];
-
-            // for this test, only VariantContext is used, so we hold
-            // reportedReference, observedReference and rsIds constant
-            extraStrictData[testNum++] = new Object[]{vc, Tref, T, null};
-        }
-
-        return extraStrictData;
-    }
-    @Test(dataProvider = "testExtraStrictValidationFailureDataProvider", expectedExceptions = TribbleException.class)
-    public void testExtraStrictValidationFailure(final VariantContext vc, final Allele reportedReference, final Allele observedReference, final Set<String> rsIDs) {
-        // extraStrictValidation throws exceptions if it fails, so no Asserts here...
-        vc.extraStrictValidation(reportedReference, observedReference, rsIDs);
-    }
-}
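For orientation, the unit test removed above exercises the VariantContextBuilder copy-and-modify API. The following is a minimal, self-contained sketch of that usage; the source label, contig, coordinates, sample name and attribute value are illustrative placeholders, not values taken from the test data.

import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.Genotype;
import htsjdk.variant.variantcontext.GenotypeBuilder;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.variantcontext.VariantContextBuilder;

import java.util.Arrays;

public class VariantContextBuilderSketch {
    public static void main(final String[] args) {
        // Illustrative alleles and a single het genotype (names and coordinates are placeholders).
        final Allele aRef = Allele.create("A", true);
        final Allele t = Allele.create("T", false);
        final Genotype het = GenotypeBuilder.create("sample1", Arrays.asList(aRef, t));

        // Build a one-base SNP context with one genotype and an AC attribute.
        final VariantContext vc = new VariantContextBuilder("demo", "1", 10, 10, Arrays.asList(aRef, t))
                .genotypes(het)
                .attribute("AC", 1)
                .make();

        // Copying through the builder and modifying the copy leaves the original untouched,
        // which is what the "test that original hasn't changed" assertions above verify.
        final VariantContext renamed = new VariantContextBuilder(vc).id("rs0").make();
        System.out.println(vc.getID() + " -> " + renamed.getID());
    }
}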
diff --git a/src/tests/java/htsjdk/variant/variantcontext/filter/FilteringVariantContextIteratorTest.java b/src/tests/java/htsjdk/variant/variantcontext/filter/FilteringVariantContextIteratorTest.java
deleted file mode 100644
index ffb214b..0000000
--- a/src/tests/java/htsjdk/variant/variantcontext/filter/FilteringVariantContextIteratorTest.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2015 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-
-package htsjdk.variant.variantcontext.filter;
-
-import htsjdk.variant.variantcontext.VariantContext;
-import htsjdk.variant.vcf.VCFFileReader;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-
-/**
- * Tests for the (VariantContext)FilteringVariantContextIterator and the HeterozygosityFilter
- */
-
-public class FilteringVariantContextIteratorTest {
-    final File testDir = new File("testdata/htsjdk/variant");
-
-    @DataProvider
-    public Object [][] filteringIteratorData() {
-        return new Object[][] {
-                {new HeterozygosityFilter(true, "NA00001"), 2},
-                {new HeterozygosityFilter(false, "NA00001"), 3},
-                {new HeterozygosityFilter(true, null), 2},
-                {new HeterozygosityFilter(false, null), 3},
-                {new AllPassFilter(), 5},
-                {new HeterozygosityFilter(true, "NA00002"), 4},
-                {new HeterozygosityFilter(false, "NA00002"), 1},
-        };
-    }
-
-    @Test(dataProvider = "filteringIteratorData")
-    public void testFilteringIterator(final VariantContextFilter filter, final int expectedCount) {
-
-        final File vcf = new File(testDir,"ex2.vcf");
-        final VCFFileReader vcfReader = new VCFFileReader(vcf, false);
-        final FilteringVariantContextIterator filteringIterator = new FilteringVariantContextIterator(vcfReader.iterator(), filter);
-        int count = 0;
-
-        for(final VariantContext vc : filteringIterator) {
-            count++;
-        }
-
-        Assert.assertEquals(count, expectedCount);
-    }
-
-    @DataProvider
-    public Object [][] badSampleData() {
-        return new Object[][] {
-                {"ex2.vcf", "DOES_NOT_EXIST"},
-                {"breakpoint.vcf", null},
-        };
-    }
-
-    @Test(dataProvider = "badSampleData", expectedExceptions = IllegalArgumentException.class)
-    public void testMissingSample(final String file, final String sample) {
-
-        final File vcf = new File(testDir, file);
-        final VCFFileReader vcfReader = new VCFFileReader(vcf, false);
-        final HeterozygosityFilter heterozygosityFilter = new HeterozygosityFilter(true, sample);
-
-        new FilteringVariantContextIterator(vcfReader.iterator(), heterozygosityFilter).next();
-    }
-}
-
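The deleted FilteringVariantContextIteratorTest above wraps a VCFFileReader iterator in a FilteringVariantContextIterator and counts the records a filter lets through. Here is a minimal sketch of the same pattern; the input path and sample name are hypothetical.

import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.variantcontext.filter.FilteringVariantContextIterator;
import htsjdk.variant.variantcontext.filter.HeterozygosityFilter;
import htsjdk.variant.vcf.VCFFileReader;

import java.io.File;

public class CountHetSites {
    public static void main(final String[] args) {
        // requireIndex=false mirrors the tests above; "input.vcf" and "NA00001" are placeholders.
        final VCFFileReader reader = new VCFFileReader(new File("input.vcf"), false);
        final FilteringVariantContextIterator it = new FilteringVariantContextIterator(
                reader.iterator(), new HeterozygosityFilter(true, "NA00001"));
        int count = 0;
        for (final VariantContext ctx : it) {
            count++; // only records where the sample is heterozygous reach this point
        }
        System.out.println("heterozygous sites for NA00001: " + count);
        reader.close();
    }
}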
diff --git a/src/tests/java/htsjdk/variant/variantcontext/filter/JavascriptVariantFilterTest.java b/src/tests/java/htsjdk/variant/variantcontext/filter/JavascriptVariantFilterTest.java
deleted file mode 100644
index 5c3cf54..0000000
--- a/src/tests/java/htsjdk/variant/variantcontext/filter/JavascriptVariantFilterTest.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2015 Pierre Lindenbaum @yokofakun Institut du Thorax - Nantes - France
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.variant.variantcontext.filter;
-
-import htsjdk.variant.vcf.VCFFileReader;
-
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * @author Pierre Lindenbaum PhD Institut du Thorax - INSERM - Nantes - France
- */
-
-public class JavascriptVariantFilterTest {
-    final File testDir = new File("testdata/htsjdk/variant");
-
-    @DataProvider
-    public Object[][] jsData() {
-        return new Object[][] {
-                { "ILLUMINA.wex.broad_phase2_baseline.20111114.both.exome.genotypes.1000.vcf", "variantFilter01.js",61 },
-                { "ILLUMINA.wex.broad_phase2_baseline.20111114.both.exome.genotypes.1000.vcf", "variantFilter02.js",38 }, };
-    }
-
-    @Test(dataProvider = "jsData")
-    public void testJavascriptFilters(final String vcfFile, final String javascriptFile, final int expectCount) {
-        final File vcfInput = new File(testDir, vcfFile);
-        final File jsInput = new File(testDir, javascriptFile);
-        final VCFFileReader vcfReader = new VCFFileReader(vcfInput, false);
-        final JavascriptVariantFilter filter;
-        try {
-            filter = new JavascriptVariantFilter(jsInput, vcfReader.getFileHeader());
-        } catch (IOException err) {
-            vcfReader.close();
-            Assert.fail("cannot read script " + jsInput, err);
-            return;
-        }
-        final FilteringVariantContextIterator iter = new FilteringVariantContextIterator(vcfReader.iterator(), filter);
-        int count = 0;
-        while (iter.hasNext()) {
-            iter.next();
-            ++count;
-        }
-        iter.close();
-        vcfReader.close();
-        Assert.assertEquals(count, expectCount, "Expected number of variants " + expectCount + " but got " + count);
-    }
-}
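As the deleted test above shows, JavascriptVariantFilter is built from a script file plus the VCF header and then used like any other VariantContextFilter inside a FilteringVariantContextIterator. A sketch under those assumptions follows; the paths are placeholders and the expected script contents are not shown in this diff.

import htsjdk.variant.variantcontext.filter.FilteringVariantContextIterator;
import htsjdk.variant.variantcontext.filter.JavascriptVariantFilter;
import htsjdk.variant.vcf.VCFFileReader;

import java.io.File;
import java.io.IOException;

public class JsFilterSketch {
    public static void main(final String[] args) throws IOException {
        final VCFFileReader reader = new VCFFileReader(new File("input.vcf"), false);
        // The filter compiles the script once, against the file's header.
        final JavascriptVariantFilter filter =
                new JavascriptVariantFilter(new File("filter.js"), reader.getFileHeader());
        final FilteringVariantContextIterator it =
                new FilteringVariantContextIterator(reader.iterator(), filter);
        int kept = 0;
        while (it.hasNext()) {
            it.next();
            ++kept;
        }
        it.close();
        reader.close();
        System.out.println("variants accepted by the script: " + kept);
    }
}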
diff --git a/src/tests/java/htsjdk/variant/variantcontext/writer/TabixOnTheFlyIndexCreationTest.java b/src/tests/java/htsjdk/variant/variantcontext/writer/TabixOnTheFlyIndexCreationTest.java
deleted file mode 100644
index 87818c3..0000000
--- a/src/tests/java/htsjdk/variant/variantcontext/writer/TabixOnTheFlyIndexCreationTest.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2014 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.variant.variantcontext.writer;
-
-import htsjdk.tribble.AbstractFeatureReader;
-import htsjdk.tribble.CloseableTribbleIterator;
-import htsjdk.tribble.FeatureReader;
-import htsjdk.tribble.index.tabix.TabixIndex;
-import htsjdk.tribble.util.TabixUtils;
-import htsjdk.variant.variantcontext.VariantContext;
-import htsjdk.variant.vcf.VCF3Codec;
-import htsjdk.variant.vcf.VCFHeader;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.util.EnumSet;
-
-public class TabixOnTheFlyIndexCreationTest {
-    private static final File SMALL_VCF = new File("testdata/htsjdk/tribble/tabix/trioDup.vcf.gz");
-    @Test
-    public void simpleTest() throws Exception {
-        final VCF3Codec codec = new VCF3Codec();
-        final FeatureReader<VariantContext> reader = AbstractFeatureReader.getFeatureReader(SMALL_VCF.getAbsolutePath(), codec, false);
-        final VCFHeader headerFromFile = (VCFHeader)reader.getHeader();
-        final File vcf = File.createTempFile("TabixOnTheFlyIndexCreationTest.", ".vcf.gz");
-        final File tabix = new File(vcf.getAbsolutePath() + TabixUtils.STANDARD_INDEX_EXTENSION);
-        vcf.deleteOnExit();
-        tabix.deleteOnExit();
-        final VariantContextWriter vcfWriter = new VariantContextWriterBuilder()
-                .setOutputFile(vcf)
-                .setReferenceDictionary(headerFromFile.getSequenceDictionary())
-                .setOptions(EnumSet.of(Options.INDEX_ON_THE_FLY, Options.ALLOW_MISSING_FIELDS_IN_HEADER))
-                .build();
-        vcfWriter.writeHeader(headerFromFile);
-        final CloseableTribbleIterator<VariantContext> it = reader.iterator();
-        while (it.hasNext()) {
-            vcfWriter.add(it.next());
-        }
-        it.close();
-        vcfWriter.close();
-
-        // Hard to validate, so just confirm that index can be read.
-        new TabixIndex(tabix);
-    }
-}
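The deleted TabixOnTheFlyIndexCreationTest above builds a VariantContextWriter with Options.INDEX_ON_THE_FLY so that a tabix index is produced alongside the block-compressed output. Below is a minimal sketch of that writer setup, reading from a hypothetical input via VCFFileReader rather than the VCF3Codec used in the test; it assumes the input header carries a sequence dictionary, which on-the-fly indexing needs.

import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.variantcontext.writer.Options;
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder;
import htsjdk.variant.vcf.VCFFileReader;
import htsjdk.variant.vcf.VCFHeader;

import java.io.File;
import java.util.EnumSet;

public class CopyWithTabixIndex {
    public static void main(final String[] args) {
        // "input.vcf" and "copy.vcf.gz" are placeholder paths; a ".vcf.gz" output plus
        // INDEX_ON_THE_FLY yields a companion index next to the output file.
        final VCFFileReader reader = new VCFFileReader(new File("input.vcf"), false);
        final VCFHeader header = reader.getFileHeader();
        final VariantContextWriter writer = new VariantContextWriterBuilder()
                .setOutputFile(new File("copy.vcf.gz"))
                .setReferenceDictionary(header.getSequenceDictionary())
                .setOptions(EnumSet.of(Options.INDEX_ON_THE_FLY, Options.ALLOW_MISSING_FIELDS_IN_HEADER))
                .build();
        writer.writeHeader(header);
        for (final VariantContext ctx : reader) { // VCFFileReader iterates over its records
            writer.add(ctx);
        }
        writer.close(); // closing the writer finalizes and writes the index
        reader.close();
    }
}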
diff --git a/src/tests/java/htsjdk/variant/variantcontext/writer/VCFWriterUnitTest.java b/src/tests/java/htsjdk/variant/variantcontext/writer/VCFWriterUnitTest.java
deleted file mode 100644
index 16be59e..0000000
--- a/src/tests/java/htsjdk/variant/variantcontext/writer/VCFWriterUnitTest.java
+++ /dev/null
@@ -1,280 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package htsjdk.variant.variantcontext.writer;
-
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.util.TestUtil;
-import htsjdk.tribble.AbstractFeatureReader;
-import htsjdk.tribble.FeatureReader;
-import htsjdk.tribble.Tribble;
-import htsjdk.tribble.util.TabixUtils;
-import htsjdk.variant.VariantBaseTest;
-import htsjdk.variant.variantcontext.Allele;
-import htsjdk.variant.variantcontext.Genotype;
-import htsjdk.variant.variantcontext.GenotypeBuilder;
-import htsjdk.variant.variantcontext.GenotypesContext;
-import htsjdk.variant.variantcontext.VariantContext;
-import htsjdk.variant.variantcontext.VariantContextBuilder;
-import htsjdk.variant.vcf.VCFCodec;
-import htsjdk.variant.vcf.VCFFileReader;
-import htsjdk.variant.vcf.VCFHeader;
-import htsjdk.variant.vcf.VCFHeaderLine;
-import htsjdk.variant.vcf.VCFHeaderVersion;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.testng.Assert;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-/**
- * @author aaron
- *         <p/>
- *         Class VCFWriterUnitTest
- *         <p/>
- *         This class tests out the ability of the VCF writer to correctly write VCF files
- */
-public class VCFWriterUnitTest extends VariantBaseTest {
-    private Set<VCFHeaderLine> metaData;
-    private Set<String> additionalColumns;
-    private File tempDir;
-
-    @BeforeClass
-    private void createTemporaryDirectory() {
-        tempDir = TestUtil.getTempDirectory("VCFWriter", "StaleIndex");
-    }
-
-    @AfterClass
-    private void deleteTemporaryDirectory() {
-        for (File f : tempDir.listFiles()) {
-            f.delete();
-        }
-        tempDir.delete();
-    }
-
-    /** test, using the writer and reader, that we can output and input a VCF file without problems */
-    @Test(dataProvider = "vcfExtensionsDataProvider")
-    public void testBasicWriteAndRead(final String extension) throws IOException {
-        final File fakeVCFFile = File.createTempFile("testBasicWriteAndRead.", extension);
-        fakeVCFFile.deleteOnExit();
-        if (".vcf.gz".equals(extension)) {
-            new File(fakeVCFFile.getAbsolutePath() + ".tbi").deleteOnExit();
-        } else {
-            Tribble.indexFile(fakeVCFFile).deleteOnExit();
-        }
-        metaData = new HashSet<VCFHeaderLine>();
-        additionalColumns = new HashSet<String>();
-        final SAMSequenceDictionary sequenceDict = createArtificialSequenceDictionary();
-        final VCFHeader header = createFakeHeader(metaData, additionalColumns, sequenceDict);
-        final VariantContextWriter writer = new VariantContextWriterBuilder()
-                .setOutputFile(fakeVCFFile)
-                .setReferenceDictionary(sequenceDict)
-                .setOptions(EnumSet.of(Options.ALLOW_MISSING_FIELDS_IN_HEADER, Options.INDEX_ON_THE_FLY))
-                .build();
-        writer.writeHeader(header);
-        writer.add(createVC(header));
-        writer.add(createVC(header));
-        writer.close();
-        final VCFCodec codec = new VCFCodec();
-        final FeatureReader<VariantContext> reader = AbstractFeatureReader.getFeatureReader(fakeVCFFile.getAbsolutePath(), codec, false);
-        final VCFHeader headerFromFile = (VCFHeader)reader.getHeader();
-
-        int counter = 0;
-
-        // validate what we're reading in
-        validateHeader(headerFromFile, sequenceDict);
-        
-        try {
-            final Iterator<VariantContext> it = reader.iterator();
-            while(it.hasNext()) {
-                it.next();
-                counter++;
-            }
-            Assert.assertEquals(counter, 2);
-        }
-        catch (final IOException e) {
-            throw new RuntimeException(e);
-        }
-
-    }
-
-    /**
-     * create a fake header of known quantity
-     * @param metaData           the header lines
-     * @param additionalColumns  the additional column names
-     * @return a fake VCF header
-     */
-    public static VCFHeader createFakeHeader(final Set<VCFHeaderLine> metaData, final Set<String> additionalColumns,
-                                             final SAMSequenceDictionary sequenceDict) {
-        metaData.add(new VCFHeaderLine(VCFHeaderVersion.VCF4_0.getFormatString(), VCFHeaderVersion.VCF4_0.getVersionString()));
-        metaData.add(new VCFHeaderLine("two", "2"));
-        additionalColumns.add("extra1");
-        additionalColumns.add("extra2");
-        final VCFHeader ret = new VCFHeader(metaData, additionalColumns);
-        ret.setSequenceDictionary(sequenceDict);
-        return ret;
-    }
-
-    /**
-     * create a fake VCF record
-     * @param header the VCF header
-     * @return a VCFRecord
-     */
-    private VariantContext createVC(final VCFHeader header) {
-
-       return createVCGeneral(header,"1",1);
-    }
-
-    private VariantContext createVCGeneral(final VCFHeader header, final String chrom, final int position) {
-        final List<Allele> alleles = new ArrayList<Allele>();
-        final Map<String, Object> attributes = new HashMap<String,Object>();
-        final GenotypesContext genotypes = GenotypesContext.create(header.getGenotypeSamples().size());
-
-        alleles.add(Allele.create("A",true));
-        alleles.add(Allele.create("ACC",false));
-
-        attributes.put("DP","50");
-        for (final String name : header.getGenotypeSamples()) {
-            final Genotype gt = new GenotypeBuilder(name,alleles.subList(1,2)).GQ(0).attribute("BB", "1").phased(true).make();
-            genotypes.add(gt);
-        }
-        return new VariantContextBuilder("RANDOM", chrom, position, position, alleles)
-                .genotypes(genotypes).attributes(attributes).make();
-    }
-
-
-    /**
-     * validate a VCF header
-     * @param header the header to validate
-     */
-    public void validateHeader(final VCFHeader header, final SAMSequenceDictionary sequenceDictionary) {
-        // check the fields
-        int index = 0;
-        for (final VCFHeader.HEADER_FIELDS field : header.getHeaderFields()) {
-            Assert.assertEquals(VCFHeader.HEADER_FIELDS.values()[index], field);
-            index++;
-        }
-        Assert.assertEquals(header.getMetaDataInSortedOrder().size(), metaData.size() + sequenceDictionary.size());
-        index = 0;
-        for (final String key : header.getGenotypeSamples()) {
-            Assert.assertTrue(additionalColumns.contains(key));
-            index++;
-        }
-        Assert.assertEquals(index, additionalColumns.size());
-    }
-
-    @Test(dataProvider = "vcfExtensionsDataProvider")
-    public void TestWritingLargeVCF(final String extension) throws FileNotFoundException, InterruptedException {
-
-        final Set<VCFHeaderLine> metaData = new HashSet<VCFHeaderLine>();
-        final Set<String> Columns = new HashSet<String>();
-        for (int i = 0; i < 123; i++) {
-
-            Columns.add(String.format("SAMPLE_%d", i));
-        }
-
-        final SAMSequenceDictionary dict = createArtificialSequenceDictionary();
-        final VCFHeader header = createFakeHeader(metaData,Columns, dict);
-
-        final File vcf = new File(tempDir, "test" + extension);
-        final String indexExtension;
-        if (extension.equals(".vcf.gz")) {
-            indexExtension = TabixUtils.STANDARD_INDEX_EXTENSION;
-        } else {
-            indexExtension = Tribble.STANDARD_INDEX_EXTENSION;
-        }
-        final File vcfIndex = new File(vcf.getAbsolutePath() + indexExtension);
-        vcfIndex.deleteOnExit();
-
-        for(int count=1;count<2; count++){
-            final VariantContextWriter writer =  new VariantContextWriterBuilder()
-                    .setOutputFile(vcf)
-                    .setReferenceDictionary(dict)
-                    .setOptions(EnumSet.of(Options.ALLOW_MISSING_FIELDS_IN_HEADER, Options.INDEX_ON_THE_FLY))
-                    .build();
-            writer.writeHeader(header);
-
-            for (int i = 1; i < 17; i++) { // write 16 chromosomes
-                for (int j = 1; j < 10; j++) { // 9 records each
-                    writer.add(createVCGeneral(header, String.format("%d", i), j * 100));
-                }
-            }
-            writer.close();
-
-            Assert.assertTrue(vcf.lastModified() <= vcfIndex.lastModified());
-        }
-    }
-
-    @DataProvider(name = "vcfExtensionsDataProvider")
-    public Object[][] vcfExtensionsDataProvider() {
-        return new Object[][] {
-                // TODO: BCF doesn't work because header is not properly constructed.
-                // {".bcf"},
-                {".vcf"},
-                {".vcf.gz"}
-        };
-    }
-
-
-    /**
-     * A test to ensure that if we add a line to a VCFHeader it will persist through
-     * a round-trip write/read cycle via VariantContextWriter/VCFFileReader
-     */
-    @Test
-    public void testModifyHeader() {
-        final File originalVCF = new File("testdata/htsjdk/variant/HiSeq.10000.vcf");
-        final VCFFileReader reader = new VCFFileReader(originalVCF, false);
-        final VCFHeader header = reader.getFileHeader();
-        reader.close();
-
-        header.addMetaDataLine(new VCFHeaderLine("FOOBAR", "foovalue"));
-
-        final File outputVCF = createTempFile("testModifyHeader", ".vcf");
-        final VariantContextWriter writer = new VariantContextWriterBuilder().setOutputFile(outputVCF).setOptions(EnumSet.of(Options.ALLOW_MISSING_FIELDS_IN_HEADER)).build();
-        writer.writeHeader(header);
-        writer.close();
-
-        final VCFFileReader roundtripReader = new VCFFileReader(outputVCF, false);
-        final VCFHeader roundtripHeader = roundtripReader.getFileHeader();
-        roundtripReader.close();
-
-        Assert.assertNotNull(roundtripHeader.getOtherHeaderLine("FOOBAR"), "Could not find FOOBAR header line after a write/read cycle");
-        Assert.assertEquals(roundtripHeader.getOtherHeaderLine("FOOBAR").getValue(), "foovalue", "Wrong value for FOOBAR header line after a write/read cycle");
-    }
-}
-
diff --git a/src/tests/java/htsjdk/variant/variantcontext/writer/VariantContextWriterBuilderUnitTest.java b/src/tests/java/htsjdk/variant/variantcontext/writer/VariantContextWriterBuilderUnitTest.java
deleted file mode 100644
index 8e460c2..0000000
--- a/src/tests/java/htsjdk/variant/variantcontext/writer/VariantContextWriterBuilderUnitTest.java
+++ /dev/null
@@ -1,377 +0,0 @@
-/*
-* Copyright (c) 2014 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package htsjdk.variant.variantcontext.writer;
-
-import htsjdk.samtools.Defaults;
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.util.BlockCompressedOutputStream;
-import htsjdk.tribble.AbstractFeatureReader;
-import htsjdk.tribble.Tribble;
-import htsjdk.tribble.util.TabixUtils;
-import htsjdk.variant.VariantBaseTest;
-import htsjdk.variant.variantcontext.writer.Options;
-import org.testng.Assert;
-import org.testng.annotations.BeforeSuite;
-import org.testng.annotations.Test;
-
-import java.io.BufferedOutputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.List;
-
-public class VariantContextWriterBuilderUnitTest extends VariantBaseTest {
-	private static final String TEST_BASENAME = "htsjdk-test.VariantContextWriterBuilderUnitTest";
-    private SAMSequenceDictionary dictionary;
-
-    private File vcf;
-    private File vcfIdx;
-    private File vcfMD5;
-    private File bcf;
-    private File bcfIdx;
-    private File unknown;
-
-    private List<File> blockCompressedVCFs;
-    private List<File> blockCompressedIndices;
-
-    @BeforeSuite
-    public void before() throws IOException {
-        dictionary = createArtificialSequenceDictionary();
-        vcf = File.createTempFile(TEST_BASENAME, ".vcf");
-        vcf.deleteOnExit();
-        vcfIdx = Tribble.indexFile(vcf);
-        vcfIdx.deleteOnExit();
-        vcfMD5 = new File(vcf.getAbsolutePath() + ".md5");
-        vcfMD5.deleteOnExit();
-        bcf = File.createTempFile(TEST_BASENAME, ".bcf");
-        bcf.deleteOnExit();
-        bcfIdx = Tribble.indexFile(bcf);
-        bcfIdx.deleteOnExit();
-        unknown = File.createTempFile(TEST_BASENAME, ".unknown");
-        unknown.deleteOnExit();
-
-        blockCompressedVCFs = new ArrayList<File>();
-        blockCompressedIndices = new ArrayList<File>();
-        for (final String extension : AbstractFeatureReader.BLOCK_COMPRESSED_EXTENSIONS) {
-            final File blockCompressed = File.createTempFile(TEST_BASENAME, ".vcf" + extension);
-            blockCompressed.deleteOnExit();
-            blockCompressedVCFs.add(blockCompressed);
-
-            final File index = new File(blockCompressed.getAbsolutePath() + TabixUtils.STANDARD_INDEX_EXTENSION);
-            index.deleteOnExit();
-            blockCompressedIndices.add(index);
-        }
-    }
-
-    @Test
-    public void testSetOutputFile() throws IOException {
-        final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
-                .setReferenceDictionary(dictionary);
-
-        VariantContextWriter writer = builder.setOutputFile(vcf.getAbsolutePath()).build();
-        Assert.assertTrue(writer instanceof VCFWriter, "testSetOutputFile VCF String");
-        Assert.assertFalse(((VCFWriter) writer).getOutputStream() instanceof BlockCompressedOutputStream, "testSetOutputFile VCF String was compressed");
-
-        writer = builder.setOutputFile(vcf).build();
-        Assert.assertTrue(writer instanceof VCFWriter, "testSetOutputFile VCF File");
-        Assert.assertFalse(((VCFWriter)writer).getOutputStream() instanceof BlockCompressedOutputStream, "testSetOutputFile VCF File was compressed");
-
-        for (final String extension : AbstractFeatureReader.BLOCK_COMPRESSED_EXTENSIONS) {
-            final File file = File.createTempFile(TEST_BASENAME + ".setoutput", extension);
-            file.deleteOnExit();
-            final String filename = file.getAbsolutePath();
-
-            writer = builder.setOutputFile(filename).build();
-            Assert.assertTrue(writer instanceof VCFWriter, "testSetOutputFile " + extension + " String");
-            Assert.assertTrue(((VCFWriter) writer).getOutputStream() instanceof BlockCompressedOutputStream, "testSetOutputFile " + extension + " String was not compressed");
-
-            writer = builder.setOutputFile(file).build();
-            Assert.assertTrue(writer instanceof VCFWriter, "testSetOutputFile " + extension + " File");
-            Assert.assertTrue(((VCFWriter) writer).getOutputStream() instanceof BlockCompressedOutputStream, "testSetOutputFile " + extension + " File was not compressed");
-        }
-
-        writer = builder.setOutputFile(bcf).build();
-        Assert.assertTrue(writer instanceof BCF2Writer, "testSetOutputFile BCF String");
-
-        writer = builder.setOutputFile(bcf.getAbsolutePath()).build();
-        Assert.assertTrue(writer instanceof BCF2Writer, "testSetOutputFile BCF File");
-    }
-
-    @Test
-    public void testSetOutputFileType() {
-        final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
-                .setReferenceDictionary(dictionary)
-                .setOutputFile(unknown);
-
-        VariantContextWriter writer = builder.setOutputFileType(VariantContextWriterBuilder.OutputType.VCF).build();
-        Assert.assertTrue(writer instanceof VCFWriter, "testSetOutputFileType VCF");
-        Assert.assertFalse(((VCFWriter) writer).getOutputStream() instanceof BlockCompressedOutputStream, "testSetOutputFileType VCF was compressed");
-
-        writer = builder.setOption(Options.FORCE_BCF).build();
-        Assert.assertTrue(writer instanceof BCF2Writer, "testSetOutputFileType FORCE_BCF set -> expected BCF, was VCF");
-
-        // test that FORCE_BCF remains in effect, overriding the explicit setting of VCF
-        writer = builder.setOutputFileType(VariantContextWriterBuilder.OutputType.VCF).build();
-        Assert.assertTrue(writer instanceof BCF2Writer, "testSetOutputFileType FORCE_BCF set 2 -> expected BCF, was VCF");
-
-        writer = builder.unsetOption(Options.FORCE_BCF).build();
-        Assert.assertTrue(writer instanceof VCFWriter, "testSetOutputFileType FORCE_BCF unset -> expected VCF, was BCF");
-        Assert.assertFalse(((VCFWriter) writer).getOutputStream() instanceof BlockCompressedOutputStream, "testSetOutputFileType FORCE_BCF unset was compressed");
-
-        writer = builder.setOutputFileType(VariantContextWriterBuilder.OutputType.BLOCK_COMPRESSED_VCF).build();
-        Assert.assertTrue(writer instanceof VCFWriter, "testSetOutputFile BLOCK_COMPRESSED_VCF");
-        Assert.assertTrue(((VCFWriter) writer).getOutputStream() instanceof BlockCompressedOutputStream, "testSetOutputFileType BLOCK_COMPRESSED_VCF was not compressed");
-
-        writer = builder.setOutputFileType(VariantContextWriterBuilder.OutputType.BCF).build();
-        Assert.assertTrue(writer instanceof BCF2Writer, "testSetOutputFileType BCF");
-    }
-
-    @Test
-    public void testSetOutputStream() {
-        final OutputStream stream = new ByteArrayOutputStream();
-
-        final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
-                .setReferenceDictionary(dictionary)
-                .unsetOption(Options.INDEX_ON_THE_FLY)
-                .setOutputStream(stream);
-
-        VariantContextWriter writer = builder.build();
-        Assert.assertTrue(writer instanceof VCFWriter, "testSetOutputStream default");
-
-        writer = builder.setOption(Options.FORCE_BCF).build();
-        Assert.assertTrue(writer instanceof BCF2Writer, "testSetOutputStream FORCE_BCF set -> expected BCF stream, was VCF stream");
-
-        // test that FORCE_BCF remains in effect, overriding the explicit setting of VCF
-        writer = builder.setOutputVCFStream(stream).build();
-        Assert.assertTrue(writer instanceof BCF2Writer, "testSetOutputStream FORCE_BCF set 2 -> expected BCF stream, was VCF stream");
-
-        writer = builder.unsetOption(Options.FORCE_BCF).build();
-        Assert.assertTrue(writer instanceof VCFWriter, "testSetOutputStream FORCE_BCF unset -> expected VCF stream, was BCF stream");
-
-        writer = builder.setOutputBCFStream(stream).build();
-        Assert.assertTrue(writer instanceof BCF2Writer, "testSetOutputStream BCF");
-
-        writer = builder.setOutputVCFStream(stream).build();
-        Assert.assertTrue(writer instanceof VCFWriter, "testSetOutputStream VCF");
-    }
-
-    @Test
-    public void testAsync() {
-        final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
-                .setReferenceDictionary(dictionary)
-                .setOutputFile(vcf);
-
-        VariantContextWriter writer = builder.build();
-        Assert.assertEquals(writer instanceof AsyncVariantContextWriter, Defaults.USE_ASYNC_IO_FOR_TRIBBLE, "testAsync default");
-
-        writer = builder.setOption(Options.USE_ASYNC_IO).build();
-        Assert.assertTrue(writer instanceof AsyncVariantContextWriter, "testAsync option=set");
-
-        writer = builder.unsetOption(Options.USE_ASYNC_IO).build();
-        Assert.assertFalse(writer instanceof AsyncVariantContextWriter, "testAsync option=unset");
-    }
-
-    @Test
-    public void testBuffering() {
-        final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
-                .setReferenceDictionary(dictionary)
-                .setOutputFile(vcf)
-                .unsetOption(Options.INDEX_ON_THE_FLY);     // so the potential BufferedOutputStream is not wrapped in a PositionalOutputStream
-
-        VariantContextWriter writer = builder.build();
-        Assert.assertTrue(((VCFWriter) writer).getOutputStream() instanceof BufferedOutputStream, "testBuffering was not buffered by default");
-
-        writer = builder.unsetBuffering().build();
-        Assert.assertFalse(((VCFWriter) writer).getOutputStream() instanceof BufferedOutputStream, "testBuffering was buffered when unset");
-
-        writer = builder.setBuffer(8192).build();
-        Assert.assertTrue(((VCFWriter) writer).getOutputStream() instanceof BufferedOutputStream, "testBuffering was not buffered when set");
-    }
-
-    @Test
-    public void testMD5() {
-        final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
-                .setReferenceDictionary(dictionary)
-                .setOutputFile(vcf);
-
-        VariantContextWriter writer = builder.build();
-        writer.close();
-        Assert.assertEquals(vcfMD5.exists(), Defaults.CREATE_MD5, "MD5 default setting not respected");
-
-        if (vcfMD5.exists())
-            vcfMD5.delete();
-
-        writer = builder.setCreateMD5().build();
-        writer.close();
-        Assert.assertTrue(vcfMD5.exists(), "MD5 not created when requested");
-        vcfMD5.delete();
-
-        writer = builder.unsetCreateMD5().build();
-        writer.close();
-        Assert.assertFalse(vcfMD5.exists(), "MD5 created when not requested");
-
-        writer = builder.setCreateMD5(false).build();
-        writer.close();
-        Assert.assertFalse(vcfMD5.exists(), "MD5 created when not requested via boolean parameter");
-
-        writer = builder.setCreateMD5(true).build();
-        writer.close();
-        Assert.assertTrue(vcfMD5.exists(), "MD5 not created when requested via boolean parameter");
-        vcfMD5.delete();
-
-        for (final File blockCompressed : blockCompressedVCFs) {
-            final File md5 = new File(blockCompressed + ".md5");
-            if (md5.exists())
-                md5.delete();
-            md5.deleteOnExit();
-            writer = builder.setOutputFile(blockCompressed).build();
-            writer.close();
-            Assert.assertTrue(md5.exists(), "MD5 digest not created for " + blockCompressed);
-        }
-    }
-
-    @Test
-    public void testIndexingOnTheFly() {
-        final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
-                .setReferenceDictionary(dictionary)
-                .setOption(Options.INDEX_ON_THE_FLY);
-
-        if (vcfIdx.exists())
-            vcfIdx.delete();
-        VariantContextWriter writer = builder.setOutputFile(vcf).build();
-        writer.close();
-        Assert.assertTrue(vcfIdx.exists(), String.format("VCF index not created for %s / %s", vcf, vcfIdx));
-
-        if (bcfIdx.exists())
-            bcfIdx.delete();
-        writer = builder.setOutputFile(bcf).build();
-        writer.close();
-        Assert.assertTrue(bcfIdx.exists(), String.format("BCF index not created for %s / %s", bcf, bcfIdx));
-
-        for (int i = 0; i < blockCompressedVCFs.size(); i++) {
-            final File blockCompressed = blockCompressedVCFs.get(i);
-            final File index = blockCompressedIndices.get(i);
-            if (index.exists())
-                index.delete();
-            writer = builder.setOutputFile(blockCompressed).setReferenceDictionary(dictionary).build();
-            writer.close();
-            Assert.assertTrue(index.exists(), String.format("Block-compressed index not created for %s / %s", blockCompressed, index));
-
-            // Tabix does not require a reference dictionary.
-            // Tribble does: see tests testRefDictRequiredForVCFIndexOnTheFly / testRefDictRequiredForBCFIndexOnTheFly
-
-            index.delete();
-            writer = builder.setReferenceDictionary(null).build();
-            writer.close();
-            Assert.assertTrue(index.exists(), String.format("Block-compressed index not created for %s / %s", blockCompressed, index));
-        }
-    }
-
-    @Test(expectedExceptions = IllegalArgumentException.class)
-    public void testInvalidImplicitFileType() {
-        new VariantContextWriterBuilder()
-                .setReferenceDictionary(dictionary)
-                .setOutputFile("test.bam")
-                .build();
-    }
-
-    @Test(expectedExceptions = IllegalArgumentException.class)
-    public void testSetInvalidFileType() {
-        new VariantContextWriterBuilder()
-                .setReferenceDictionary(dictionary)
-                .setOutputFile("test.bam")
-                .setOutputFileType(VariantContextWriterBuilder.OutputType.VCF_STREAM)
-                .build();
-    }
-
-    @Test(expectedExceptions = IllegalArgumentException.class)
-    public void testInvalidSetFileTypeForStream() {
-        new VariantContextWriterBuilder()
-                .setReferenceDictionary(dictionary)
-                .setOutputStream(new ByteArrayOutputStream())
-                .setOutputFileType(VariantContextWriterBuilder.OutputType.VCF)
-                .build();
-    }
-
-    @Test(expectedExceptions = IllegalArgumentException.class)
-    public void testUnsupportedIndexOnTheFlyForStreaming() {
-        new VariantContextWriterBuilder()
-                .setReferenceDictionary(dictionary)
-                .setOutputStream(new ByteArrayOutputStream())
-                .setOption(Options.INDEX_ON_THE_FLY)
-                .build();
-    }
-
-    @Test(expectedExceptions = IllegalArgumentException.class)
-    public void testUnsupportedDefaultIndexOnTheFlyForStreaming() {
-        new VariantContextWriterBuilder()
-                .setReferenceDictionary(dictionary)
-                .setOutputStream(new ByteArrayOutputStream())
-                .build();
-    }
-
-    @Test(expectedExceptions = IllegalArgumentException.class)
-    public void testRefDictRequiredForVCFIndexOnTheFly() {
-        new VariantContextWriterBuilder()
-                .setOutputFile(vcf)
-                .setOption(Options.INDEX_ON_THE_FLY)
-                .build();
-    }
-
-    @Test(expectedExceptions = IllegalArgumentException.class)
-    public void testRefDictRequiredForBCFIndexOnTheFly() {
-        new VariantContextWriterBuilder()
-                .setOutputFile(bcf)
-                .setOption(Options.INDEX_ON_THE_FLY)
-                .build();
-    }
-
-    @Test
-    public void testClearOptions() {
-        // Verify that clearOptions doesn't have a side effect of carrying previously set options
-        // forward to subsequent builders
-        VariantContextWriterBuilder vcwb = new VariantContextWriterBuilder();
-        vcwb.clearOptions().setOption(Options.INDEX_ON_THE_FLY);
-        final VariantContextWriterBuilder builder = new VariantContextWriterBuilder().clearOptions();
-        Assert.assertTrue(builder.options.isEmpty());
-    }
-
-    @Test
-    public void testModifyOption() {
-        final VariantContextWriterBuilder builder = new VariantContextWriterBuilder().clearOptions();
-        for (final Options option : Options.values()) {
-            Assert.assertFalse(builder.isOptionSet(option)); // shouldn't be set
-            builder.modifyOption(option, false);
-            Assert.assertFalse(builder.isOptionSet(option)); // still shouldn't be set
-            builder.modifyOption(option, true);
-            Assert.assertTrue(builder.isOptionSet(option)); // now is set
-            builder.modifyOption(option, false);
-            Assert.assertFalse(builder.isOptionSet(option)); // has been unset
-        }
-    }
-}
\ No newline at end of file
diff --git a/src/tests/java/htsjdk/variant/vcf/AbstractVCFCodecTest.java b/src/tests/java/htsjdk/variant/vcf/AbstractVCFCodecTest.java
deleted file mode 100644
index 044be8a..0000000
--- a/src/tests/java/htsjdk/variant/vcf/AbstractVCFCodecTest.java
+++ /dev/null
@@ -1,53 +0,0 @@
-package htsjdk.variant.vcf;
-
-import htsjdk.tribble.TribbleException;
-import htsjdk.variant.VariantBaseTest;
-import htsjdk.variant.variantcontext.Allele;
-import htsjdk.variant.variantcontext.VariantContext;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.util.List;
-
-
-
-public class AbstractVCFCodecTest extends VariantBaseTest {
-	@Test
-	public void shouldPreserveSymbolicAlleleCase() {
-		VCFFileReader reader = new VCFFileReader(new File(VariantBaseTest.variantTestDataRoot + "breakpoint.vcf"), false);
-		VariantContext variant = reader.iterator().next();
-		reader.close();
-		
-		// VCF v4.1 s1.4.5
-		// Tools processing VCF files are not required to preserve case in the allele String, except for IDs, which are case sensitive.
-		Assert.assertTrue(variant.getAlternateAllele(0).getDisplayString().contains("chr12"));
-	}
-
-	@Test
-	public void TestSpanDelParseAlleles(){
-		List<Allele> list = VCF3Codec.parseAlleles("A", Allele.SPAN_DEL_STRING, 0);
-	}
-
-	@Test(expectedExceptions = TribbleException.class)
-	public void TestSpanDelParseAllelesException(){
-		List<Allele> list1 = VCF3Codec.parseAlleles(Allele.SPAN_DEL_STRING, "A", 0);
-	}
-
-	@DataProvider(name="thingsToTryToDecode")
-	public Object[][] getThingsToTryToDecode(){
-		return new Object[][] {
-				{"testdata/htsjdk/tribble/tabix/testTabixIndex.vcf", true},
-				{"testdata/htsjdk/tribble/tabix/testTabixIndex.vcf.gz", true},
-				{"testdata/htsjdk/tribble/nonexistant.garbage", false},
-				{"testdata/htsjdk/tribble/testIntervalList.list", false}
-		};
-	}
-
-	@Test(dataProvider = "thingsToTryToDecode")
-	public void testCanDecodeFile(String potentialInput, boolean canDecode) {
-		Assert.assertEquals(AbstractVCFCodec.canDecodeFile(potentialInput, VCFCodec.VCF4_MAGIC_HEADER), canDecode);
-	}
-
-}
diff --git a/src/tests/java/htsjdk/variant/vcf/VCFHeaderUnitTest.java b/src/tests/java/htsjdk/variant/vcf/VCFHeaderUnitTest.java
deleted file mode 100644
index 7f2437e..0000000
--- a/src/tests/java/htsjdk/variant/vcf/VCFHeaderUnitTest.java
+++ /dev/null
@@ -1,491 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package htsjdk.variant.vcf;
-
-import htsjdk.samtools.util.CloseableIterator;
-import htsjdk.samtools.util.TestUtil;
-import htsjdk.tribble.AbstractFeatureReader;
-import htsjdk.tribble.FeatureReader;
-import htsjdk.tribble.TribbleException;
-import htsjdk.tribble.readers.AsciiLineReader;
-import htsjdk.tribble.readers.AsciiLineReaderIterator;
-import htsjdk.tribble.readers.LineIteratorImpl;
-import htsjdk.tribble.readers.LineReaderUtil;
-import htsjdk.variant.VariantBaseTest;
-import htsjdk.variant.variantcontext.VariantContext;
-import htsjdk.variant.variantcontext.writer.Options;
-import htsjdk.variant.variantcontext.writer.VariantContextWriter;
-import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder;
-import org.testng.Assert;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import java.io.*;
-import java.math.BigInteger;
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
-import java.util.*;
-
-/**
- * Created by IntelliJ IDEA.
- * User: aaron
- * Date: Jun 30, 2010
- * Time: 3:32:08 PM
- * To change this template use File | Settings | File Templates.
- */
-public class VCFHeaderUnitTest extends VariantBaseTest {
-
-    private File tempDir;
-
-    private VCFHeader createHeader(String headerStr) {
-        VCFCodec codec = new VCFCodec();
-        VCFHeader header = (VCFHeader) codec.readActualHeader(new LineIteratorImpl(LineReaderUtil.fromStringReader(
-                new StringReader(headerStr), LineReaderUtil.LineReaderOption.SYNCHRONOUS)));
-        Assert.assertEquals(header.getMetaDataInInputOrder().size(), VCF4headerStringCount);
-        return header;
-    }
-
-    @BeforeClass
-    private void createTemporaryDirectory() {
-        tempDir = TestUtil.getTempDirectory("VCFHeader", "VCFHeaderTest");
-    }
-
-    @AfterClass
-    private void deleteTemporaryDirectory() {
-        for (File f : tempDir.listFiles()) {
-            f.delete();
-        }
-        tempDir.delete();
-    }
-
-    @Test
-    public void testVCF4ToVCF4() {
-        VCFHeader header = createHeader(VCF4headerStrings);
-        checkMD5ofHeaderFile(header, "91c33dadb92e01ea349bd4bcdd02d6be");
-    }
-
-    @Test
-    public void testVCF4ToVCF4_alternate() {
-        VCFHeader header = createHeader(VCF4headerStrings_with_negativeOne);
-        checkMD5ofHeaderFile(header, "39318d9713897d55be5ee32a2119853f");
-    }
-
-    @Test
-    public void testVCFHeaderSampleRenamingSingleSampleVCF() throws Exception {
-        final VCFCodec codec = new VCFCodec();
-        codec.setRemappedSampleName("FOOSAMPLE");
-        final AsciiLineReaderIterator vcfIterator = new AsciiLineReaderIterator(new AsciiLineReader(new FileInputStream(variantTestDataRoot + "HiSeq.10000.vcf")));
-        final VCFHeader header = (VCFHeader) codec.readHeader(vcfIterator).getHeaderValue();
-
-        Assert.assertEquals(header.getNGenotypeSamples(), 1, "Wrong number of samples in remapped header");
-        Assert.assertEquals(header.getGenotypeSamples().get(0), "FOOSAMPLE", "Sample name in remapped header has incorrect value");
-
-        int recordCount = 0;
-        while (vcfIterator.hasNext() && recordCount < 10) {
-            recordCount++;
-            final VariantContext vcfRecord = codec.decode(vcfIterator.next());
-
-            Assert.assertEquals(vcfRecord.getSampleNames().size(), 1, "Wrong number of samples in vcf record after remapping");
-            Assert.assertEquals(vcfRecord.getSampleNames().iterator().next(), "FOOSAMPLE", "Wrong sample in vcf record after remapping");
-        }
-    }
-
-    @Test
-    public void testVCFHeaderDictionaryMerging() {
-        VCFHeader headerOne = new VCFFileReader(new File(variantTestDataRoot + "dbsnp_135.b37.1000.vcf"), false).getFileHeader();
-        VCFHeader headerTwo = new VCFHeader(headerOne); // deep copy
-        final List<String> sampleList = new ArrayList<String>();
-        sampleList.addAll(headerOne.getSampleNamesInOrder());
-
-        // Check that the two dictionaries start out the same
-        headerOne.getSequenceDictionary().assertSameDictionary(headerTwo.getSequenceDictionary());
-
-        // Run the merge command
-        final VCFHeader mergedHeader = new VCFHeader(VCFUtils.smartMergeHeaders(Arrays.asList(headerOne, headerTwo), false), sampleList);
-
-        // Check that the mergedHeader's sequence dictionary matches the first two
-        mergedHeader.getSequenceDictionary().assertSameDictionary(headerOne.getSequenceDictionary());
-    }
-
-    @Test(expectedExceptions = TribbleException.class)
-    public void testVCFHeaderSampleRenamingMultiSampleVCF() throws Exception {
-        final VCFCodec codec = new VCFCodec();
-        codec.setRemappedSampleName("FOOSAMPLE");
-        final AsciiLineReaderIterator vcfIterator = new AsciiLineReaderIterator(new AsciiLineReader(new FileInputStream(variantTestDataRoot + "ex2.vcf")));
-        final VCFHeader header = (VCFHeader) codec.readHeader(vcfIterator).getHeaderValue();
-    }
-
-    @Test(expectedExceptions = TribbleException.class)
-    public void testVCFHeaderSampleRenamingSitesOnlyVCF() throws Exception {
-        final VCFCodec codec = new VCFCodec();
-        codec.setRemappedSampleName("FOOSAMPLE");
-        final AsciiLineReaderIterator vcfIterator = new AsciiLineReaderIterator(new AsciiLineReader(new FileInputStream(variantTestDataRoot + "dbsnp_135.b37.1000.vcf")));
-        final VCFHeader header = (VCFHeader) codec.readHeader(vcfIterator).getHeaderValue();
-    }
-
-    private VCFHeader getHiSeqVCFHeader() {
-        final File vcf = new File("testdata/htsjdk/variant/HiSeq.10000.vcf");
-        final VCFFileReader reader = new VCFFileReader(vcf, false);
-        final VCFHeader header = reader.getFileHeader();
-        reader.close();
-        return header;
-    }
-
-    @Test
-    public void testVCFHeaderAddInfoLine() {
-        final VCFHeader header = getHiSeqVCFHeader();
-        final VCFInfoHeaderLine infoLine = new VCFInfoHeaderLine("TestInfoLine", VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String, "test info line");
-        header.addMetaDataLine(infoLine);
-
-        Assert.assertTrue(header.getInfoHeaderLines().contains(infoLine), "TestInfoLine not found in info header lines");
-        Assert.assertTrue(header.getMetaDataInInputOrder().contains(infoLine), "TestInfoLine not found in set of all header lines");
-        Assert.assertNotNull(header.getInfoHeaderLine("TestInfoLine"), "Lookup for TestInfoLine by key failed");
-
-        Assert.assertFalse(header.getFormatHeaderLines().contains(infoLine), "TestInfoLine present in format header lines");
-        Assert.assertFalse(header.getFilterLines().contains(infoLine), "TestInfoLine present in filter header lines");
-        Assert.assertFalse(header.getContigLines().contains(infoLine), "TestInfoLine present in contig header lines");
-        Assert.assertFalse(header.getOtherHeaderLines().contains(infoLine), "TestInfoLine present in other header lines");
-    }
-
-    @Test
-    public void testVCFHeaderAddFormatLine() {
-        final VCFHeader header = getHiSeqVCFHeader();
-        final VCFFormatHeaderLine formatLine = new VCFFormatHeaderLine("TestFormatLine", VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String, "test format line");
-        header.addMetaDataLine(formatLine);
-
-        Assert.assertTrue(header.getFormatHeaderLines().contains(formatLine), "TestFormatLine not found in format header lines");
-        Assert.assertTrue(header.getMetaDataInInputOrder().contains(formatLine), "TestFormatLine not found in set of all header lines");
-        Assert.assertNotNull(header.getFormatHeaderLine("TestFormatLine"), "Lookup for TestFormatLine by key failed");
-
-        Assert.assertFalse(header.getInfoHeaderLines().contains(formatLine), "TestFormatLine present in info header lines");
-        Assert.assertFalse(header.getFilterLines().contains(formatLine), "TestFormatLine present in filter header lines");
-        Assert.assertFalse(header.getContigLines().contains(formatLine), "TestFormatLine present in contig header lines");
-        Assert.assertFalse(header.getOtherHeaderLines().contains(formatLine), "TestFormatLine present in other header lines");
-    }
-
-    @Test
-    public void testVCFHeaderAddFilterLine() {
-        final VCFHeader header = getHiSeqVCFHeader();
-        final VCFFilterHeaderLine filterLine = new VCFFilterHeaderLine("TestFilterLine");
-        header.addMetaDataLine(filterLine);
-
-        Assert.assertTrue(header.getFilterLines().contains(filterLine), "TestFilterLine not found in filter header lines");
-        Assert.assertTrue(header.getMetaDataInInputOrder().contains(filterLine), "TestFilterLine not found in set of all header lines");
-        Assert.assertNotNull(header.getFilterHeaderLine("TestFilterLine"), "Lookup for TestFilterLine by key failed");
-
-        Assert.assertFalse(header.getInfoHeaderLines().contains(filterLine), "TestFilterLine present in info header lines");
-        Assert.assertFalse(header.getFormatHeaderLines().contains(filterLine), "TestFilterLine present in format header lines");
-        Assert.assertFalse(header.getContigLines().contains(filterLine), "TestFilterLine present in contig header lines");
-        Assert.assertFalse(header.getOtherHeaderLines().contains(filterLine), "TestFilterLine present in other header lines");
-    }
-
-    @Test
-    public void testVCFHeaderAddContigLine() {
-        final VCFHeader header = getHiSeqVCFHeader();
-        final VCFContigHeaderLine contigLine = new VCFContigHeaderLine("<ID=chr1,length=1234567890,assembly=FAKE,md5=f126cdf8a6e0c7f379d618ff66beb2da,species=\"Homo sapiens\">", VCFHeaderVersion.VCF4_0, "chr1", 0);
-        header.addMetaDataLine(contigLine);
-
-        Assert.assertTrue(header.getContigLines().contains(contigLine), "Test contig line not found in contig header lines");
-        Assert.assertTrue(header.getMetaDataInInputOrder().contains(contigLine), "Test contig line not found in set of all header lines");
-
-        Assert.assertFalse(header.getInfoHeaderLines().contains(contigLine), "Test contig line present in info header lines");
-        Assert.assertFalse(header.getFormatHeaderLines().contains(contigLine), "Test contig line present in format header lines");
-        Assert.assertFalse(header.getFilterLines().contains(contigLine), "Test contig line present in filter header lines");
-        Assert.assertFalse(header.getOtherHeaderLines().contains(contigLine), "Test contig line present in other header lines");
-    }
-
-    @Test
-    public void testVCFHeaderAddOtherLine() {
-        final VCFHeader header = getHiSeqVCFHeader();
-        final VCFHeaderLine otherLine = new VCFHeaderLine("TestOtherLine", "val");
-        header.addMetaDataLine(otherLine);
-
-        Assert.assertTrue(header.getOtherHeaderLines().contains(otherLine), "TestOtherLine not found in other header lines");
-        Assert.assertTrue(header.getMetaDataInInputOrder().contains(otherLine), "TestOtherLine not found in set of all header lines");
-        Assert.assertNotNull(header.getOtherHeaderLine("TestOtherLine"), "Lookup for TestOtherLine by key failed");
-
-        Assert.assertFalse(header.getInfoHeaderLines().contains(otherLine), "TestOtherLine present in info header lines");
-        Assert.assertFalse(header.getFormatHeaderLines().contains(otherLine), "TestOtherLine present in format header lines");
-        Assert.assertFalse(header.getContigLines().contains(otherLine), "TestOtherLine present in contig header lines");
-        Assert.assertFalse(header.getFilterLines().contains(otherLine), "TestOtherLine present in filter header lines");
-    }
-
-    @Test
-    public void testVCFHeaderAddMetaDataLineDoesNotDuplicateContigs() {
-        File input = new File("testdata/htsjdk/variant/ex2.vcf");
-
-        VCFFileReader reader = new VCFFileReader(input, false);
-        VCFHeader header = reader.getFileHeader();
-
-        final int numContigLinesBefore = header.getContigLines().size();
-
-        VCFInfoHeaderLine newInfoField = new VCFInfoHeaderLine("test", VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String, "test info field");
-        header.addMetaDataLine(newInfoField);
-
-        // getting the sequence dictionary was failing due to duplicating contigs in issue #214,
-        // we expect this to not throw an exception
-        header.getSequenceDictionary();
-
-        final int numContigLinesAfter = header.getContigLines().size();
-        // assert that we have the same number of contig lines before and after
-        Assert.assertEquals(numContigLinesBefore, numContigLinesAfter);
-    }
-
-    @Test
-    public void testVCFHeaderAddDuplicateContigLine() {
-        File input = new File("testdata/htsjdk/variant/ex2.vcf");
-
-        VCFFileReader reader = new VCFFileReader(input, false);
-        VCFHeader header = reader.getFileHeader();
-
-
-        final int numContigLinesBefore = header.getContigLines().size();
-        // try to re-add the first contig line
-        header.addMetaDataLine(header.getContigLines().get(0));
-        final int numContigLinesAfter = header.getContigLines().size();
-
-        // assert that we have the same number of contig lines before and after
-        Assert.assertEquals(numContigLinesBefore, numContigLinesAfter);
-    }
-
-    @Test
-    public void testVCFHeaderAddDuplicateHeaderLine() {
-        File input = new File("testdata/htsjdk/variant/ex2.vcf");
-
-        VCFFileReader reader = new VCFFileReader(input, false);
-        VCFHeader header = reader.getFileHeader();
-
-        VCFHeaderLine newHeaderLine = new VCFHeaderLine("key", "value");
-        // add this new header line
-        header.addMetaDataLine(newHeaderLine);
-
-        final int numHeaderLinesBefore = header.getOtherHeaderLines().size();
-        // re-add the same header line
-        header.addMetaDataLine(newHeaderLine);
-        final int numHeaderLinesAfter = header.getOtherHeaderLines().size();
-
-        // assert that we have the same number of other header lines before and after
-        Assert.assertEquals(numHeaderLinesBefore, numHeaderLinesAfter);
-    }
-
-    @Test
-    public void testVCFHeaderSerialization() throws Exception {
-        final VCFFileReader reader = new VCFFileReader(new File("testdata/htsjdk/variant/HiSeq.10000.vcf"), false);
-        final VCFHeader originalHeader = reader.getFileHeader();
-        reader.close();
-
-        final VCFHeader deserializedHeader = TestUtil.serializeAndDeserialize(originalHeader);
-
-        Assert.assertEquals(deserializedHeader.getMetaDataInInputOrder(), originalHeader.getMetaDataInInputOrder(), "Header metadata does not match before/after serialization");
-        Assert.assertEquals(deserializedHeader.getContigLines(), originalHeader.getContigLines(), "Contig header lines do not match before/after serialization");
-        Assert.assertEquals(deserializedHeader.getFilterLines(), originalHeader.getFilterLines(), "Filter header lines do not match before/after serialization");
-        Assert.assertEquals(deserializedHeader.getFormatHeaderLines(), originalHeader.getFormatHeaderLines(), "Format header lines do not match before/after serialization");
-        Assert.assertEquals(deserializedHeader.getIDHeaderLines(), originalHeader.getIDHeaderLines(), "ID header lines do not match before/after serialization");
-        Assert.assertEquals(deserializedHeader.getInfoHeaderLines(), originalHeader.getInfoHeaderLines(), "Info header lines do not match before/after serialization");
-        Assert.assertEquals(deserializedHeader.getOtherHeaderLines(), originalHeader.getOtherHeaderLines(), "Other header lines do not match before/after serialization");
-        Assert.assertEquals(deserializedHeader.getGenotypeSamples(), originalHeader.getGenotypeSamples(), "Genotype samples not the same before/after serialization");
-        Assert.assertEquals(deserializedHeader.samplesWereAlreadySorted(), originalHeader.samplesWereAlreadySorted(), "Sortedness of samples not the same before/after serialization");
-        Assert.assertEquals(deserializedHeader.getSampleNamesInOrder(), originalHeader.getSampleNamesInOrder(), "Sorted list of sample names in header not the same before/after serialization");
-        Assert.assertEquals(deserializedHeader.getSampleNameToOffset(), originalHeader.getSampleNameToOffset(), "Sample name to offset map not the same before/after serialization");
-        Assert.assertEquals(deserializedHeader.toString(), originalHeader.toString(), "String representation of header not the same before/after serialization");
-    }
-
-    @Test
-    public void testVCFHeaderQuoteEscaping() throws Exception {
-        // this test ensures that the end-to-end process of quote escaping is stable when headers are
-        // read and re-written; i.e. that quotes that are already escaped won't be re-escaped. It does
-        // this by reading a test file, adding a header line with an unescaped quote, writing out a copy
-        // of the file, reading it back in and writing a second copy, and finally reading back the second
-        // copy and comparing it to the first.
-
-        // read an existing VCF
-        final VCFFileReader originalFileReader = new VCFFileReader(new File("testdata/htsjdk/variant/HiSeq.10000.vcf"), false);
-        final VCFHeader originalHeader = originalFileReader.getFileHeader();
-
-        // add a header line with quotes to the header
-        final Map<String, String> attributes = new LinkedHashMap<>();
-        attributes.put("ID", "VariantFiltration");
-        attributes.put("CommandLineOptions", "filterName=[ANNOTATION] filterExpression=[ANNOTATION == \"NA\" || ANNOTATION <= 2.0]");
-        final VCFSimpleHeaderLine addedHeaderLine = new VCFSimpleHeaderLine("GATKCommandLine.Test", attributes);
-        originalHeader.addMetaDataLine(addedHeaderLine);
-
-        // write the file out into a new copy
-        final File firstCopyVCFFile = File.createTempFile("testEscapeHeaderQuotes1.", ".vcf");
-        firstCopyVCFFile.deleteOnExit();
-
-        final VariantContextWriter firstCopyWriter = new VariantContextWriterBuilder()
-                .setOutputFile(firstCopyVCFFile)
-                .setReferenceDictionary(createArtificialSequenceDictionary())
-                .setOptions(EnumSet.of(Options.ALLOW_MISSING_FIELDS_IN_HEADER, Options.INDEX_ON_THE_FLY))
-                .build();
-        firstCopyWriter.writeHeader(originalHeader);
-        final CloseableIterator<VariantContext> firstCopyVariantIterator = originalFileReader.iterator();
-        while (firstCopyVariantIterator.hasNext()) {
-            VariantContext variantContext = firstCopyVariantIterator.next();
-            firstCopyWriter.add(variantContext);
-        }
-        originalFileReader.close();
-        firstCopyWriter.close();
-
-        // read the copied file back in
-        final VCFFileReader firstCopyReader = new VCFFileReader(firstCopyVCFFile, false);
-        final VCFHeader firstCopyHeader = firstCopyReader.getFileHeader();
-        final VCFHeaderLine firstCopyNewHeaderLine = firstCopyHeader.getOtherHeaderLine("GATKCommandLine.Test");
-        Assert.assertNotNull(firstCopyNewHeaderLine);
-
-        // write one more copy to make sure things don't get double escaped
-        final File secondCopyVCFFile = File.createTempFile("testEscapeHeaderQuotes2.", ".vcf");
-        secondCopyVCFFile.deleteOnExit();
-        final VariantContextWriter secondCopyWriter = new VariantContextWriterBuilder()
-                .setOutputFile(secondCopyVCFFile)
-                .setReferenceDictionary(createArtificialSequenceDictionary())
-                .setOptions(EnumSet.of(Options.ALLOW_MISSING_FIELDS_IN_HEADER, Options.INDEX_ON_THE_FLY))
-                .build();
-        secondCopyWriter.writeHeader(firstCopyHeader);
-        final CloseableIterator<VariantContext> secondCopyVariantIterator = firstCopyReader.iterator();
-        while (secondCopyVariantIterator.hasNext()) {
-            VariantContext variantContext = secondCopyVariantIterator.next();
-            secondCopyWriter.add(variantContext);
-        }
-        secondCopyWriter.close();
-
-        // read the second copy back in and verify that the two files have the same header line
-        final VCFFileReader secondCopyReader = new VCFFileReader(secondCopyVCFFile, false);
-        final VCFHeader secondCopyHeader = secondCopyReader.getFileHeader();
-
-        final VCFHeaderLine secondCopyNewHeaderLine = secondCopyHeader.getOtherHeaderLine("GATKCommandLine.Test");
-        Assert.assertNotNull(secondCopyNewHeaderLine);
-        Assert.assertEquals(firstCopyNewHeaderLine, secondCopyNewHeaderLine);
-        Assert.assertEquals(firstCopyNewHeaderLine.toStringEncoding(), "GATKCommandLine.Test=<ID=VariantFiltration,CommandLineOptions=\"filterName=[ANNOTATION] filterExpression=[ANNOTATION == \\\"NA\\\" || ANNOTATION <= 2.0]\">");
-        Assert.assertEquals(secondCopyNewHeaderLine.toStringEncoding(), "GATKCommandLine.Test=<ID=VariantFiltration,CommandLineOptions=\"filterName=[ANNOTATION] filterExpression=[ANNOTATION == \\\"NA\\\" || ANNOTATION <= 2.0]\">");
-
-        firstCopyReader.close();
-        secondCopyReader.close();
-
-    }
-
-    /**
-     * a little utility function for all tests to md5sum a file
-     * Shamelessly taken from:
-     * <p/>
-     * http://www.javalobby.org/java/forums/t84420.html
-     *
-     * @param file the file
-     * @return a string
-     */
-    private static String md5SumFile(File file) {
-        MessageDigest digest;
-        try {
-            digest = MessageDigest.getInstance("MD5");
-        } catch (NoSuchAlgorithmException e) {
-            throw new RuntimeException("Unable to find MD5 digest");
-        }
-        InputStream is;
-        try {
-            is = new FileInputStream(file);
-        } catch (FileNotFoundException e) {
-            throw new RuntimeException("Unable to open file " + file);
-        }
-        byte[] buffer = new byte[8192];
-        int read;
-        try {
-            while ((read = is.read(buffer)) > 0) {
-                digest.update(buffer, 0, read);
-            }
-            byte[] md5sum = digest.digest();
-            BigInteger bigInt = new BigInteger(1, md5sum);
-            return bigInt.toString(16);
-
-        } catch (IOException e) {
-            throw new RuntimeException("Unable to process file for MD5", e);
-        } finally {
-            try {
-                is.close();
-            } catch (IOException e) {
-                throw new RuntimeException("Unable to close input stream for MD5 calculation", e);
-            }
-        }
-    }
-
-    private void checkMD5ofHeaderFile(VCFHeader header, String md5sum) {
-        File myTempFile = null;
-        PrintWriter pw = null;
-        try {
-            myTempFile = File.createTempFile("VCFHeader", "vcf");
-            myTempFile.deleteOnExit();
-            pw = new PrintWriter(myTempFile);
-        } catch (IOException e) {
-            Assert.fail("Unable to make a temp file!");
-        }
-        for (VCFHeaderLine line : header.getMetaDataInSortedOrder())
-            pw.println(line);
-        pw.close();
-        Assert.assertEquals(md5SumFile(myTempFile), md5sum);
-    }
-
-    public static int VCF4headerStringCount = 16;
-
-    public static String VCF4headerStrings =
-            "##fileformat=VCFv4.2\n" +
-                    "##filedate=2010-06-21\n" +
-                    "##reference=NCBI36\n" +
-                    "##INFO=<ID=GC, Number=0, Type=Flag, Description=\"Overlap with Gencode CCDS coding sequence\">\n" +
-                    "##INFO=<ID=DP, Number=1, Type=Integer, Description=\"Total number of reads in haplotype window\">\n" +
-                    "##INFO=<ID=AF, Number=A, Type=Float, Description=\"Dindel estimated population allele frequency\">\n" +
-                    "##INFO=<ID=CA, Number=1, Type=String, Description=\"Pilot 1 callability mask\">\n" +
-                    "##INFO=<ID=HP, Number=1, Type=Integer, Description=\"Reference homopolymer tract length\">\n" +
-                    "##INFO=<ID=NS, Number=1, Type=Integer, Description=\"Number of samples with data\">\n" +
-                    "##INFO=<ID=DB, Number=0, Type=Flag, Description=\"dbSNP membership build 129 - type match and indel sequence length match within 25 bp\">\n" +
-                    "##INFO=<ID=NR, Number=1, Type=Integer, Description=\"Number of reads covering non-ref variant on reverse strand\">\n" +
-                    "##INFO=<ID=NF, Number=1, Type=Integer, Description=\"Number of reads covering non-ref variant on forward strand\">\n" +
-                    "##FILTER=<ID=NoQCALL, Description=\"Variant called by Dindel but not confirmed by QCALL\">\n" +
-                    "##FORMAT=<ID=GT, Number=1, Type=String, Description=\"Genotype\">\n" +
-                    "##FORMAT=<ID=HQ, Number=2, Type=Integer, Description=\"Haplotype quality\">\n" +
-                    "##FORMAT=<ID=GQ, Number=1, Type=Integer, Description=\"Genotype quality\">\n" +
-                    "#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n";
-
-
-    public static String VCF4headerStrings_with_negativeOne =
-            "##fileformat=VCFv4.2\n" +
-                    "##filedate=2010-06-21\n" +
-                    "##reference=NCBI36\n" +
-                    "##INFO=<ID=GC, Number=0, Type=Flag, Description=\"Overlap with Gencode CCDS coding sequence\">\n" +
-                    "##INFO=<ID=YY, Number=., Type=Integer, Description=\"Some weird value that has lots of parameters\">\n" +
-                    "##INFO=<ID=AF, Number=A, Type=Float, Description=\"Dindel estimated population allele frequency\">\n" +
-                    "##INFO=<ID=CA, Number=1, Type=String, Description=\"Pilot 1 callability mask\">\n" +
-                    "##INFO=<ID=HP, Number=1, Type=Integer, Description=\"Reference homopolymer tract length\">\n" +
-                    "##INFO=<ID=NS, Number=1, Type=Integer, Description=\"Number of samples with data\">\n" +
-                    "##INFO=<ID=DB, Number=0, Type=Flag, Description=\"dbSNP membership build 129 - type match and indel sequence length match within 25 bp\">\n" +
-                    "##INFO=<ID=NR, Number=1, Type=Integer, Description=\"Number of reads covering non-ref variant on reverse strand\">\n" +
-                    "##INFO=<ID=NF, Number=1, Type=Integer, Description=\"Number of reads covering non-ref variant on forward strand\">\n" +
-                    "##FILTER=<ID=NoQCALL, Description=\"Variant called by Dindel but not confirmed by QCALL\">\n" +
-                    "##FORMAT=<ID=GT, Number=1, Type=String, Description=\"Genotype\">\n" +
-                    "##FORMAT=<ID=HQ, Number=2, Type=Integer, Description=\"Haplotype quality\">\n" +
-                    "##FORMAT=<ID=TT, Number=., Type=Integer, Description=\"Lots of TTs\">\n" +
-                    "#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n";
-
-}

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/htsjdk.git


