[med-svn] [htsjdk] 01/05: New upstream version 2.12.0+dfsg

Andreas Tille tille at debian.org
Wed Oct 11 09:13:36 UTC 2017


This is an automated email from the git hooks/post-receive script.

tille pushed a commit to branch master
in repository htsjdk.

commit 2329a1c1acef70856a264b8c39ec3d5c364789c4
Author: Andreas Tille <tille at debian.org>
Date:   Wed Oct 11 10:14:08 2017 +0200

    New upstream version 2.12.0+dfsg
---
 .codecov.yml                                       |   29 +
 .travis.yml                                        |    5 +-
 README.md                                          |   34 +-
 build.gradle                                       |  103 +-
 gradle/wrapper/gradle-wrapper.properties           |    4 +-
 gradlew                                            |    5 +
 scripts/checkScalaAndJavaFiles.sh                  |   17 +
 scripts/release_picard.sh                          |  152 -
 settings.gradle                                    |    1 +
 .../java/htsjdk/samtools/AbstractBAMFileIndex.java |    3 +
 .../htsjdk/samtools/AbstractSAMHeaderRecord.java   |   12 +-
 .../java/htsjdk/samtools/AsyncSAMFileWriter.java   |    2 +
 src/main/java/htsjdk/samtools/BAMFileReader.java   |  265 +-
 src/main/java/htsjdk/samtools/BAMFileSpan.java     |   50 +-
 src/main/java/htsjdk/samtools/BAMFileWriter.java   |   35 +-
 src/main/java/htsjdk/samtools/BAMIndex.java        |    1 +
 src/main/java/htsjdk/samtools/BAMIndexWriter.java  |    1 +
 src/main/java/htsjdk/samtools/BAMIndexer.java      |   37 +-
 src/main/java/htsjdk/samtools/BAMRecord.java       |    8 +-
 src/main/java/htsjdk/samtools/BAMRecordCodec.java  |   12 +-
 src/main/java/htsjdk/samtools/Bin.java             |    1 +
 src/main/java/htsjdk/samtools/BinList.java         |    4 +
 .../java/htsjdk/samtools/BinaryBAMIndexWriter.java |   14 +-
 .../java/htsjdk/samtools/BinningIndexBuilder.java  |   17 +-
 .../java/htsjdk/samtools/BinningIndexContent.java  |    4 +
 .../htsjdk/samtools/CRAMContainerStreamWriter.java |    9 +-
 src/main/java/htsjdk/samtools/CRAMFileReader.java  |   12 +-
 src/main/java/htsjdk/samtools/CRAMFileWriter.java  |    4 +-
 src/main/java/htsjdk/samtools/CRAMIterator.java    |    4 +-
 .../java/htsjdk/samtools/CachingBAMFileIndex.java  |    4 +
 src/main/java/htsjdk/samtools/Chunk.java           |    2 +
 src/main/java/htsjdk/samtools/CigarElement.java    |    1 +
 .../samtools/ComparableSamRecordIterator.java      |    1 +
 .../samtools/CoordinateSortedPairInfoMap.java      |    5 +
 .../htsjdk/samtools/DefaultSAMRecordFactory.java   |    2 +
 src/main/java/htsjdk/samtools/Defaults.java        |   14 +-
 .../htsjdk/samtools/DiskBasedBAMFileIndex.java     |    2 +
 .../htsjdk/samtools/DuplicateScoringStrategy.java  |   40 +-
 .../java/htsjdk/samtools/DuplicateSetIterator.java |    6 +-
 src/main/java/htsjdk/samtools/FixBAMFile.java      |    4 +
 .../htsjdk/samtools/MergingSamRecordIterator.java  |    7 +
 src/main/java/htsjdk/samtools/QueryInterval.java   |   27 +-
 src/main/java/htsjdk/samtools/SAMFileHeader.java   |  123 +-
 src/main/java/htsjdk/samtools/SAMFileWriter.java   |    1 +
 .../java/htsjdk/samtools/SAMFileWriterFactory.java |  213 +-
 .../java/htsjdk/samtools/SAMFileWriterImpl.java    |   14 +-
 .../java/htsjdk/samtools/SAMProgramRecord.java     |    8 +
 .../java/htsjdk/samtools/SAMReadGroupRecord.java   |    7 +
 src/main/java/htsjdk/samtools/SAMRecord.java       |  199 +-
 .../samtools/SAMRecordCoordinateComparator.java    |    2 +
 .../samtools/SAMRecordDuplicateComparator.java     |    2 +
 .../samtools/SAMRecordQueryNameComparator.java     |    2 +
 .../java/htsjdk/samtools/SAMRecordSetBuilder.java  |   22 +-
 src/main/java/htsjdk/samtools/SAMRecordUtil.java   |   27 +-
 .../htsjdk/samtools/SAMSequenceDictionary.java     |   50 +-
 .../java/htsjdk/samtools/SAMSequenceRecord.java    |   14 +-
 src/main/java/htsjdk/samtools/SAMTestUtil.java     |   50 +-
 .../java/htsjdk/samtools/SAMTextHeaderCodec.java   |   48 +-
 src/main/java/htsjdk/samtools/SAMTextReader.java   |   20 +
 src/main/java/htsjdk/samtools/SAMTextWriter.java   |    4 +
 src/main/java/htsjdk/samtools/SAMTools.java        |    3 +
 src/main/java/htsjdk/samtools/SAMUtils.java        |  323 +-
 .../java/htsjdk/samtools/SAMValidationError.java   |   20 +-
 .../java/htsjdk/samtools/SamFileHeaderMerger.java  |    3 +
 .../java/htsjdk/samtools/SamFileValidator.java     |  100 +-
 .../java/htsjdk/samtools/SamInputResource.java     |   45 +-
 src/main/java/htsjdk/samtools/SamPairUtil.java     |    3 +
 src/main/java/htsjdk/samtools/SamReader.java       |   12 +-
 .../java/htsjdk/samtools/SamReaderFactory.java     |   75 +-
 .../StreamInflatingIndexingOutputStream.java       |   15 +-
 src/main/java/htsjdk/samtools/TextTagCodec.java    |    3 +
 .../htsjdk/samtools/TextualBAMIndexWriter.java     |    3 +
 .../htsjdk/samtools/cram/build/CramNormalizer.java |   11 +-
 .../samtools/cram/build/Sam2CramRecordFactory.java |  136 +-
 .../cram/encoding/ByteArrayLenEncoding.java        |    2 +
 .../cram/encoding/ByteArrayStopEncoding.java       |    2 +
 .../cram/encoding/ExternalByteArrayEncoding.java   |    2 +
 .../cram/encoding/ExternalByteEncoding.java        |    2 +
 .../cram/encoding/ExternalIntegerEncoding.java     |    2 +
 .../cram/encoding/ExternalLongEncoding.java        |    2 +
 .../cram/encoding/GolombRiceIntegerCodec.java      |    1 +
 .../cram/encoding/huffman/HuffmanTree.java         |    1 +
 .../encoding/readfeatures/BaseQualityScore.java    |    1 +
 .../cram/encoding/readfeatures/HardClip.java       |    2 +
 .../cram/encoding/readfeatures/InsertBase.java     |    2 +
 .../cram/encoding/readfeatures/Insertion.java      |    2 +
 .../cram/encoding/readfeatures/Padding.java        |    2 +
 .../cram/encoding/readfeatures/ReadBase.java       |    1 +
 .../cram/encoding/readfeatures/RefSkip.java        |    2 +
 .../cram/encoding/readfeatures/SoftClip.java       |    2 +
 .../cram/encoding/readfeatures/Substitution.java   |   17 +-
 .../samtools/cram/io/CountingInputStream.java      |    8 +
 .../samtools/cram/io/DefaultBitInputStream.java    |    4 +
 .../samtools/cram/io/DefaultBitOutputStream.java   |    4 +
 .../htsjdk/samtools/cram/ref/ReferenceSource.java  |   26 +-
 .../htsjdk/samtools/fastq/BasicFastqWriter.java    |   10 +-
 .../java/htsjdk/samtools/fastq/FastqConstants.java |    4 +-
 .../java/htsjdk/samtools/fastq/FastqEncoder.java   |  113 +
 .../java/htsjdk/samtools/fastq/FastqReader.java    |   56 +-
 .../java/htsjdk/samtools/fastq/FastqRecord.java    |  235 +-
 .../java/htsjdk/samtools/fastq/FastqWriter.java    |    1 +
 .../htsjdk/samtools/filter/AggregateFilter.java    |    2 +
 .../java/htsjdk/samtools/filter/AlignedFilter.java |    2 +
 .../samtools/filter/DuplicateReadFilter.java       |    2 +
 .../filter/FailsVendorReadQualityFilter.java       |    2 +
 .../htsjdk/samtools/filter/FilteringIterator.java  |    2 +-
 .../samtools/filter/FilteringSamIterator.java      |    4 +
 .../htsjdk/samtools/filter/IntervalFilter.java     |    2 +
 .../samtools/filter/IntervalKeepPairFilter.java    |    2 +
 .../samtools/filter/NotPrimaryAlignmentFilter.java |    2 +
 .../htsjdk/samtools/filter/ReadNameFilter.java     |    4 +-
 .../samtools/filter/SecondaryAlignmentFilter.java  |    2 +
 .../filter/SecondaryOrSupplementaryFilter.java     |    2 +
 .../htsjdk/samtools/filter/SolexaNoiseFilter.java  |    2 +
 .../java/htsjdk/samtools/filter/TagFilter.java     |    2 +
 .../samtools/filter/WholeReadClippedFilter.java    |    1 +
 .../java/htsjdk/samtools/metrics/StringHeader.java |    1 +
 .../htsjdk/samtools/metrics/VersionHeader.java     |    1 +
 .../reference/AbstractFastaSequenceFile.java       |   31 +-
 .../samtools/reference/FastaSequenceFile.java      |    3 +
 .../samtools/reference/FastaSequenceIndex.java     |   27 +-
 .../reference/FastaSequenceIndexCreator.java       |  180 ++
 .../reference/IndexedFastaSequenceFile.java        |   14 +-
 .../samtools/reference/ReferenceSequenceFile.java  |    1 +
 .../reference/ReferenceSequenceFileFactory.java    |   71 +-
 .../reference/ReferenceSequenceFileWalker.java     |    5 +-
 .../seekablestream/ByteArraySeekableStream.java    |   23 +-
 .../seekablestream/ISeekableStreamFactory.java     |   22 +
 .../seekablestream/SeekableBufferedStream.java     |    6 +
 .../samtools/seekablestream/SeekableFTPStream.java |    4 +
 .../seekablestream/SeekableFileStream.java         |    7 +
 .../seekablestream/SeekableHTTPStream.java         |    7 +
 .../seekablestream/SeekablePathStream.java         |   11 +-
 .../samtools/seekablestream/SeekableStream.java    |    2 +
 .../seekablestream/SeekableStreamFactory.java      |   23 +
 .../java/htsjdk/samtools/sra/SRALazyRecord.java    |    1 +
 .../htsjdk/samtools/util/AbstractAsyncWriter.java  |    5 +-
 .../htsjdk/samtools/util/AbstractLocusInfo.java    |    2 +
 .../samtools/util/AbstractLocusIterator.java       |    5 +
 .../samtools/util/AbstractRecordAndOffset.java     |   17 -
 .../java/htsjdk/samtools/util/AsciiWriter.java     |    3 +
 .../util/AsyncBlockCompressedInputStream.java      |  234 ++
 .../java/htsjdk/samtools/util/BinaryCodec.java     |   33 +-
 .../samtools/util/BlockCompressedInputStream.java  |  474 ++-
 .../samtools/util/BlockCompressedOutputStream.java |    4 +-
 .../java/htsjdk/samtools/util/BlockGunzipper.java  |   35 +-
 .../htsjdk/samtools/util/BufferedLineReader.java   |   61 +-
 .../htsjdk/samtools/util/CloseableIterator.java    |    1 +
 .../samtools/util/CustomGzipOutputStream.java      |    4 +-
 src/main/java/htsjdk/samtools/util/DateParser.java |   41 -
 .../htsjdk/samtools/util/DelegatingIterator.java   |    4 +
 .../java/htsjdk/samtools/util/DiskBackedQueue.java |    3 +
 .../samtools/util/EdgingRecordAndOffset.java       |    4 +
 .../java/htsjdk/samtools/util/FastLineReader.java  |    1 +
 .../samtools/util/FileAppendStreamLRUCache.java    |    2 +
 src/main/java/htsjdk/samtools/util/IOUtil.java     |   81 +-
 src/main/java/htsjdk/samtools/util/Interval.java   |    1 +
 .../java/htsjdk/samtools/util/IntervalList.java    |   84 +-
 .../util/IntervalListReferenceSequenceMask.java    |    4 +
 .../java/htsjdk/samtools/util/IntervalTree.java    |   19 +-
 .../java/htsjdk/samtools/util/IntervalTreeMap.java |   31 +-
 .../java/htsjdk/samtools/util/Iso8601Date.java     |    1 +
 src/main/java/htsjdk/samtools/util/Lazy.java       |   21 +-
 src/main/java/htsjdk/samtools/util/LineReader.java |    1 +
 src/main/java/htsjdk/samtools/util/Locatable.java  |   54 +
 .../java/htsjdk/samtools/util/LocusComparator.java |    1 +
 src/main/java/htsjdk/samtools/util/LocusImpl.java  |    2 +
 src/main/java/htsjdk/samtools/util/Log.java        |   35 +-
 .../samtools/util/Md5CalculatingInputStream.java   |    9 +
 .../samtools/util/Md5CalculatingOutputStream.java  |   17 +-
 .../java/htsjdk/samtools/util/PeekIterator.java    |    3 +
 .../htsjdk/samtools/util/PeekableIterator.java     |    4 +
 .../samtools/util/PositionalOutputStream.java      |    4 +
 .../samtools/util/QualityEncodingDetector.java     |    3 +
 .../htsjdk/samtools/util/SamLocusIterator.java     |   12 +-
 .../util/SamRecordIntervalIteratorFactory.java     |    4 +
 .../java/htsjdk/samtools/util/SequenceUtil.java    |   92 +-
 .../java/htsjdk/samtools/util/SnappyLoader.java    |  131 +-
 .../htsjdk/samtools/util/SortingCollection.java    |   14 +
 .../samtools/util/SortingLongCollection.java       |    1 +
 .../htsjdk/samtools/util/StringLineReader.java     |   71 +-
 src/main/java/htsjdk/samtools/util/StringUtil.java |    6 +
 .../htsjdk/samtools/util/TempStreamFactory.java    |    4 +-
 .../util/WholeGenomeReferenceSequenceMask.java     |    4 +
 .../htsjdk/samtools/util/zip/DeflaterFactory.java  |    6 +-
 .../{DeflaterFactory.java => InflaterFactory.java} |   25 +-
 .../java/htsjdk/tribble/AbstractFeatureReader.java |   73 +-
 .../java/htsjdk/tribble/AsciiFeatureCodec.java     |   13 +-
 src/main/java/htsjdk/tribble/Feature.java          |    5 +-
 src/main/java/htsjdk/tribble/FeatureCodec.java     |   41 +-
 src/main/java/htsjdk/tribble/FeatureReader.java    |    1 +
 src/main/java/htsjdk/tribble/SimpleFeature.java    |    3 +
 .../java/htsjdk/tribble/TabixFeatureReader.java    |   42 +-
 src/main/java/htsjdk/tribble/Tribble.java          |   45 +-
 src/main/java/htsjdk/tribble/TribbleException.java |    8 +
 .../tribble/TribbleIndexedFeatureReader.java       |   82 +-
 src/main/java/htsjdk/tribble/bed/BEDCodec.java     |   33 +-
 .../java/htsjdk/tribble/bed/FullBEDFeature.java    |    1 +
 .../java/htsjdk/tribble/bed/SimpleBEDFeature.java  |   10 +
 .../java/htsjdk/tribble/example/CountRecords.java  |    3 -
 .../htsjdk/tribble/gelitext/DiploidGenotype.java   |   66 -
 .../htsjdk/tribble/gelitext/GeliTextCodec.java     |  117 -
 .../htsjdk/tribble/gelitext/GeliTextFeature.java   |  148 -
 .../java/htsjdk/tribble/index/AbstractIndex.java   |   80 +-
 .../htsjdk/tribble/index/DynamicIndexCreator.java  |   25 +-
 src/main/java/htsjdk/tribble/index/Index.java      |   33 +-
 .../java/htsjdk/tribble/index/IndexFactory.java    |  115 +-
 .../htsjdk/tribble/index/interval/Interval.java    |    1 +
 .../index/interval/IntervalIndexCreator.java       |   21 +-
 .../tribble/index/interval/IntervalTreeIndex.java  |   16 +
 .../htsjdk/tribble/index/linear/LinearIndex.java   |   29 +-
 .../tribble/index/linear/LinearIndexCreator.java   |   19 +-
 .../htsjdk/tribble/index/tabix/TabixFormat.java    |    3 +-
 .../htsjdk/tribble/index/tabix/TabixIndex.java     |   31 +-
 .../htsjdk/tribble/readers/AsciiLineReader.java    |  131 +-
 .../readers/BlockCompressedAsciiLineReader.java    |   44 +
 .../java/htsjdk/tribble/readers/LineReader.java    |    1 +
 .../htsjdk/tribble/readers/LineReaderUtil.java     |   56 -
 .../tribble/readers/LongLineBufferedReader.java    |    8 +
 .../tribble/readers/PositionalBufferedStream.java  |    9 +-
 .../tribble/readers/TabixIteratorLineReader.java   |    2 +
 .../java/htsjdk/tribble/readers/TabixReader.java   |  136 +-
 src/main/java/htsjdk/tribble/util/HTTPHelper.java  |    8 +
 .../tribble/util/LittleEndianOutputStream.java     |    2 +
 .../java/htsjdk/tribble/util/ParsingUtils.java     |   30 +-
 src/main/java/htsjdk/tribble/util/TabixUtils.java  |    1 +
 src/main/java/htsjdk/variant/bcf2/BCF2Codec.java   |   21 +-
 .../java/htsjdk/variant/variantcontext/Allele.java |    1 +
 .../variant/variantcontext/FastGenotype.java       |    1 +
 .../variantcontext/GenotypeLikelihoods.java        |    6 +
 .../htsjdk/variant/variantcontext/JEXLMap.java     |   12 +
 .../variant/variantcontext/VariantContext.java     |    2 +
 .../variantcontext/VariantContextUtils.java        |   15 +-
 .../variant/variantcontext/VariantJEXLContext.java |    3 +
 .../variantcontext/filter/FilteringIterator.java   |    2 +-
 .../writer/AsyncVariantContextWriter.java          |    8 +
 .../variantcontext/writer/BCF2FieldWriter.java     |    1 +
 .../variant/variantcontext/writer/BCF2Writer.java  |   69 +-
 .../writer/IndexingVariantContextWriter.java       |    7 +-
 .../writer/SortingVariantContextWriter.java        |    5 +
 .../writer/SortingVariantContextWriterBase.java    |   12 +-
 .../variant/variantcontext/writer/VCFWriter.java   |   25 +-
 .../writer/VariantContextWriter.java               |   18 +
 .../writer/VariantContextWriterBuilder.java        |    6 +-
 .../writer/VariantContextWriterFactory.java        |  282 --
 .../java/htsjdk/variant/vcf/AbstractVCFCodec.java  |   13 +-
 src/main/java/htsjdk/variant/vcf/VCF3Codec.java    |    2 +
 src/main/java/htsjdk/variant/vcf/VCFCodec.java     |    1 +
 .../htsjdk/variant/vcf/VCFCompoundHeaderLine.java  |    2 +
 src/main/java/htsjdk/variant/vcf/VCFEncoder.java   |  715 ++--
 .../java/htsjdk/variant/vcf/VCFFileReader.java     |   15 +-
 src/main/java/htsjdk/variant/vcf/VCFHeader.java    |   26 +-
 .../java/htsjdk/variant/vcf/VCFHeaderLine.java     |    1 +
 .../variant/vcf/VCFHeaderLineTranslator.java       |    2 +
 .../htsjdk/variant/vcf/VCFSimpleHeaderLine.java    |    2 +
 src/main/java/htsjdk/variant/vcf/VCFUtils.java     |   10 +-
 src/test/java/htsjdk/HtsjdkTest.java               |   10 +
 .../htsjdk/cram/io/ExternalCompressionTest.java    |    9 +-
 .../htsjdk/samtools/AbstractBAMFileIndexTest.java  |    5 +-
 .../java/htsjdk/samtools/BAMCigarOverflowTest.java |    3 +-
 .../java/htsjdk/samtools/BAMFileIndexTest.java     |   21 +-
 src/test/java/htsjdk/samtools/BAMFileSpanTest.java |   72 +
 .../java/htsjdk/samtools/BAMFileWriterTest.java    |   42 +-
 .../java/htsjdk/samtools/BAMIndexWriterTest.java   |    9 +-
 src/test/java/htsjdk/samtools/BAMIteratorTest.java |    3 +-
 ...AMQueryMultipleIntervalsIteratorFilterTest.java |    3 +-
 .../java/htsjdk/samtools/BAMRemoteFileTest.java    |    3 +-
 src/test/java/htsjdk/samtools/BinTest.java         |    3 +-
 .../htsjdk/samtools/BinningIndexBuilderTest.java   |  105 +
 .../java/htsjdk/samtools/CRAMBAIIndexerTest.java   |    3 +-
 .../java/htsjdk/samtools/CRAMCRAIIndexerTest.java  |    9 +-
 .../java/htsjdk/samtools/CRAMComplianceTest.java   |  328 +-
 .../samtools/CRAMContainerStreamWriterTest.java    |    3 +-
 .../java/htsjdk/samtools/CRAMEdgeCasesTest.java    |    3 +-
 .../java/htsjdk/samtools/CRAMFileBAIIndexTest.java |    3 +-
 .../htsjdk/samtools/CRAMFileCRAIIndexTest.java     |    4 +-
 .../java/htsjdk/samtools/CRAMFileReaderTest.java   |    5 +-
 .../java/htsjdk/samtools/CRAMFileWriterTest.java   |    4 +-
 .../samtools/CRAMFileWriterWithIndexTest.java      |    3 +-
 .../java/htsjdk/samtools/CRAMIndexQueryTest.java   |    9 +-
 .../java/htsjdk/samtools/CRAMSliceMD5Test.java     |  136 +
 src/test/java/htsjdk/samtools/ChunkTest.java       |    3 +-
 src/test/java/htsjdk/samtools/CigarCodecTest.java  |    3 +-
 .../htsjdk/samtools/CigarOperatorUnitTest.java     |  137 +
 src/test/java/htsjdk/samtools/CigarTest.java       |  129 +-
 .../htsjdk/samtools/DownsamplingIteratorTests.java |    3 +-
 .../samtools/DuplicateScoringStrategyTest.java     |   26 +
 .../htsjdk/samtools/DuplicateSetIteratorTest.java  |    3 +-
 .../java/htsjdk/samtools/GenomicIndexUtilTest.java |    5 +-
 ...MergingSamRecordIteratorGroupCollisionTest.java |   19 +-
 .../samtools/MergingSamRecordIteratorTest.java     |    3 +-
 .../htsjdk/samtools/PathInputResourceTest.java     |   48 +
 .../htsjdk/samtools/ProgramRecordChainingTest.java |    3 +-
 .../java/htsjdk/samtools/QueryIntervalTest.java    |   42 +
 .../samtools/SAMBinaryTagAndValueUnitTest.java     |    3 +-
 src/test/java/htsjdk/samtools/SAMCloneTest.java    |    3 +-
 .../java/htsjdk/samtools/SAMFileHeaderTest.java    |   64 +
 .../htsjdk/samtools/SAMFileWriterFactoryTest.java  |  142 +-
 src/test/java/htsjdk/samtools/SAMFlagTest.java     |    3 +-
 .../java/htsjdk/samtools/SAMIntegerTagTest.java    |    3 +-
 .../{BinTest.java => SAMProgramRecordTest.java}    |   20 +-
 .../htsjdk/samtools/SAMReadGroupRecordTest.java    |  148 +
 .../samtools/SAMRecordDuplicateComparatorTest.java |    3 +-
 ....java => SAMRecordQueryHashComparatorTest.java} |   50 +-
 .../samtools/SAMRecordQueryNameComparatorTest.java |  124 +
 .../java/htsjdk/samtools/SAMRecordUnitTest.java    |   76 +-
 .../samtools/SAMSequenceDictionaryCodecTest.java   |   17 +-
 .../htsjdk/samtools/SAMSequenceDictionaryTest.java |   36 +-
 .../htsjdk/samtools/SAMSequenceRecordTest.java     |   86 +
 .../java/htsjdk/samtools/SAMTextReaderTest.java    |    3 +-
 .../java/htsjdk/samtools/SAMTextWriterTest.java    |    3 +-
 src/test/java/htsjdk/samtools/SAMUtilsTest.java    |   39 +-
 .../htsjdk/samtools/SamFileHeaderMergerTest.java   |    3 +-
 src/test/java/htsjdk/samtools/SamFilesTest.java    |    4 +-
 .../java/htsjdk/samtools/SamFlagFieldTest.java     |    5 +-
 .../samtools/SamHeaderRecordComparatorTest.java    |    3 +-
 src/test/java/htsjdk/samtools/SamIndexesTest.java  |    3 +-
 src/test/java/htsjdk/samtools/SamPairUtilTest.java |    3 +-
 .../java/htsjdk/samtools/SamReaderFactoryTest.java |   77 +-
 .../java/htsjdk/samtools/SamReaderSortTest.java    |   61 +-
 src/test/java/htsjdk/samtools/SamReaderTest.java   |    6 +-
 src/test/java/htsjdk/samtools/SamSpecIntTest.java  |    3 +-
 src/test/java/htsjdk/samtools/SamStreamsTest.java  |    5 +-
 .../samtools/SeekableByteChannelFromBuffer.java    |   85 +
 .../SequenceNameTruncationAndValidationTest.java   |    3 +-
 .../java/htsjdk/samtools/ValidateSamFileTest.java  |   77 +-
 .../java/htsjdk/samtools/cram/CRAIEntryTest.java   |    3 +-
 .../java/htsjdk/samtools/cram/CRAIIndexTest.java   |   17 +-
 .../samtools/cram/LosslessRoundTripTest.java       |   21 +-
 .../java/htsjdk/samtools/cram/VersionTest.java     |    3 +-
 .../cram/build/CompressionHeaderFactoryTest.java   |    3 +-
 .../samtools/cram/build/ContainerFactoryTest.java  |    3 +-
 .../samtools/cram/build/ContainerParserTest.java   |    3 +-
 .../htsjdk/samtools/cram/build/CramIOTest.java     |    3 +-
 .../cram/build/Sam2CramRecordFactoryTest.java      |  109 +
 .../cram/encoding/huffman/codec/HuffmanTest.java   |    3 +-
 .../samtools/cram/encoding/rans/RansTest.java      |    3 +-
 .../java/htsjdk/samtools/cram/io/ITF8Test.java     |    3 +-
 .../java/htsjdk/samtools/cram/io/LTF8Test.java     |    3 +-
 .../cram/lossy/QualityScorePreservationTest.java   |    9 +-
 .../samtools/cram/ref/EnaRefServiceTest.java       |    3 +-
 .../samtools/cram/ref/ReferenceSourceTest.java     |   33 +
 .../cram/structure/CramCompressionRecordTest.java  |    9 +-
 .../samtools/cram/structure/ReadTagTest.java       |   10 +-
 .../htsjdk/samtools/cram/structure/SliceTests.java |    4 +-
 .../cram/structure/SubstitutionMatrixTest.java     |    6 +-
 .../htsjdk/samtools/fastq/FastqEncoderTest.java    |   76 +
 .../htsjdk/samtools/fastq/FastqRecordTest.java     |   31 +-
 .../htsjdk/samtools/fastq/FastqWriterTest.java     |   73 -
 .../filter/FailsVendorReadQualityFilterTest.java   |    3 +-
 .../samtools/filter/InsertSizeFilterTest.java      |    3 +-
 .../htsjdk/samtools/filter/IntervalFilterTest.java |   96 +
 .../filter/IntervalKeepPairFilterTest.java         |    3 +-
 .../filter/JavascriptSamRecordFilterTest.java      |    3 +-
 .../samtools/filter/MappingQualityFilterTest.java  |    3 +-
 .../filter/NotPrimaryAlignmentFilterTest.java      |  109 +
 .../samtools/filter/OverclippedReadFilterTest.java |    3 +-
 .../htsjdk/samtools/filter/ReadNameFilterTest.java |   52 +
 .../filter/SecondaryOrSupplementaryFilterTest.java |  107 +
 .../samtools/filter/SolexaNoiseFilterTest.java     |    3 +-
 .../java/htsjdk/samtools/filter/TagFilterTest.java |    5 +-
 .../htsjdk/samtools/liftover/LiftOverTest.java     |    3 +-
 .../htsjdk/samtools/metrics/MetricBaseTest.java    |    3 +-
 .../htsjdk/samtools/metrics/MetricsFileTest.java   |    3 +-
 .../htsjdk/samtools/metrics/StringHeaderTest.java  |    5 +-
 .../htsjdk/samtools/metrics/VersionHeaderTest.java |    5 +-
 .../samtools/reference/FastaSequenceFileTest.java  |    3 +-
 .../reference/FastaSequenceIndexCreatorTest.java   |   90 +
 .../samtools/reference/FastaSequenceIndexTest.java |   35 +-
 .../reference/IndexedFastaSequenceFileTest.java    |    3 +-
 .../ReferenceSequenceFileFactoryTests.java         |   21 +-
 .../reference/ReferenceSequenceFileWalkerTest.java |    3 +-
 .../samtools/reference/ReferenceSequenceTests.java |    3 +-
 .../ByteArraySeekableStreamTest.java               |  116 +
 .../seekablestream/SeekableBufferedStreamTest.java |    3 +-
 .../seekablestream/SeekableFTPStreamTest.java      |    3 +-
 .../seekablestream/SeekableFileStreamTest.java     |    3 +-
 .../seekablestream/SeekableMemoryStreamTest.java   |    3 +-
 .../seekablestream/SeekablePathStreamTest.java     |    4 +-
 .../seekablestream/SeekableStreamFactoryTest.java  |    3 +-
 .../java/htsjdk/samtools/sra/AbstractSRATest.java  |    7 +-
 .../samtools/util/AbstractLocusInfoTest.java       |    5 +-
 .../util/AbstractLocusIteratorTestTemplate.java    |    5 +-
 .../samtools/util/AbstractRecordAndOffsetTest.java |   14 +-
 .../util/AsyncBlockCompressedInputStreamTest.java  |   92 +
 .../samtools/util/AsyncBufferedIteratorTest.java   |   13 +-
 .../java/htsjdk/samtools/util/AsyncWriterTest.java |    3 +-
 .../java/htsjdk/samtools/util/BinaryCodecTest.java |    3 +-
 .../util/BlockCompressedFilePointerUtilTest.java   |    4 +-
 .../util/BlockCompressedInputStreamTest.java       |  201 ++
 .../util/BlockCompressedOutputStreamTest.java      |   32 +-
 .../util/BlockCompressedTerminatorTest.java        |   90 +-
 ...ReaderTest.java => BufferedLineReaderTest.java} |   53 +-
 .../htsjdk/samtools/util/CigarElementUnitTest.java |   43 +
 .../java/htsjdk/samtools/util/CigarUtilTest.java   |    3 +-
 .../samtools/util/CloseableIteratorTest.java       |    3 +-
 .../java/htsjdk/samtools/util/CodeUtilTest.java    |    3 +-
 .../htsjdk/samtools/util/ComparableTupleTest.java  |    3 +-
 .../samtools/util/CoordSpanInputSteamTest.java     |    3 +-
 .../java/htsjdk/samtools/util/DateParserTest.java  |  151 +
 .../htsjdk/samtools/util/DiskBackedQueueTest.java  |    3 +
 .../htsjdk/samtools/util/EdgeReadIteratorTest.java |   20 +-
 .../samtools/util/EdgingRecordAndOffsetTest.java   |   34 +-
 .../java/htsjdk/samtools/util/HistogramTest.java   |    3 +-
 .../htsjdk/samtools/util/IntervalListTest.java     |  115 +-
 .../htsjdk/samtools/util/IntervalTreeMapTest.java  |    7 +-
 .../htsjdk/samtools/util/IntervalTreeTest.java     |  158 +-
 src/test/java/htsjdk/samtools/util/IoUtilTest.java |   31 +-
 .../java/htsjdk/samtools/util/Iso8601DateTest.java |    3 +-
 src/test/java/htsjdk/samtools/util/IupacTest.java  |    3 +-
 .../htsjdk/samtools/util/LocatableUnitTest.java    |  160 +
 src/test/java/htsjdk/samtools/util/LogTest.java    |   41 +
 .../htsjdk/samtools/util/MergingIteratorTest.java  |    3 +-
 .../htsjdk/samtools/util/OverlapDetectorTest.java  |    3 +-
 .../samtools/util/PositionalOutputStreamTest.java  |    7 +-
 .../samtools/util/QualityEncodingDetectorTest.java |    3 +-
 .../samtools/util/RelativeIso8601DateTest.java     |    3 +-
 .../htsjdk/samtools/util/SamLocusIteratorTest.java |   44 +-
 .../htsjdk/samtools/util/SequenceUtilTest.java     |  187 +-
 .../htsjdk/samtools/util/SnappyLoaderUnitTest.java |   49 +
 .../samtools/util/SolexaQualityConverterTest.java  |    3 +-
 .../samtools/util/SortingCollectionTest.java       |   13 +-
 .../samtools/util/SortingLongCollectionTest.java   |    3 +-
 .../java/htsjdk/samtools/util/StringUtilTest.java  |  122 -
 .../htsjdk/samtools/util/TrimmingUtilTest.java     |    3 +-
 src/test/java/htsjdk/samtools/util/TupleTest.java  |    5 +-
 .../htsjdk/tribble/AbstractFeatureReaderTest.java  |  169 +-
 .../java/htsjdk/tribble/AsciiFeatureCodecTest.java |   35 +
 .../java/htsjdk/tribble/BinaryFeaturesTest.java    |    3 +-
 .../java/htsjdk/tribble/FeatureReaderTest.java     |    3 +-
 .../tribble/TribbleIndexFeatureReaderTest.java     |   53 +-
 src/test/java/htsjdk/tribble/TribbleTest.java      |    3 +-
 src/test/java/htsjdk/tribble/bed/BEDCodecTest.java |   67 +-
 .../java/htsjdk/tribble/gelitext/GeliTextTest.java |  100 -
 .../htsjdk/tribble/index/IndexFactoryTest.java     |  131 +-
 src/test/java/htsjdk/tribble/index/IndexTest.java  |   50 +-
 .../tribble/index/interval/IntervalTreeTest.java   |    3 +-
 .../tribble/index/linear/LinearIndexTest.java      |    3 +-
 .../htsjdk/tribble/index/tabix/TabixIndexTest.java |   96 +-
 .../tribble/readers/AsciiLineReaderTest.java       |   65 +-
 .../BlockCompressedAsciiLineReaderTest.java        |   76 +
 .../readers/LongLineBufferedReaderTest.java        |    3 +-
 .../readers/PositionalBufferedStreamTest.java      |    3 +-
 .../java/htsjdk/tribble/readers/ReaderTest.java    |    3 +-
 .../readers/SynchronousLineReaderUnitTest.java     |    3 +-
 .../htsjdk/tribble/readers/TabixReaderTest.java    |   26 +-
 .../java/htsjdk/tribble/util/ParsingUtilsTest.java |   64 +-
 .../htsjdk/tribble/util/ftp/FTPClientTest.java     |    3 +-
 .../java/htsjdk/tribble/util/ftp/FTPUtilsTest.java |    3 +-
 .../util/popgen/HardyWeinbergCalculationTest.java  |    3 +-
 .../htsjdk/variant/PrintVariantsExampleTest.java   |    5 +-
 src/test/java/htsjdk/variant/VariantBaseTest.java  |    3 +-
 .../htsjdk/variant/bcf2/BCF2WriterUnitTest.java    |  282 ++
 .../utils/SAMSequenceDictionaryExtractorTest.java  |    3 +-
 .../variantcontext/VariantContextTestProvider.java |    5 +-
 .../variantcontext/VariantContextUnitTest.java     |   89 +-
 .../variantcontext/filter/CompoundFilterTest.java  |    5 +-
 .../FilteringVariantContextIteratorTest.java       |    3 +-
 .../filter/GenotypeQualityFilterTest.java          |    3 +-
 .../filter/HeterozygosityFilterTest.java           |    3 +-
 .../filter/JavascriptVariantFilterTest.java        |    3 +-
 .../filter/PassingVariantFilterTest.java           |    5 +-
 .../variantcontext/filter/SnpFilterTest.java       |    3 +-
 .../writer/AsyncVariantContextWriterUnitTest.java  |  146 +
 .../writer/TabixOnTheFlyIndexCreationTest.java     |    3 +-
 .../variantcontext/writer/VCFWriterUnitTest.java   |  116 +-
 .../VariantContextWriterBuilderUnitTest.java       |    8 +
 .../java/htsjdk/variant/vcf/VCFEncoderTest.java    |    3 +-
 .../java/htsjdk/variant/vcf/VCFHeaderUnitTest.java |   56 +-
 .../SequenceUtil/upper_and_lowercase_read.sam      |    2 +-
 .../ValidateSamFileTest/nm_tag_validation.cram     |  Bin 0 -> 39392 bytes
 .../ValidateSamFileTest/nm_tag_validation.fa       |   71 +
 .../ValidateSamFileTest/nm_tag_validation.fa.fai   |    1 +
 .../ValidateSamFileTest/seq_qual_len_mismatch.sam  |   21 +
 ...CEUTrio.HiSeq.WGS.b37.NA12878.20.first.8000.bam |  Bin 0 -> 997323 bytes
 .../htsjdk/samtools/cram/amb#amb.2.1.cram          |  Bin 0 -> 11045 bytes
 .../htsjdk/samtools/cram/amb#amb.3.0.cram          |  Bin 0 -> 1210 bytes
 .../resources/htsjdk/samtools/cram/amb#amb.sam     |   57 +
 src/test/resources/htsjdk/samtools/cram/amb.fa     |    2 +
 src/test/resources/htsjdk/samtools/cram/amb.fa.fai |    1 +
 .../htsjdk/samtools/cram/ambiguityCodes.fasta      |    2 +
 .../htsjdk/samtools/cram/ambiguityCodes.fasta.fai  |    1 +
 .../resources/htsjdk/samtools/cram/fieldarith.sam  |   15 -
 .../samtools/cram/human_g1k_v37.20.subset.dict     |    2 +
 .../samtools/cram/human_g1k_v37.20.subset.fasta    |  151 +
 .../cram/human_g1k_v37.20.subset.fasta.fai         |    1 +
 .../samtoolsSliceMD5WithAmbiguityCodesTest.cram    |  Bin 0 -> 1826 bytes
 src/test/resources/htsjdk/samtools/cram/test.dict  |    2 -
 src/test/resources/htsjdk/samtools/cram/test.fa    |    2 -
 .../resources/htsjdk/samtools/cram/test.fa.fai     |    1 -
 src/test/resources/htsjdk/samtools/cram/test2.dict |    2 -
 src/test/resources/htsjdk/samtools/cram/test2.fa   |    2 -
 .../resources/htsjdk/samtools/cram/test2.fa.fai    |    1 -
 .../resources/htsjdk/samtools/filter/names.txt     |    3 +
 src/test/resources/htsjdk/samtools/noheader.sam    |   10 +
 .../Homo_sapiens_assembly18.trimmed.fasta.gz       |  Bin 0 -> 335008 bytes
 .../Homo_sapiens_assembly18.trimmed.fasta.gz.fai   |    2 +
 .../Homo_sapiens_assembly18.trimmed.fasta.gz.gzi   |  Bin 0 -> 264 bytes
 .../resources/htsjdk/samtools/reference/crlf.fasta |    4 +
 .../htsjdk/samtools/reference/crlf.fasta.fai       |    2 +
 .../reference/header_with_white_space.fasta        |    4 +
 .../reference/header_with_white_space.fasta.fai    |    2 +
 src/test/resources/htsjdk/samtools/util/random.bin |  Bin 0 -> 131072 bytes
 .../resources/htsjdk/samtools/util/random.bin.gz   |  Bin 0 -> 131254 bytes
 .../baseVariants.mangled.vcf.gz                    |  Bin 0 -> 517 bytes
 .../baseVariants.mangled.vcf.gz.tbi                |  Bin 0 -> 155 bytes
 .../AbstractFeatureReaderTest/baseVariants.vcf     |   37 +
 .../AbstractFeatureReaderTest/baseVariants.vcf.gz  |  Bin 0 -> 516 bytes
 .../baseVariants.vcf.gz.tbi                        |  Bin 0 -> 154 bytes
 .../AbstractFeatureReaderTest/baseVariants.vcf.idx |  Bin 0 -> 507 bytes
 .../corruptedBaseVariants.vcf.idx                  |  Bin 0 -> 529 bytes
 .../mangledBaseVariants.vcf                        |   37 +
 .../mangledBaseVariants.vcf.idx                    |  Bin 0 -> 509 bytes
 .../htsjdk/tribble/bed/2featuresNoHeader.bed.gz    |  Bin 0 -> 73 bytes
 .../tribble/bed/2featuresNoHeader.bed.gz.tbi       |  Bin 0 -> 123 bytes
 .../htsjdk/tribble/bed/2featuresWithHeader.bed.gz  |  Bin 0 -> 81 bytes
 .../tribble/bed/2featuresWithHeader.bed.gz.tbi     |  Bin 0 -> 124 bytes
 .../htsjdk/tribble/bed/3featuresNoHeader.bed.gz    |  Bin 0 -> 78 bytes
 .../tribble/bed/3featuresNoHeader.bed.gz.tbi       |  Bin 0 -> 123 bytes
 .../htsjdk/tribble/bed/Unigene.sample.bed.gz       |  Bin 0 -> 3908 bytes
 .../htsjdk/tribble/tabix/4featuresHG38Header.vcf   | 3412 ++++++++++++++++++++
 .../tribble/tabix/4featuresHG38Header.vcf.gz       |  Bin 0 -> 25163 bytes
 .../tribble/tabix/4featuresHG38Header.vcf.gz.tbi   |  Bin 0 -> 258 bytes
 src/test/scala/htsjdk/UnitSpec.scala               |   25 +
 .../samtools/fastq/FastqReaderWriterTest.scala     |  180 ++
 .../htsjdk/samtools/util/StringUtilTest.scala      |  134 +
 526 files changed, 15182 insertions(+), 3944 deletions(-)

diff --git a/.codecov.yml b/.codecov.yml
new file mode 100644
index 0000000..7167553
--- /dev/null
+++ b/.codecov.yml
@@ -0,0 +1,29 @@
+codecov:
+  branch: master
+
+coverage:
+  precision: 3
+  round: nearest
+  range: "50...100"
+
+  status:
+    project:
+      default:
+        target: auto
+        threshold: .01
+        branches: null
+
+    patch:
+      default:
+        target: auto
+        branches: null
+
+    changes:
+      default:
+        branches: null
+
+
+comment:
+  layout: "header, diff, changes, sunburst, uncovered, tree"
+  branches: null
+  behavior: default
diff --git a/.travis.yml b/.travis.yml
index 1e92599..a1c1b2b 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -10,11 +10,12 @@ cache:
     - $HOME/.m2
 jdk:
   - oraclejdk8
-script: ./gradlew jacocoTestReport testSRA;
+  - openjdk8
+script: ./gradlew test jacocoTestReport;
 after_success:
+  - bash <(curl -s https://codecov.io/bash)
   - echo "TRAVIS_BRANCH='$TRAVIS_BRANCH'";
     echo "JAVA_HOME='$JAVA_HOME'";
-    ./gradlew coveralls;
     if [ "$TRAVIS_BRANCH" == "master" ]; then
        ./gradlew uploadArchives;
     fi
diff --git a/README.md b/README.md
index 0e468d3..afe901e 100644
--- a/README.md
+++ b/README.md
@@ -1,8 +1,9 @@
-[![Coverage Status](https://coveralls.io/repos/github/samtools/htsjdk/badge.svg?branch=master)](https://coveralls.io/github/samtools/htsjdk?branch=master)
+[![Coverage Status](https://codecov.io/gh/samtools/htsjdk/branch/master/graph/badge.svg)](https://codecov.io/gh/samtools/htsjdk)
 [![Build Status](https://travis-ci.org/samtools/htsjdk.svg?branch=master)](https://travis-ci.org/samtools/htsjdk)
 [![Maven Central](https://maven-badges.herokuapp.com/maven-central/com.github.samtools/htsjdk/badge.svg)](http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22com.github.samtools%22%20AND%20a%3A%22htsjdk%22)
 [![License](http://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/samtools/htsjdk)
 [![Language](http://img.shields.io/badge/language-java-brightgreen.svg)](https://www.java.com/)
+[![Join the chat at https://gitter.im/samtools/htsjdk](https://badges.gitter.im/samtools/htsjdk.svg)](https://gitter.im/samtools/htsjdk?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
 
 Status of downstream projects automatically built on top of the current htsjdk master branch. See [gatk-jenkins](https://gatk-jenkins.broadinstitute.org/view/HTSJDK%20Release%20Tests/) for detailed logs. Failure may indicate problems  in htsjdk, but may also be due to expected incompatibilities between versions, or unrelated failures in downstream projects.
 - [Picard](https://github.com/broadinstitute/picard):  [![Build Status](https://gatk-jenkins.broadinstitute.org/buildStatus/icon?job=picard-on-htsjdk-master)](https://gatk-jenkins.broadinstitute.org/job/picard-on-htsjdk-master/)
@@ -15,11 +16,19 @@ common file formats, such as [SAM][1] and [VCF][2], used for high-throughput
 sequencing data.  There are also an number of useful utilities for 
 manipulating HTS data.
 
-Please see the [HTSJDK Documentation](http://samtools.github.io/htsjdk) for more information.
-
 > **NOTE: _HTSJDK does not currently support the latest Variant Call Format Specification (VCFv4.3 and BCFv2.2)._**
 
-#### Building HTSJDK
+### Documentation & Getting Help
+
+API documentation for all versions of HTSJDK since `1.128` are available through [javadoc.io](http://www.javadoc.io/doc/com.github.samtools/htsjdk).
+
+If you believe you have found a bug or have an issue with the library please a) search the open and recently closed issues to ensure it has not already been reported, then b) log an issue.
+
+The project has a [gitter chat room](https://gitter.im/samtools/htsjdk) if you would like to chat with the developers and others involved in the project.
+
+To receive announcements of releases and other significant project news please subscribe to the [htsjdk-announce](https://groups.google.com/forum/#!forum/htsjdk-announce) google group.
+
+### Building HTSJDK
 
 HTSJDK is now built using [gradle](http://gradle.org/).
 
@@ -73,7 +82,7 @@ Example gradle usage from the htsjdk root directory:
  ./gradlew tasks
  ```
 
-#### Create an HTSJDK project in IntelliJ
+### Create an HTSJDK project in IntelliJ
 To create a project in IntelliJ IDE for htsjdk do the following:
 
 1. Select fom the menu: `File -> New -> Project from Existing Sources`
@@ -82,13 +91,17 @@ To create a project in IntelliJ IDE for htsjdk do the following:
 
 From time to time if dependencies change in htsjdk you may need to refresh the project from the `View -> Gradle` menu.
 
-#### Licensing Information
+### Licensing Information
 
-Not all sub-packages of htsjdk are subject to the same license, so a license notice is included in each source file or sub-package as appropriate. Please check the relevant license notice whenever you start working with a part of htsjdk that you have not previously worked with to avoid any surprises. 
+Not all sub-packages of htsjdk are subject to the same license, so a license notice is included in each source file or sub-package as appropriate. 
+Please check the relevant license notice whenever you start working with a part of htsjdk that you have not previously worked with to avoid any surprises. 
+Broadly speaking the majority of the code is covered under the MIT license with the following notable exceptions:
 
-#### Java Minimum Version Support Policy
+* Much of the CRAM code is under the Apache License, Version 2
+* Core `tribble` code (underlying VCF reading/writing amongst other things) is under LGPL
+* Code supporting the reading/writing of SRA format is uncopyrighted & public domain
 
-> **NOTE: _Effective November 24th 2015, HTSJDK has ended support of Java 7 and previous versions. Java 8 is now required_.**
+### Java Minimum Version Support Policy
 
 We will support all Java SE versions supported by Oracle until at least six months after Oracle's Public Updates period has ended ([see this link](http://www.oracle.com/technetwork/java/eol-135779.html)).
 
@@ -96,9 +109,8 @@ Java SE Major Release | End of Java SE Oracle Public Updates | Proposed End of S
 ---- | ---- | ---- | ----
 6 | Feb 2013 | Aug 2013 | Oct 2015
 7 | Apr 2015 | Oct 2015 | Oct 2015
-8* | Mar 2017 | Sep 2017 | Sep 2017
+8 | Jul 2018 | Jul 2018 | TBD
 
-* to be finalized
 
 HTSJDK is migrating to semantic versioning (http://semver.org/). We will eventually adhere to it strictly and bump our major version whenever there are breaking changes to our API, but until we more clearly define what constitutes our official API, clients should assume that every release potentially contains at least minor changes to public methods.
 
diff --git a/build.gradle b/build.gradle
index 9e8f351..46271a1 100644
--- a/build.gradle
+++ b/build.gradle
@@ -5,13 +5,14 @@ buildscript {
 }
 
 plugins {
-    id "java"
+    id 'java'
+    id 'scala'
     id 'maven'
     id 'signing'
     id 'jacoco'
     id 'com.palantir.git-version' version '0.5.1'
     id 'com.github.johnrengelman.shadow' version '1.2.3'
-    id "com.github.kt3k.coveralls" version "2.6.3"
+    id 'com.github.maiflai.scalatest' version '0.15'
 }
 
 repositories {
@@ -19,30 +20,29 @@ repositories {
 }
 
 jacocoTestReport {
-    dependsOn test
     group = "Reporting"
     description = "Generate Jacoco coverage reports after running tests."
     additionalSourceDirs = files(sourceSets.main.allJava.srcDirs)
 
     reports {
-        xml.enabled = true // coveralls plugin depends on xml format report
+        xml.enabled = true // codecov depends on xml format report
         html.enabled = true
     }
 }
 
-jacoco {
-    toolVersion = "0.7.5.201505241946"
-}
-
 dependencies {
     compile "org.apache.commons:commons-jexl:2.1.1"
     compile "commons-logging:commons-logging:1.1.1"
-    compile "org.xerial.snappy:snappy-java:1.0.3-rc3"
+    compile "org.xerial.snappy:snappy-java:1.1.4"
     compile "org.apache.commons:commons-compress:1.4.1"
     compile "org.tukaani:xz:1.5"
     compile "gov.nih.nlm.ncbi:ngs-java:1.2.4"
 
+    testCompile "org.scala-lang:scala-library:2.12.1"
+    testCompile "org.scalatest:scalatest_2.12:3.0.1"
+    testRuntime 'org.pegdown:pegdown:1.4.2' // Necessary for generating HTML reports with ScalaTest
     testCompile "org.testng:testng:6.9.9"
+    testCompile "com.google.jimfs:jimfs:1.1"
 }
 
 sourceCompatibility = 1.8
@@ -67,76 +67,57 @@ jar {
 
 import org.gradle.internal.os.OperatingSystem;
 
-tasks.withType(Test) {
-    outputs.upToDateWhen { false } // tests will always rerun
-    useTestNG()
+tasks.withType(Test) { task ->
+    task.outputs.upToDateWhen { false } // tests will always rerun
 
-    // set heap size for the test JVM(s)
-    minHeapSize = "1G"
-    maxHeapSize = "2G"
+    // Always run serially because there are some very badly behaved tests in HTSJDK that
+    // will cause errors and even deadlocks if run multi-threaded
+    task.maxParallelForks = 1
 
-    jvmArgs '-Djava.awt.headless=true'  //this prevents awt from displaying a java icon while the tests are running
+    // set heap size for the test JVM(s)
+    task.minHeapSize = "1G"
+    task.maxHeapSize = "2G"
 
-    if (System.env.CI == "true") {  //if running under a CI output less into the logs
-        int count = 0
+    task.jvmArgs '-Djava.awt.headless=true'  //this prevents awt from displaying a java icon while the tests are running
+}
 
-        beforeTest { descriptor ->
-            count++
-            if( count % 100 == 0) {
-                logger.lifecycle("Finished "+ Integer.toString(count++) + " tests")
-            }
-        }
-    } else {
-        // show standard out and standard error of the test JVM(s) on the console
-        testLogging.showStandardStreams = true
-        beforeTest { descriptor ->
-            logger.lifecycle("Running Test: " + descriptor)
-        }
+task findScalaAndJavaTypes(type: Exec) {
+    description = "Check that Scala files only exist in the scala test dir and that java files do not reside in the scala test dir."
+    commandLine './scripts/checkScalaAndJavaFiles.sh'
+}
 
-        // listen to standard out and standard error of the test JVM(s)
-        onOutput { descriptor, event ->
-            logger.lifecycle("Test: " + descriptor + " produced standard out/err: " + event.message )
-        }
-    }
+test {
+    description = "Runs the unit tests other than the SRA tests"
 
     testLogging {
-        testLogging {
-            events "skipped", "failed"
-            exceptionFormat = "full"
-        }
-        afterSuite { desc, result ->
-            if (!desc.parent) { // will match the outermost suite
-                println "Results: ${result.resultType} (${result.testCount} tests, ${result.successfulTestCount} successes, ${result.failedTestCount} failures, ${result.skippedTestCount} skipped)"
-            }
-        }
+        events "failed", "skipped"
     }
-}
 
-test {
-    description = "Runs the unit tests other than the SRA tests"
+    if (System.env.CI == "true") {
+        jvmArgs += '-Dsamjdk.sra_libraries_download=true'
+    }
 
-    useTestNG {
-        if( OperatingSystem.current().isUnix() ){
-            excludeGroups "slow", "broken", "sra"
-        } else {
-            excludeGroups "slow", "broken", "unix", "sra"
-        }
+    tags {
+        exclude "slow"
+        exclude "broken"
+        if (System.env.CI == "false") exclude "sra"
+        if (!OperatingSystem.current().isUnix()) exclude "unix"
     }
-}
+} dependsOn findScalaAndJavaTypes
 
 task testSRA(type: Test) {
-    jvmArgs '-Dsamjdk.sra_libraries_download=true'
+    description = "Run the SRA tests"
+    jvmArgs += '-Dsamjdk.sra_libraries_download=true'
 
-    description "Run the SRA tests"
-    useTestNG {
-        configFailurePolicy 'continue'
-        includeGroups "sra"
+    tags {
+        exclude "slow"
+        exclude "broken"
     }
 }
 
 task wrapper(type: Wrapper) {
     description = "Regenerate the gradle wrapper"
-    gradleVersion = '2.13'
+    gradleVersion = '3.2.1'
 }
 
 // This is a hack to disable the java 8 default javadoc lint until we fix the html formatting
@@ -188,7 +169,7 @@ uploadArchives {
                 authentication(userName: project.findProperty("sonatypeUsername"), password: project.findProperty("sonatypePassword"))
             }
 
-            snapshotRepository(url: "https://artifactory.broadinstitute.org/artifactory/libs-snapshot-local/") {
+            snapshotRepository(url: "https://broadinstitute.jfrog.io/broadinstitute/libs-snapshot-local/") {
                 authentication(userName: System.env.ARTIFACTORY_USERNAME, password: System.env.ARTIFACTORY_PASSWORD)
             }
 
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index aad2b24..f08cd01 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -1,6 +1,6 @@
-#Fri May 13 14:00:35 EDT 2016
+#Fri Jan 20 17:10:11 EST 2017
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-2.13-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-3.2.1-all.zip
diff --git a/gradlew b/gradlew
index 27309d9..9aa616c 100755
--- a/gradlew
+++ b/gradlew
@@ -161,4 +161,9 @@ function splitJvmOpts() {
 eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
 JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
 
+# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
+if [[ "$(uname)" == "Darwin" ]] && [[ "$HOME" == "$PWD" ]]; then
+  cd "$(dirname "$0")"
+fi
+
 exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
diff --git a/scripts/checkScalaAndJavaFiles.sh b/scripts/checkScalaAndJavaFiles.sh
new file mode 100755
index 0000000..4dbf794
--- /dev/null
+++ b/scripts/checkScalaAndJavaFiles.sh
@@ -0,0 +1,17 @@
+#!/usr/bin/env bash
+
+# Check that Scala files only exist in the scala test dir and
+# that java files do not reside in the scala test dir
+
+if `find src | grep -v '^src/test/scala' | grep -q '\.scala$' ` ; then
+	echo 'Found scala file(s) outside of scala test directory';
+	find src | grep -v '^src/test/scala' | grep '\.scala$'
+	exit 1; 
+fi
+
+if `find src/test/scala | grep -q '\.java$' ` ; then
+        echo 'Found java file(s) in scala test directory';
+	find src/test/scala | grep '\.java$'        
+	exit 1;
+fi
+
diff --git a/scripts/release_picard.sh b/scripts/release_picard.sh
deleted file mode 100755
index 732234a..0000000
--- a/scripts/release_picard.sh
+++ /dev/null
@@ -1,152 +0,0 @@
-#! /bin/bash
-
-# The MIT License
-#
-# Copyright (c) $today.year The Broad Institute
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in
-# all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-
-PROGNAME=`basename $0`
-USERNAME=alecw
-
-function usage () {
-    echo "USAGE: $PROGNAME <release-id>" >&2
-    echo "Tags Github Picard source, checks out and builds sources, uploads build results to Sourceforge.">&2
-    echo "-t <tmpdir>                Build in <tmpdir>.  Default: $TMPDIR." >&2
-    echo "-u <sourceforge-user> Sourceforge username.  Default: $USERNAME." >&2
-}
-
-function tag_exists() {
-    git tag | grep -q "$1$"
-    if test $? = 0
-        then return 0
-        else return 1
-    fi
-}
-
-function remote_does_not_exist() {
-    git ls-remote $1 2>/dev/null 1>/dev/null
-    if test $? = 0
-        then return 1
-        else return 0
-    fi
-}
-
-function remote_tag_does_not_exist() {
-    git ls-remote --tags $2 | grep -q "$1$";
-    if test $? = 0
-        then return 0
-        else return 1
-    fi
-}
-
-set -e
-
-while getopts "ht:u:" options; do
-  case $options in
-    u ) USERNAME=$OPTARG;;
-    t ) TMPDIR=$OPTARG;;
-    h ) usage;;
-    \? ) usage
-         exit 1;;
-    * ) usage
-          exit 1;;
-
-  esac
-done
-shift $(($OPTIND - 1))
-
-if (( $# != 1 ))
- then echo "ERROR: Incorrect number of arguments." >&2
-      usage
-      exit 1
-fi
-
-if [[ x"$EDITOR" == x ]]
-then echo "EDITOR environment variable must be set." >&2
-       exit 1
-fi
-
-# Require actual Java 1.6.  This is not necessary for compiling, because can run 1.7 with -target 1.6,
-# but this is necessary in order to force unit tests to run with 1.6.
-(echo $JAVA_HOME | fgrep -q 1.6 ) || { echo "JAVA_HOME $JAVA_HOME is not 1.6" ; exit 1; }
-java_version=`java -version 2>&1 | fgrep -i version`
-(echo $java_version | fgrep -q 1.6. ) || { echo "java -version: $java_version is not 1.6"; exit 1; }
-
-GITROOT=git at github.com:samtools/htsjdk.git
-REMOTE=origin
-
-RELEASE_ID=$1
-
-# Since releases are lexically sorted, need to filter in order to have 1.1xx be at the bottom.
-PREV_RELEASE_ID=`git ls-remote --tags | grep -v "{}$" | awk '{print $2}' | sed -e "s_.*/__g" | egrep '[.]\d\d\d' | tail -1`
-
-if [[ -e $TMPDIR/htsjdk ]]
-then echo "$TMPDIR/htsjdk already exists.  Please remove or specify a different TMPDIR." >&2
-        exit 1
-fi
-cd $TMPDIR
-
-# clone
-git clone $GITROOT htsjdk 
-cd htsjdk
-ant clean # Shouldn't be necessary, but no harm
-
-# tag must not exist
-if tag_exists $RELEASE_ID
-then echo "ERROR: Tag $RELEASE_ID locally already exists"
-     exit 1
-fi
-
-# remote must exist
-if remote_does_not_exist $REMOTE
-then echo "ERROR: Remote $REMOTE does not exist"
-     exit 1
-fi
-
-# tag at remote must not exist
-if remote_tag_does_not_exist $RELEASE_ID $REMOTE
-then echo "ERROR: Tag $RELEASE_ID at remote $REMOTE already exists"
-     exit 1
-fi
-
-# tag the branch locally then push to remote
-echo Tagging master as $tag and pushing the tag to $remote
-# NB: we could use annotated tags in the future to store release notes, etc.
-git tag $tag
-git push $remote $tag # TODO: should we check this return value in case someone made a tag since we last checked?
-
-ant -lib lib/ant test
-
-ant -lib lib/ant clean all javadoc
-
-mkdir -p deploy/picard-tools/$RELEASE_ID
-
-mkdir -p deploy/htsjdk/$RELEASE_ID
-cp dist/htsjdk-$RELEASE_ID.jar deploy/htsjdk/$RELEASE_ID/
-
-# Make all files to be pushed to Sourceforge writable by group so that another Picard admin can overwrite them.
-
-chmod -R gu+rw javadoc deploy dist
-
-find javadoc deploy dist -type d -exec chmod g+s '{}' ';' 
-
-scp -p -r javadoc $USERNAME,picard at web.sourceforge.net:htdocs
-
-cd deploy
-scp -p -r htsjdk/$RELEASE_ID $USERNAME,picard at web.sourceforge.net:/home/frs/project/p/pi/picard/htsjdk/
diff --git a/settings.gradle b/settings.gradle
new file mode 100644
index 0000000..95584da
--- /dev/null
+++ b/settings.gradle
@@ -0,0 +1 @@
+rootProject.name =	"htsjdk"
diff --git a/src/main/java/htsjdk/samtools/AbstractBAMFileIndex.java b/src/main/java/htsjdk/samtools/AbstractBAMFileIndex.java
index 6bf28ef..724e73c 100644
--- a/src/main/java/htsjdk/samtools/AbstractBAMFileIndex.java
+++ b/src/main/java/htsjdk/samtools/AbstractBAMFileIndex.java
@@ -88,6 +88,7 @@ public abstract class AbstractBAMFileIndex implements BAMIndex {
     /**
      * Close this index and release any associated resources.
      */
+    @Override
     public void close() {
         mIndexBuffer.close();
     }
@@ -170,6 +171,7 @@ public abstract class AbstractBAMFileIndex implements BAMIndex {
      * @return The file offset of the first record in the last linear bin, or -1
      * if there are no elements in linear bins (i.e. no mapped reads).
      */
+    @Override
     public long getStartOfLastLinearBin() {
         seek(4);
 
@@ -206,6 +208,7 @@ public abstract class AbstractBAMFileIndex implements BAMIndex {
      * @param reference the reference of interest
      * @return meta data for the reference
      */
+    @Override
     public BAMIndexMetaData getMetaData(final int reference) {
         seek(4);
 
diff --git a/src/main/java/htsjdk/samtools/AbstractSAMHeaderRecord.java b/src/main/java/htsjdk/samtools/AbstractSAMHeaderRecord.java
index 769a7a7..0c3d484 100644
--- a/src/main/java/htsjdk/samtools/AbstractSAMHeaderRecord.java
+++ b/src/main/java/htsjdk/samtools/AbstractSAMHeaderRecord.java
@@ -23,13 +23,12 @@
  */
 package htsjdk.samtools;
 
+import javax.xml.bind.annotation.XmlTransient;
 import java.io.Serializable;
 import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Set;
 
-import javax.xml.bind.annotation.XmlTransient;
-
 /**
  * Base class for the various concrete records in a SAM header, providing uniform
  * access to the attributes.
@@ -60,8 +59,6 @@ public abstract class AbstractSAMHeaderRecord implements Serializable {
     /**
      * Set the given value for the attribute named 'key'.  Replaces an existing value, if any.
      * If value is null, the attribute is removed.
-     * Supported types are Character, Integer, Float and String.  Byte and Short may also be
-     * passed in but they will be converted to Integer.
      * @param key attribute name
      * @param value attribute value
      */
@@ -72,6 +69,7 @@ public abstract class AbstractSAMHeaderRecord implements Serializable {
             mAttributes.put(key, value);
         }
     }
+
     /**
      * Returns the Set of attributes.
      */
@@ -113,4 +111,10 @@ public abstract class AbstractSAMHeaderRecord implements Serializable {
     @Override public String toString() {
         return getClass().getSimpleName() + this.mAttributes.toString();
     }
+
+    /**
+     * Returns the record in the SAM line-based text format.  Fields are
+     * separated by '\t' characters. The String is NOT terminated by '\n'.
+     */
+    abstract public String getSAMString();
 }
diff --git a/src/main/java/htsjdk/samtools/AsyncSAMFileWriter.java b/src/main/java/htsjdk/samtools/AsyncSAMFileWriter.java
index ab5b8d0..1a860f2 100644
--- a/src/main/java/htsjdk/samtools/AsyncSAMFileWriter.java
+++ b/src/main/java/htsjdk/samtools/AsyncSAMFileWriter.java
@@ -48,11 +48,13 @@ class AsyncSAMFileWriter extends AbstractAsyncWriter<SAMRecord> implements SAMFi
      * Adds an alignment to the queue to be written.  Will re-throw any exception that was received when
      * writing prior record(s) to the underlying SAMFileWriter.
      */
+    @Override
     public void addAlignment(final SAMRecord alignment) {
         write(alignment);
     }
 
     /** Returns the SAMFileHeader from the underlying SAMFileWriter. */
+    @Override
     public SAMFileHeader getFileHeader() {
         return this.underlyingWriter.getFileHeader();
     }
diff --git a/src/main/java/htsjdk/samtools/BAMFileReader.java b/src/main/java/htsjdk/samtools/BAMFileReader.java
index 98bb74f..9642de8 100644
--- a/src/main/java/htsjdk/samtools/BAMFileReader.java
+++ b/src/main/java/htsjdk/samtools/BAMFileReader.java
@@ -25,12 +25,8 @@ package htsjdk.samtools;
 
 
 import htsjdk.samtools.seekablestream.SeekableStream;
-import htsjdk.samtools.util.BinaryCodec;
-import htsjdk.samtools.util.BlockCompressedInputStream;
-import htsjdk.samtools.util.CloseableIterator;
-import htsjdk.samtools.util.CoordMath;
-import htsjdk.samtools.util.RuntimeIOException;
-import htsjdk.samtools.util.StringLineReader;
+import htsjdk.samtools.util.*;
+import htsjdk.samtools.util.zip.InflaterFactory;
 
 import java.io.DataInputStream;
 import java.io.File;
@@ -44,7 +40,7 @@ import java.util.NoSuchElementException;
 /**
  * Class for reading and querying BAM files.
  */
-class BAMFileReader extends SamReader.ReaderImplementation {
+public class BAMFileReader extends SamReader.ReaderImplementation {
     // True if reading from a File rather than an InputStream
     private boolean mIsSeekable = false;
 
@@ -67,10 +63,6 @@ class BAMFileReader extends SamReader.ReaderImplementation {
     // If true, all SAMRecords are fully decoded as they are read.
     private boolean eagerDecode;
 
-    // If true, the BAMFileReader will use asynchronous IO.
-    // Note: this field currently has no effect (is not hooked up anywhere), but will be in the future. See https://github.com/samtools/htsjdk/pull/576
-    private final boolean useAsynchronousIO;
-
     // For error-checking.
     private ValidationStringency mValidationStringency;
 
@@ -95,41 +87,94 @@ class BAMFileReader extends SamReader.ReaderImplementation {
     /**
      * Prepare to read BAM from a stream (not seekable)
      * @param stream source of bytes.
+     * @param indexFile BAM index file
      * @param eagerDecode if true, decode all BAM fields as reading rather than lazily.
+     * @param useAsynchronousIO if true, use asynchronous I/O
      * @param validationStringency Controls how to handle invalidate reads or header lines.
+     * @param samRecordFactory SAM record factory
+     * @throws IOException
      */
     BAMFileReader(final InputStream stream,
                   final File indexFile,
                   final boolean eagerDecode,
                   final boolean useAsynchronousIO,
                   final ValidationStringency validationStringency,
-                  final SAMRecordFactory factory)
-        throws IOException {
+                  final SAMRecordFactory samRecordFactory)
+            throws IOException {
+        this(stream, indexFile, eagerDecode, useAsynchronousIO, validationStringency, samRecordFactory,
+             BlockGunzipper.getDefaultInflaterFactory());
+    }
+
+    /**
+     * Prepare to read BAM from a stream (not seekable)
+     * @param stream source of bytes.
+     * @param indexFile BAM index file
+     * @param eagerDecode if true, decode all BAM fields as reading rather than lazily.
+     * @param useAsynchronousIO if true, use asynchronous I/O
+     * @param validationStringency Controls how to handle invalidate reads or header lines.
+     * @param samRecordFactory SAM record factory
+     * @param inflaterFactory InflaterFactory used by BlockCompressedInputStream
+     * @throws IOException
+     */
+    BAMFileReader(final InputStream stream,
+                  final File indexFile,
+                  final boolean eagerDecode,
+                  final boolean useAsynchronousIO,
+                  final ValidationStringency validationStringency,
+                  final SAMRecordFactory samRecordFactory,
+                  final InflaterFactory inflaterFactory)
+            throws IOException {
         mIndexFile = indexFile;
         mIsSeekable = false;
-        this.useAsynchronousIO = useAsynchronousIO;
-        mCompressedInputStream = new BlockCompressedInputStream(stream);
+        mCompressedInputStream = useAsynchronousIO ? new AsyncBlockCompressedInputStream(stream, inflaterFactory) : new BlockCompressedInputStream(stream, inflaterFactory);
         mStream = new BinaryCodec(new DataInputStream(mCompressedInputStream));
         this.eagerDecode = eagerDecode;
         this.mValidationStringency = validationStringency;
-        this.samRecordFactory = factory;
+        this.samRecordFactory = samRecordFactory;
         this.mFileHeader = readHeader(this.mStream, this.mValidationStringency, null);
     }
 
     /**
      * Prepare to read BAM from a file (seekable)
      * @param file source of bytes.
+     * @param indexFile BAM index file
+     * @param eagerDecode if true, decode all BAM fields as reading rather than lazily.
+     * @param useAsynchronousIO if true, use asynchronous I/O
+     * @param validationStringency Controls how to handle invalidate reads or header lines.
+     * @param samRecordFactory SAM record factory
+     * @throws IOException
+     */
+    BAMFileReader(final File file,
+                  final File indexFile,
+                  final boolean eagerDecode,
+                  final boolean useAsynchronousIO,
+                  final ValidationStringency validationStringency,
+                  final SAMRecordFactory samRecordFactory)
+        throws IOException {
+        this(file, indexFile, eagerDecode, useAsynchronousIO, validationStringency, samRecordFactory, BlockGunzipper.getDefaultInflaterFactory());
+    }
+
+    /**
+     * Prepare to read BAM from a file (seekable)
+     * @param file source of bytes.
+     * @param indexFile BAM index file
      * @param eagerDecode if true, decode all BAM fields as reading rather than lazily.
+     * @param useAsynchronousIO if true, use asynchronous I/O
      * @param validationStringency Controls how to handle invalidate reads or header lines.
+     * @param samRecordFactory SAM record factory
+     * @param inflaterFactory InflaterFactory used by BlockCompressedInputStream
+     * @throws IOException
      */
     BAMFileReader(final File file,
                   final File indexFile,
                   final boolean eagerDecode,
                   final boolean useAsynchronousIO,
                   final ValidationStringency validationStringency,
-                  final SAMRecordFactory factory)
+                  final SAMRecordFactory samRecordFactory,
+                  final InflaterFactory inflaterFactory)
         throws IOException {
-        this(new BlockCompressedInputStream(file), indexFile!=null ? indexFile : SamFiles.findIndex(file), eagerDecode, useAsynchronousIO, file.getAbsolutePath(), validationStringency, factory);
+        this(useAsynchronousIO ? new AsyncBlockCompressedInputStream(file, inflaterFactory) : new BlockCompressedInputStream(file, inflaterFactory),
+                indexFile!=null ? indexFile : SamFiles.findIndex(file), eagerDecode, useAsynchronousIO, file.getAbsolutePath(), validationStringency, samRecordFactory);
         if (mIndexFile != null && mIndexFile.lastModified() < file.lastModified()) {
             System.err.println("WARNING: BAM index file " + mIndexFile.getAbsolutePath() +
                     " is older than BAM " + file.getAbsolutePath());
@@ -138,62 +183,148 @@ class BAMFileReader extends SamReader.ReaderImplementation {
         mStream.setInputFileName(file.getAbsolutePath());
     }
 
+    /**
+     * Prepare to read BAM from a stream (seekable)
+     * @param strm source of bytes
+     * @param indexFile BAM index file
+     * @param eagerDecode if true, decode all BAM fields as reading rather than lazily.
+     * @param useAsynchronousIO if true, use asynchronous I/O
+     * @param validationStringency Controls how to handle invalidate reads or header lines.
+     * @param samRecordFactory SAM record factory
+     * @throws IOException
+     */
+    BAMFileReader(final SeekableStream strm,
+                  final File indexFile,
+                  final boolean eagerDecode,
+                  final boolean useAsynchronousIO,
+                  final ValidationStringency validationStringency,
+                  final SAMRecordFactory samRecordFactory)
+        throws IOException {
+        this(strm, indexFile, eagerDecode, useAsynchronousIO, validationStringency, samRecordFactory, BlockGunzipper.getDefaultInflaterFactory());
+    }
+
+    /**
+     * Prepare to read BAM from a stream (seekable)
+     * @param strm source of bytes
+     * @param indexFile BAM index file
+     * @param eagerDecode if true, decode all BAM fields as reading rather than lazily.
+     * @param useAsynchronousIO if true, use asynchronous I/O
+     * @param validationStringency Controls how to handle invalidate reads or header lines.
+     * @param samRecordFactory SAM record factory
+     * @param inflaterFactory InflaterFactory used by BlockCompressedInputStream
+     * @throws IOException
+     */
     BAMFileReader(final SeekableStream strm,
                   final File indexFile,
                   final boolean eagerDecode,
                   final boolean useAsynchronousIO,
                   final ValidationStringency validationStringency,
-                  final SAMRecordFactory factory)
+                  final SAMRecordFactory samRecordFactory,
+                  final InflaterFactory inflaterFactory)
         throws IOException {
-        this(new BlockCompressedInputStream(strm), indexFile, eagerDecode, useAsynchronousIO, strm.getSource(), validationStringency, factory);
+        this(useAsynchronousIO ? new AsyncBlockCompressedInputStream(strm, inflaterFactory) : new BlockCompressedInputStream(strm, inflaterFactory),
+                indexFile, eagerDecode, useAsynchronousIO, strm.getSource(), validationStringency, samRecordFactory);
     }
 
+    /**
+     * Prepare to read BAM from a stream (seekable)
+     * @param strm source of bytes
+     * @param indexStream BAM index stream
+     * @param eagerDecode if true, decode all BAM fields as reading rather than lazily.
+     * @param useAsynchronousIO if true, use asynchronous I/O
+     * @param validationStringency Controls how to handle invalidate reads or header lines.
+     * @param samRecordFactory SAM record factory
+     * @throws IOException
+     */
     BAMFileReader(final SeekableStream strm,
                   final SeekableStream indexStream,
                   final boolean eagerDecode,
                   final boolean useAsynchronousIO,
                   final ValidationStringency validationStringency,
-                  final SAMRecordFactory factory)
+                  final SAMRecordFactory samRecordFactory)
         throws IOException {
-        this(new BlockCompressedInputStream(strm), indexStream, eagerDecode, useAsynchronousIO, strm.getSource(), validationStringency, factory);
+        this(strm, indexStream, eagerDecode, useAsynchronousIO, validationStringency, samRecordFactory, BlockGunzipper.getDefaultInflaterFactory());
     }
 
+    /**
+     * Prepare to read BAM from a stream (seekable)
+     * @param strm source of bytes
+     * @param indexStream BAM index stream
+     * @param eagerDecode if true, decode all BAM fields as reading rather than lazily.
+     * @param useAsynchronousIO if true, use asynchronous I/O
+     * @param validationStringency Controls how to handle invalidate reads or header lines.
+     * @param samRecordFactory SAM record factory
+     * @param inflaterFactory InflaterFactory used by BlockCompressedInputStream
+     * @throws IOException
+     */
+    BAMFileReader(final SeekableStream strm,
+                  final SeekableStream indexStream,
+                  final boolean eagerDecode,
+                  final boolean useAsynchronousIO,
+                  final ValidationStringency validationStringency,
+                  final SAMRecordFactory samRecordFactory,
+                  final InflaterFactory inflaterFactory)
+        throws IOException {
+        this(useAsynchronousIO ? new AsyncBlockCompressedInputStream(strm, inflaterFactory) : new BlockCompressedInputStream(strm, inflaterFactory),
+                indexStream, eagerDecode, useAsynchronousIO, strm.getSource(), validationStringency, samRecordFactory);
+    }
+
+    /**
+     * Prepare to read BAM from a compressed stream (seekable)
+     * @param compressedInputStream source of bytes
+     * @param indexFile BAM index file
+     * @param eagerDecode if true, decode all BAM fields as reading rather than lazily.
+     * @param useAsynchronousIO if true, use asynchronous I/O
+     * @param source string used when reporting errors
+     * @param validationStringency Controls how to handle invalidate reads or header lines.
+     * @param samRecordFactory SAM record factory
+     * @throws IOException
+     */
     private BAMFileReader(final BlockCompressedInputStream compressedInputStream,
                           final File indexFile,
                           final boolean eagerDecode,
                           final boolean useAsynchronousIO,
                           final String source,
                           final ValidationStringency validationStringency,
-                          final SAMRecordFactory factory)
+                          final SAMRecordFactory samRecordFactory)
         throws IOException {
         mIndexFile = indexFile;
         mIsSeekable = true;
         mCompressedInputStream = compressedInputStream;
         mStream = new BinaryCodec(new DataInputStream(mCompressedInputStream));
         this.eagerDecode = eagerDecode;
-        this.useAsynchronousIO = useAsynchronousIO;
         this.mValidationStringency = validationStringency;
-        this.samRecordFactory = factory;
+        this.samRecordFactory = samRecordFactory;
         this.mFileHeader = readHeader(this.mStream, this.mValidationStringency, source);
         mFirstRecordPointer = mCompressedInputStream.getFilePointer();
-    }    
+    }
 
+    /**
+     * Prepare to read BAM from a compressed stream (seekable)
+     * @param compressedInputStream source of bytes
+     * @param indexStream BAM index stream
+     * @param eagerDecode if true, decode all BAM fields as reading rather than lazily.
+     * @param useAsynchronousIO if true, use asynchronous I/O
+     * @param source string used when reporting errors
+     * @param validationStringency Controls how to handle invalidate reads or header lines.
+     * @param samRecordFactory SAM record factory
+     * @throws IOException
+     */
     private BAMFileReader(final BlockCompressedInputStream compressedInputStream,
                           final SeekableStream indexStream,
                           final boolean eagerDecode,
                           final boolean useAsynchronousIO,
                           final String source,
                           final ValidationStringency validationStringency,
-                          final SAMRecordFactory factory)
+                          final SAMRecordFactory samRecordFactory)
         throws IOException {
         mIndexStream = indexStream;
         mIsSeekable = true;
         mCompressedInputStream = compressedInputStream;
         mStream = new BinaryCodec(new DataInputStream(mCompressedInputStream));
         this.eagerDecode = eagerDecode;
-        this.useAsynchronousIO = useAsynchronousIO;
         this.mValidationStringency = validationStringency;
-        this.samRecordFactory = factory;
+        this.samRecordFactory = samRecordFactory;
         this.mFileHeader = readHeader(this.mStream, this.mValidationStringency, source);
         mFirstRecordPointer = mCompressedInputStream.getFilePointer();
     }
@@ -210,6 +341,7 @@ class BAMFileReader extends SamReader.ReaderImplementation {
      * If true, writes the source of every read into the source SAMRecords.
      * @param enabled true to write source information into each SAMRecord.
      */
+    @Override
     void enableFileSource(final SamReader reader, final boolean enabled) {
         this.mReader = enabled ? reader : null;
     }
@@ -218,6 +350,7 @@ class BAMFileReader extends SamReader.ReaderImplementation {
      * If true, uses the caching version of the index reader.
      * @param enabled true to use the caching version of the reader.
      */
+    @Override
     protected void enableIndexCaching(final boolean enabled) {
         if(mIndex != null)
             throw new SAMException("Unable to turn on index caching; index file has already been loaded.");
@@ -229,6 +362,7 @@ class BAMFileReader extends SamReader.ReaderImplementation {
      * This is slower but more scalable when accessing large numbers of BAM files sequentially.
      * @param enabled True to use memory mapping, false to use regular I/O.
      */
+    @Override
     protected void enableIndexMemoryMapping(final boolean enabled) {
         if (mIndex != null) {
             throw new SAMException("Unable to change index memory mapping; index file has already been loaded.");
@@ -240,7 +374,7 @@ class BAMFileReader extends SamReader.ReaderImplementation {
         this.mCompressedInputStream.setCheckCrcs(enabled);
     }
 
-    @Override void setSAMRecordFactory(final SAMRecordFactory factory) { this.samRecordFactory = factory; }
+    @Override void setSAMRecordFactory(final SAMRecordFactory samRecordFactory) { this.samRecordFactory = samRecordFactory; }
 
     @Override
     public SamReader.Type type() {
@@ -250,6 +384,7 @@ class BAMFileReader extends SamReader.ReaderImplementation {
     /**
      * @return true if ths is a BAM file, and has an index
      */
+    @Override
     public boolean hasIndex() {
         return mIsSeekable && ((mIndexFile != null) || (mIndexStream != null));
     }
@@ -258,6 +393,7 @@ class BAMFileReader extends SamReader.ReaderImplementation {
      * Retrieves the index for the given file type.  Ensure that the index is of the specified type.
      * @return An index of the given type.
      */
+    @Override
     public BAMIndex getIndex() {
         if(!hasIndex())
             throw new SAMException("No index is available for this BAM file.");
@@ -294,6 +430,7 @@ class BAMFileReader extends SamReader.ReaderImplementation {
         mIndex = null;
     }
 
+    @Override
     public SAMFileHeader getFileHeader() {
         return mFileHeader;
     }
@@ -301,10 +438,12 @@ class BAMFileReader extends SamReader.ReaderImplementation {
     /**
      * Set error-checking level for subsequent SAMRecord reads.
      */
+    @Override
     void setValidationStringency(final ValidationStringency validationStringency) {
         this.mValidationStringency = validationStringency;
     }
 
+    @Override
     public ValidationStringency getValidationStringency() {
         return this.mValidationStringency;
     }
@@ -317,6 +456,7 @@ class BAMFileReader extends SamReader.ReaderImplementation {
      * getIterator() begins its iteration where the last one left off.  That is the best that can be
      * done in that situation.
      */
+    @Override
     public CloseableIterator<SAMRecord> getIterator() {
         if (mStream == null) {
             throw new IllegalStateException("File reader is closed");
@@ -421,6 +561,7 @@ class BAMFileReader extends SamReader.ReaderImplementation {
      * @return Iterator for the matching SAMRecords
      * @see QueryInterval#optimizeIntervals(QueryInterval[])
      */
+    @Override
     public CloseableIterator<SAMRecord> query(final QueryInterval[] intervals, final boolean contained) {
         if (mStream == null) {
             throw new IllegalStateException("File reader is closed");
@@ -451,6 +592,7 @@ class BAMFileReader extends SamReader.ReaderImplementation {
      * @param start Alignment start sought.
      * @return Iterator for the matching SAMRecords.
      */
+    @Override
     public CloseableIterator<SAMRecord> queryAlignmentStart(final String sequence, final int start) {
         if (mStream == null) {
             throw new IllegalStateException("File reader is closed");
@@ -477,6 +619,7 @@ class BAMFileReader extends SamReader.ReaderImplementation {
      *
      * @return Iterator for the matching SAMRecords.
      */
+    @Override
     public CloseableIterator<SAMRecord> queryUnmapped() {
         if (mStream == null) {
             throw new IllegalStateException("File reader is closed");
@@ -521,7 +664,7 @@ class BAMFileReader extends SamReader.ReaderImplementation {
         final String textHeader = stream.readString(headerTextLength);
         final SAMTextHeaderCodec headerCodec = new SAMTextHeaderCodec();
         headerCodec.setValidationStringency(validationStringency);
-        final SAMFileHeader samFileHeader = headerCodec.decode(new StringLineReader(textHeader),
+        final SAMFileHeader samFileHeader = headerCodec.decode(BufferedLineReader.fromString(textHeader),
                 source);
 
         final int sequenceCount = stream.readInt();
@@ -579,6 +722,7 @@ class BAMFileReader extends SamReader.ReaderImplementation {
 
         private boolean isClosed = false;
 
+        @Override
         public void close() {
             if (!isClosed) {
                 if (mCurrentIterator != null && this != mCurrentIterator) {
@@ -593,6 +737,7 @@ class BAMFileReader extends SamReader.ReaderImplementation {
             if (isClosed) throw new AssertionError("Iterator has been closed");
         }
 
+        @Override
         public void remove() {
             throw new UnsupportedOperationException("Not supported: remove");
         }
@@ -639,11 +784,13 @@ class BAMFileReader extends SamReader.ReaderImplementation {
             }
         }
 
+        @Override
         public boolean hasNext() {
             assertOpen();
             return (mNextRecord != null);
         }
 
+        @Override
         public SAMRecord next() {
             assertOpen();
             final SAMRecord result = mNextRecord;
@@ -732,31 +879,62 @@ class BAMFileReader extends SamReader.ReaderImplementation {
             if (prev.overlaps(thisInterval)) {
                 throw new IllegalArgumentException(String.format("List of intervals is not optimized: %s intersects %s", prev, thisInterval));
             }
-            if (prev.abuts(thisInterval)) {
+            if (prev.endsAtStartOf(thisInterval)) {
                 throw new IllegalArgumentException(String.format("List of intervals is not optimized: %s abuts %s", prev, thisInterval));
             }
         }
     }
 
-    private CloseableIterator<SAMRecord> createIndexIterator(final QueryInterval[] intervals,
-                                                             final boolean contained) {
-
-        assertIntervalsOptimized(intervals);
-
-        // Hit the index to determine the chunk boundaries for the required data.
+    /**
+     * Use the index to determine the chunk boundaries for the required intervals.
+     * @param intervals the intervals to restrict reads to
+     * @param fileIndex the BAM index to use
+     * @return file pointer pairs corresponding to chunk boundaries
+     */
+    public static BAMFileSpan getFileSpan(QueryInterval[] intervals, BAMIndex fileIndex) {
         final BAMFileSpan[] inputSpans = new BAMFileSpan[intervals.length];
-        final BAMIndex fileIndex = getIndex();
         for (int i = 0; i < intervals.length; ++i) {
             final QueryInterval interval = intervals[i];
             final BAMFileSpan span = fileIndex.getSpanOverlapping(interval.referenceIndex, interval.start, interval.end);
             inputSpans[i] = span;
         }
-        final long[] filePointers;
+        final BAMFileSpan span;
         if (inputSpans.length > 0) {
-            filePointers = BAMFileSpan.merge(inputSpans).toCoordinateArray();
+            span = BAMFileSpan.merge(inputSpans);
         } else {
-            filePointers = null;
+            span = null;
         }
+        return span;
+    }
+
+    private CloseableIterator<SAMRecord> createIndexIterator(final QueryInterval[] intervals,
+                                                             final boolean contained) {
+
+        assertIntervalsOptimized(intervals);
+
+        BAMFileSpan span = getFileSpan(intervals, getIndex());
+
+        // Create an iterator over the above chunk boundaries.
+        final BAMFileIndexIterator iterator = new BAMFileIndexIterator(span == null ? null : span.toCoordinateArray());
+
+        // Add some preprocessing filters for edge-case reads that don't fit into this
+        // query type.
+        return new BAMQueryFilteringIterator(iterator, new BAMQueryMultipleIntervalsIteratorFilter(intervals, contained));
+    }
+
+    /**
+     * Prepare to iterate through SAMRecords that match the given intervals.
+     * @param intervals the intervals to restrict reads to
+     * @param contained if <code>true</code>, return records that are strictly
+     *                  contained in the intervals, otherwise return records that overlap
+     * @param filePointers file pointer pairs corresponding to chunk boundaries for the
+     *                     intervals
+     */
+    public CloseableIterator<SAMRecord> createIndexIterator(final QueryInterval[] intervals,
+                                                            final boolean contained,
+                                                            final long[] filePointers) {
+
+        assertIntervalsOptimized(intervals);
 
         // Create an iterator over the above chunk boundaries.
         final BAMFileIndexIterator iterator = new BAMFileIndexIterator(filePointers);
@@ -785,6 +963,7 @@ class BAMFileReader extends SamReader.ReaderImplementation {
             advance();
         }
 
+        @Override
         SAMRecord getNextRecord()
             throws IOException {
             // Advance to next file block if necessary
@@ -827,6 +1006,7 @@ class BAMFileReader extends SamReader.ReaderImplementation {
         /**
          * Returns true if a next element exists; false otherwise.
          */
+        @Override
         public boolean hasNext() {
             assertOpen();
             return mNextRecord != null;
@@ -836,6 +1016,7 @@ class BAMFileReader extends SamReader.ReaderImplementation {
          * Gets the next record from the given iterator.
          * @return The next SAM record in the iterator.
          */
+        @Override
         public SAMRecord next() {
             if(!hasNext())
                 throw new NoSuchElementException("BAMQueryFilteringIterator: no next element available");
diff --git a/src/main/java/htsjdk/samtools/BAMFileSpan.java b/src/main/java/htsjdk/samtools/BAMFileSpan.java
index 193e443..d99760d 100644
--- a/src/main/java/htsjdk/samtools/BAMFileSpan.java
+++ b/src/main/java/htsjdk/samtools/BAMFileSpan.java
@@ -78,6 +78,7 @@ public class BAMFileSpan implements SAMFileSpan, Serializable {
      * Does this chunk list map to any position within the BAM file?
      * @return True iff the ChunkList points to any data within the BAM.
      */
+    @Override
     public boolean isEmpty() {
         return chunks.isEmpty();
     }
@@ -86,6 +87,7 @@ public class BAMFileSpan implements SAMFileSpan, Serializable {
      * Deep clone the given chunk list.
      * @return A copy of the chunk list.
      */
+    @Override
     public BAMFileSpan clone() {
         final BAMFileSpan clone = new BAMFileSpan();
         for(final Chunk chunk: chunks)
@@ -100,6 +102,7 @@ public class BAMFileSpan implements SAMFileSpan, Serializable {
      * @param fileSpan The filespan before which to eliminate.
      * @return A new BAMFileSpan which contains the portion of the chunk list after the given chunk.
      */
+    @Override
     public SAMFileSpan removeContentsBefore(final SAMFileSpan fileSpan) {
         if(fileSpan == null)
             return clone();
@@ -115,15 +118,55 @@ public class BAMFileSpan implements SAMFileSpan, Serializable {
         validateSorted();
 
         final BAMFileSpan trimmedChunkList = new BAMFileSpan();
+        final long chunkStart = bamFileSpan.chunks.get(0).getChunkStart();
         for(final Chunk chunkToTrim: chunks) {
-            if(chunkToTrim.getChunkEnd() > chunkToTrim.getChunkStart()) {
-                if(chunkToTrim.getChunkStart() >= bamFileSpan.chunks.get(0).getChunkStart()) {
+            if(chunkToTrim.getChunkEnd() > chunkStart) {
+                if(chunkToTrim.getChunkStart() >= chunkStart) {
                     // This chunk from the list is completely beyond the start of the filtering chunk.
                     trimmedChunkList.add(chunkToTrim.clone());
                 }
                 else {
                     // This chunk from the list partially overlaps the filtering chunk and must be trimmed.
-                    trimmedChunkList.add(new Chunk(bamFileSpan.chunks.get(0).getChunkStart(),chunkToTrim.getChunkEnd()));
+                    trimmedChunkList.add(new Chunk(chunkStart,chunkToTrim.getChunkEnd()));
+                }
+            }
+        }
+        return trimmedChunkList;
+    }
+
+    /**
+     * Creates a new file span by removing all chunks after the given file span ends.
+     * If a chunk in the chunk list starts before and ends after the given
+     * chunk, the second portion of the chunk will be deleted.
+     * @param fileSpan The filespan after which to eliminate.
+     * @return A new BAMFileSpan which contains the portion of the chunk list before the
+     * given chunk.
+     */
+    public SAMFileSpan removeContentsAfter(final SAMFileSpan fileSpan) {
+        if(fileSpan == null)
+            return clone();
+
+        if(!(fileSpan instanceof BAMFileSpan))
+            throw new SAMException("Unable to compare ");
+
+        final BAMFileSpan bamFileSpan = (BAMFileSpan)fileSpan;
+
+        if(bamFileSpan.isEmpty())
+            return clone();
+
+        validateSorted();
+
+        final BAMFileSpan trimmedChunkList = new BAMFileSpan();
+        final long chunkEnd = bamFileSpan.chunks.get(bamFileSpan.chunks.size() - 1).getChunkEnd();
+        for(final Chunk chunkToTrim: chunks) {
+            if(chunkToTrim.getChunkStart() < chunkEnd) {
+                if(chunkToTrim.getChunkEnd() <= chunkEnd) {
+                    // This chunk from the list is completely before the end of the filtering chunk.
+                    trimmedChunkList.add(chunkToTrim.clone());
+                }
+                else {
+                    // This chunk from the list partially overlaps the filtering chunk and must be trimmed.
+                    trimmedChunkList.add(new Chunk(chunkToTrim.getChunkStart(),chunkEnd));
                 }
             }
         }
@@ -134,6 +177,7 @@ public class BAMFileSpan implements SAMFileSpan, Serializable {
      * Gets a file span over the data immediately following this span.
      * @return The a pointer to data immediately following this span.
      */
+    @Override
     public SAMFileSpan getContentsFollowing() {
         if(chunks.isEmpty())
             throw new SAMException("Unable to get the file pointer following this one: no data present.");
diff --git a/src/main/java/htsjdk/samtools/BAMFileWriter.java b/src/main/java/htsjdk/samtools/BAMFileWriter.java
index f6a474e..db8daf3 100644
--- a/src/main/java/htsjdk/samtools/BAMFileWriter.java
+++ b/src/main/java/htsjdk/samtools/BAMFileWriter.java
@@ -25,6 +25,7 @@ package htsjdk.samtools;
 
 import htsjdk.samtools.util.BinaryCodec;
 import htsjdk.samtools.util.BlockCompressedOutputStream;
+import htsjdk.samtools.util.IOUtil;
 import htsjdk.samtools.util.RuntimeIOException;
 import htsjdk.samtools.util.zip.DeflaterFactory;
 
@@ -34,6 +35,9 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.io.StringWriter;
 import java.io.Writer;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
 
 /**
  * Concrete implementation of SAMFileWriter for writing gzipped BAM files.
@@ -75,7 +79,13 @@ class BAMFileWriter extends SAMFileWriterImpl {
         outputBinaryCodec.setOutputFileName(getPathString(file));
     }
 
-    private void prepareToWriteAlignments() {
+    protected BAMFileWriter(final OutputStream os, final String absoluteFilename, final int compressionLevel, final DeflaterFactory deflaterFactory) {
+      blockCompressedOutputStream = new BlockCompressedOutputStream(os, null, compressionLevel, deflaterFactory);
+      outputBinaryCodec = new BinaryCodec(new DataOutputStream(blockCompressedOutputStream));
+      outputBinaryCodec.setOutputFileName(absoluteFilename);
+    }
+
+  private void prepareToWriteAlignments() {
         if (bamRecordCodec == null) {
             bamRecordCodec = new BAMRecordCodec(getFileHeader());
             bamRecordCodec.setOutputStream(outputBinaryCodec.getOutputStream(), getFilename());
@@ -99,22 +109,23 @@ class BAMFileWriter extends SAMFileWriterImpl {
         bamIndexer = createBamIndex(getFilename());
     }
 
-    private BAMIndexer createBamIndex(final String path) {
+    private BAMIndexer createBamIndex(final String pathURI) {
         try {
-            final String indexFileBase = path.endsWith(BamFileIoUtils.BAM_FILE_EXTENSION) ?
-                    path.substring(0, path.lastIndexOf('.')) : path;
-            final File indexFile = new File(indexFileBase + BAMIndex.BAMIndexSuffix);
-            if (indexFile.exists()) {
-                if (!indexFile.canWrite()) {
-                    throw new SAMException("Not creating BAM index since unable to write index file " + indexFile);
+            final String indexFileBase = pathURI.endsWith(BamFileIoUtils.BAM_FILE_EXTENSION) ?
+                    pathURI.substring(0, pathURI.lastIndexOf('.')) : pathURI;
+            final Path indexPath = IOUtil.getPath(indexFileBase + BAMIndex.BAMIndexSuffix);
+            if (Files.exists(indexPath)) {
+                if (!Files.isWritable(indexPath)) {
+                    throw new SAMException("Not creating BAM index since unable to write index file " + indexPath.toUri());
                 }
             }
-            return new BAMIndexer(indexFile, getFileHeader());
+            return new BAMIndexer(indexPath, getFileHeader());
         } catch (Exception e) {
             throw new SAMException("Not creating BAM index", e);
         }
     }
 
+    @Override
     protected void writeAlignment(final SAMRecord alignment) {
         prepareToWriteAlignments();
 
@@ -135,10 +146,12 @@ class BAMFileWriter extends SAMFileWriterImpl {
         }
     }
 
+    @Override
     protected void writeHeader(final String textHeader) {
         writeHeader(outputBinaryCodec, getFileHeader(), textHeader);
     }
 
+    @Override
     protected void finish() {
         outputBinaryCodec.close();
             try {
@@ -150,7 +163,9 @@ class BAMFileWriter extends SAMFileWriterImpl {
             }
     }
 
-    /** @return absolute path, or null if this writer does not correspond to a file.  */
+    /** @return absolute path in URI format, or null if this writer does not correspond to a file.
+     * To get a Path from this, use: IOUtil.getPath(getFilename()) */
+    @Override
     protected String getFilename() {
         return outputBinaryCodec.getOutputFileName();
     }
diff --git a/src/main/java/htsjdk/samtools/BAMIndex.java b/src/main/java/htsjdk/samtools/BAMIndex.java
index 3663df9..62c69c7 100644
--- a/src/main/java/htsjdk/samtools/BAMIndex.java
+++ b/src/main/java/htsjdk/samtools/BAMIndex.java
@@ -63,5 +63,6 @@ public interface BAMIndex extends Closeable {
     /**
      * Close the index and release any associated resources.
      */
+    @Override
     void close();
 }
diff --git a/src/main/java/htsjdk/samtools/BAMIndexWriter.java b/src/main/java/htsjdk/samtools/BAMIndexWriter.java
index b036b68..aafcb5f 100644
--- a/src/main/java/htsjdk/samtools/BAMIndexWriter.java
+++ b/src/main/java/htsjdk/samtools/BAMIndexWriter.java
@@ -49,6 +49,7 @@ interface BAMIndexWriter extends Closeable {  // note - only package visibility
     /**
      * Any necessary processing at the end of the file
      */
+    @Override
     public void close();
 
 }
\ No newline at end of file
diff --git a/src/main/java/htsjdk/samtools/BAMIndexer.java b/src/main/java/htsjdk/samtools/BAMIndexer.java
index f5b1558..4dfe39d 100644
--- a/src/main/java/htsjdk/samtools/BAMIndexer.java
+++ b/src/main/java/htsjdk/samtools/BAMIndexer.java
@@ -27,6 +27,7 @@ import htsjdk.samtools.util.Log;
 
 import java.io.File;
 import java.io.OutputStream;
+import java.nio.file.Path;
 import java.util.function.Function;
 
 /**
@@ -56,11 +57,19 @@ public class BAMIndexer {
      * @param output     binary BAM Index (.bai) file
      * @param fileHeader header for the corresponding bam file
      */
-    public BAMIndexer(final File output, final SAMFileHeader fileHeader) {
+    public BAMIndexer(final Path output, final SAMFileHeader fileHeader) {
         this(fileHeader, numRefs -> new BinaryBAMIndexWriter(numRefs, output));
     }
 
     /**
+     * @param output     binary BAM Index (.bai) file
+     * @param fileHeader header for the corresponding bam file
+     */
+    public BAMIndexer(final File output, final SAMFileHeader fileHeader) {
+        this(output.toPath(), fileHeader);
+    }
+
+    /**
      * Prepare to index a BAM.
      *
      * @param output     Index will be written here.  output will be closed when finish() method is called.
@@ -283,9 +292,9 @@ public class BAMIndexer {
      * Generates a BAM index file from an input BAM file
      *
      * @param reader SamReader for input BAM file
-     * @param output File for output index file
+     * @param output Path for output index file
      */
-    public static void createIndex(SamReader reader, File output) {
+    public static void createIndex(SamReader reader, Path output) {
         createIndex(reader, output, null);
     }
 
@@ -295,7 +304,17 @@ public class BAMIndexer {
      * @param reader SamReader for input BAM file
      * @param output File for output index file
      */
-    public static void createIndex(SamReader reader, File output, Log log) {
+    public static void createIndex(SamReader reader, File output) {
+        createIndex(reader, output.toPath(), null);
+    }
+
+    /**
+     * Generates a BAM index file from an input BAM file
+     *
+     * @param reader SamReader for input BAM file
+     * @param output Path for output index file
+     */
+    public static void createIndex(SamReader reader, Path output, Log log) {
 
         BAMIndexer indexer = new BAMIndexer(output, reader.getFileHeader());
 
@@ -310,4 +329,14 @@ public class BAMIndexer {
         }
         indexer.finish();
     }
+
+    /**
+     * Generates a BAM index file from an input BAM file
+     *
+     * @param reader SamReader for input BAM file
+     * @param output File for output index file
+     */
+    public static void createIndex(SamReader reader, File output, Log log) {
+        createIndex(reader, output.toPath(), log);
+    }
 }
diff --git a/src/main/java/htsjdk/samtools/BAMRecord.java b/src/main/java/htsjdk/samtools/BAMRecord.java
index c45566f..14b6295 100644
--- a/src/main/java/htsjdk/samtools/BAMRecord.java
+++ b/src/main/java/htsjdk/samtools/BAMRecord.java
@@ -113,6 +113,7 @@ public class BAMRecord extends SAMRecord {
     /**
      * Force all the lazily-initialized attributes to be decoded.
      */
+    @Override
     protected void eagerDecode() {
         getReadName();
         getCigar();
@@ -341,7 +342,12 @@ public class BAMRecord extends SAMRecord {
             return NULL_SEQUENCE;
         }
         final int basesOffset = readNameSize() + cigarSize();
-        return SAMUtils.compressedBasesToBytes(mReadLength, mRestOfBinaryData, basesOffset);
+        try {
+            return SAMUtils.compressedBasesToBytes(mReadLength, mRestOfBinaryData, basesOffset);
+        } catch ( final IllegalArgumentException ex ) {
+            final String msg = ex.getMessage() + " in read: " + getReadName();
+            throw new IllegalStateException(msg, ex);
+        }
     }
 
     /* methods for computing disk size of variably-sized elements, in order to locate
diff --git a/src/main/java/htsjdk/samtools/BAMRecordCodec.java b/src/main/java/htsjdk/samtools/BAMRecordCodec.java
index dc1ca81..e363a5b 100644
--- a/src/main/java/htsjdk/samtools/BAMRecordCodec.java
+++ b/src/main/java/htsjdk/samtools/BAMRecordCodec.java
@@ -49,6 +49,7 @@ public class BAMRecordCodec implements SortingCollection.Codec<SAMRecord> {
         this.samRecordFactory = factory;
     }
 
+    @Override
     public BAMRecordCodec clone() {
         // Do not clone the references to codecs, as they must be distinct for each instance.
         return new BAMRecordCodec(this.header, this.samRecordFactory);
@@ -56,6 +57,7 @@ public class BAMRecordCodec implements SortingCollection.Codec<SAMRecord> {
 
 
     /** Sets the output stream that records will be written to. */
+    @Override
     public void setOutputStream(final OutputStream os) {
         this.binaryCodec.setOutputStream(os);
     }
@@ -67,6 +69,7 @@ public class BAMRecordCodec implements SortingCollection.Codec<SAMRecord> {
     }
 
     /** Sets the input stream that records will be read from. */
+    @Override
     public void setInputStream(final InputStream is) {
         this.binaryCodec.setInputStream(is);
     }
@@ -85,6 +88,7 @@ public class BAMRecordCodec implements SortingCollection.Codec<SAMRecord> {
      *
      * @param alignment Record to be written.
      */
+    @Override
     public void encode(final SAMRecord alignment) {
         // Compute block size, as it is the first element of the file representation of SAMRecord
         final int readLength = alignment.getReadLength();
@@ -150,7 +154,12 @@ public class BAMRecordCodec implements SortingCollection.Codec<SAMRecord> {
                 // that it is specced as a uint.
                 this.binaryCodec.writeInt(cigarElement);
             }
-            this.binaryCodec.writeBytes(SAMUtils.bytesToCompressedBases(alignment.getReadBases()));
+            try {
+                this.binaryCodec.writeBytes(SAMUtils.bytesToCompressedBases(alignment.getReadBases()));
+            } catch ( final IllegalArgumentException ex ) {
+                final String msg = ex.getMessage() + " in read: " +  alignment.getReadName();
+                throw new IllegalStateException(msg, ex);
+            }
             byte[] qualities = alignment.getBaseQualities();
             if (qualities.length == 0) {
                 qualities = new byte[alignment.getReadLength()];
@@ -171,6 +180,7 @@ public class BAMRecordCodec implements SortingCollection.Codec<SAMRecord> {
      * @return null if no more records.  Should throw exception if EOF is encountered in the middle of
      *         a record.
      */
+    @Override
     public SAMRecord decode() {
         int recordLength = 0;
         try {
diff --git a/src/main/java/htsjdk/samtools/Bin.java b/src/main/java/htsjdk/samtools/Bin.java
index 1ac5724..f199d0a 100644
--- a/src/main/java/htsjdk/samtools/Bin.java
+++ b/src/main/java/htsjdk/samtools/Bin.java
@@ -105,6 +105,7 @@ public class Bin implements Comparable<Bin> {
      * @param other Other bin to which this bin should be compared.
      * @return -1 if this < other, 0 if this == other, 1 if this > other.
      */
+    @Override
     public int compareTo(final Bin other) {
         if(other == null)
             throw new ClassCastException("Cannot compare to a null object");
diff --git a/src/main/java/htsjdk/samtools/BinList.java b/src/main/java/htsjdk/samtools/BinList.java
index e7107d4..2111ba4 100644
--- a/src/main/java/htsjdk/samtools/BinList.java
+++ b/src/main/java/htsjdk/samtools/BinList.java
@@ -60,6 +60,7 @@ public class BinList implements Iterable<Bin> {
      * Gets an iterator over all selected bins.
      * @return An iterator over all selected bins.
      */
+    @Override
     public Iterator<Bin> iterator() {
         return new BinIterator();
     }
@@ -95,6 +96,7 @@ public class BinList implements Iterable<Bin> {
          * Are there more bins in this set, waiting to be returned?
          * @return True if more bins are remaining.
          */
+        @Override
         public boolean hasNext() {
             return nextBin >= 0;
         }
@@ -103,6 +105,7 @@ public class BinList implements Iterable<Bin> {
          * Gets the next bin in the provided BinList.
          * @return the next available bin in the BinList.
          */
+        @Override
         public Bin next() {
             if(!hasNext())
                 throw new NoSuchElementException("This BinIterator is currently empty");
@@ -111,6 +114,7 @@ public class BinList implements Iterable<Bin> {
             return new Bin(referenceSequence,currentBin);
         }
 
+        @Override
         public void remove() {
             throw new UnsupportedOperationException("Unable to remove from a bin iterator");
         }
diff --git a/src/main/java/htsjdk/samtools/BinaryBAMIndexWriter.java b/src/main/java/htsjdk/samtools/BinaryBAMIndexWriter.java
index 35a22f7..5a498c3 100644
--- a/src/main/java/htsjdk/samtools/BinaryBAMIndexWriter.java
+++ b/src/main/java/htsjdk/samtools/BinaryBAMIndexWriter.java
@@ -29,6 +29,7 @@ import htsjdk.samtools.util.BinaryCodec;
 import java.io.File;
 import java.io.IOException;
 import java.io.OutputStream;
+import java.nio.file.Path;
 import java.util.List;
 
 /**
@@ -41,12 +42,20 @@ class BinaryBAMIndexWriter implements BAMIndexWriter {
     private int count = 0;
 
     /**
-     * constructor
      *
      * @param nRef    Number of reference sequences
      * @param output  BAM Index output file
      */
     public BinaryBAMIndexWriter(final int nRef, final File output) {
+        this(nRef, null == output ? null : output.toPath());
+    }
+
+    /**
+     *
+     * @param nRef    Number of reference sequences
+     * @param output  BAM Index output file
+     */
+    public BinaryBAMIndexWriter(final int nRef, final Path output) {
 
         this.nRef = nRef;
 
@@ -78,6 +87,7 @@ class BinaryBAMIndexWriter implements BAMIndexWriter {
     /**
      * Write this content as binary output
      */
+    @Override
     public void writeReference(final BAMIndexContent content) {
 
         if (content == null) {
@@ -147,6 +157,7 @@ class BinaryBAMIndexWriter implements BAMIndexWriter {
      *
      * @param count
      */
+    @Override
     public void writeNoCoordinateRecordCount(final Long count) {
         codec.writeLong(count == null ? 0 : count);
     }
@@ -154,6 +165,7 @@ class BinaryBAMIndexWriter implements BAMIndexWriter {
     /**
      * Any necessary processing at the end of the file
      */
+    @Override
     public void close() {
         codec.close();
     }
diff --git a/src/main/java/htsjdk/samtools/BinningIndexBuilder.java b/src/main/java/htsjdk/samtools/BinningIndexBuilder.java
index 37933f4..4f988f4 100644
--- a/src/main/java/htsjdk/samtools/BinningIndexBuilder.java
+++ b/src/main/java/htsjdk/samtools/BinningIndexBuilder.java
@@ -25,6 +25,7 @@ package htsjdk.samtools;
 
 import htsjdk.samtools.util.BlockCompressedFilePointerUtil;
 
+import java.util.Arrays;
 import java.util.List;
 
 import static htsjdk.samtools.GenomicIndexUtil.MAX_BINS;
@@ -33,6 +34,7 @@ import static htsjdk.samtools.GenomicIndexUtil.MAX_BINS;
  * Builder for a BinningIndexContent object.
  */
 public class BinningIndexBuilder {
+    private static final int UNINITIALIZED_WINDOW = -1;
     private final int referenceSequence;
     // the bins for the current reference
     private final Bin[] bins; // made only as big as needed for each reference
@@ -50,6 +52,10 @@ public class BinningIndexBuilder {
      */
     public BinningIndexBuilder(final int referenceSequence, final int sequenceLength) {
         this.referenceSequence = referenceSequence;
+        // Initially set each window to -1 so we can distinguish between windows that have no overlapping
+        // features, and those whose lowest offset is 0, which is a valid (virtual file) offset for feature
+        // formats that don't require a header.
+        Arrays.fill(index, UNINITIALIZED_WINDOW);
         final int numBins;
         if (sequenceLength <= 0) numBins = MAX_BINS + 1;
         else numBins = AbstractBAMFileIndex.getMaxBinNumberForSequenceLength(sequenceLength) + 1;
@@ -131,9 +137,14 @@ public class BinningIndexBuilder {
             largestIndexSeen = endWindow;
         }
 
-        // set linear index at every 16K window that this feature overlaps
+        // Set the linear index at every 16K window that this feature overlaps, but only if this chunk
+        // start is strictly earlier than any previously seen chunk start for the window, or if the window
+        // is uninitialized (this is the first feature overlapping this window).
         for (int win = startWindow; win <= endWindow; win++) {
-            if (index[win] == 0 || chunkStart < index[win]) {
+            // Initially each window is set to UNINITIALIZED_WINDOW (-1) so that we can distinguish here between
+            // windows that have no overlapping features, and those whose lowest feature offset is legitimately 0,
+            // which is a valid (virtual file) offset for feature formats that don't require a header.
+            if (index[win] == UNINITIALIZED_WINDOW || chunkStart < index[win]) {
                 index[win] = chunkStart;
             }
         }
@@ -159,7 +170,7 @@ public class BinningIndexBuilder {
         // C (samtools index) also fills in intermediate 0's with values.  This seems unnecessary, but safe
         long lastNonZeroOffset = 0;
         for (int i = 0; i <= largestIndexSeen; i++) {
-            if (index[i] == 0) {
+            if (index[i] == UNINITIALIZED_WINDOW) {
                 index[i] = lastNonZeroOffset; // not necessary, but C (samtools index) does this
                 // note, if you remove the above line BAMIndexWriterTest.compareTextual and compareBinary will have to change
             } else {
diff --git a/src/main/java/htsjdk/samtools/BinningIndexContent.java b/src/main/java/htsjdk/samtools/BinningIndexContent.java
index 9e32601..124353e 100644
--- a/src/main/java/htsjdk/samtools/BinningIndexContent.java
+++ b/src/main/java/htsjdk/samtools/BinningIndexContent.java
@@ -171,6 +171,7 @@ public class BinningIndexContent {
         /**
          * @return An iterator over all non-empty bins.
          */
+        @Override
         public Iterator<Bin> iterator() {
             return new BinIterator();
         }
@@ -190,6 +191,7 @@ public class BinningIndexContent {
              *
              * @return True if more bins are remaining.
              */
+            @Override
             public boolean hasNext() {
                 while (nextBin <= maxBinNumber) {
                     if (getBin(nextBin) != null) return true;
@@ -203,6 +205,7 @@ public class BinningIndexContent {
              *
              * @return the next available bin in the BinList.
              */
+            @Override
             public Bin next() {
                 if (!hasNext())
                     throw new NoSuchElementException("This BinIterator is currently empty");
@@ -211,6 +214,7 @@ public class BinningIndexContent {
                 return result;
             }
 
+            @Override
             public void remove() {
                 throw new UnsupportedOperationException("Unable to remove from a bin iterator");
             }
diff --git a/src/main/java/htsjdk/samtools/CRAMContainerStreamWriter.java b/src/main/java/htsjdk/samtools/CRAMContainerStreamWriter.java
index 4707b7b..c588bdb 100644
--- a/src/main/java/htsjdk/samtools/CRAMContainerStreamWriter.java
+++ b/src/main/java/htsjdk/samtools/CRAMContainerStreamWriter.java
@@ -17,6 +17,7 @@ import htsjdk.samtools.cram.structure.CramCompressionRecord;
 import htsjdk.samtools.cram.structure.Slice;
 import htsjdk.samtools.util.Log;
 import htsjdk.samtools.util.RuntimeIOException;
+import htsjdk.samtools.util.SequenceUtil;
 
 import java.io.IOException;
 import java.io.OutputStream;
@@ -437,7 +438,13 @@ public class CRAMContainerStreamWriter {
                     final SAMRecord restoredSamRecord = f.create(cramRecords.get(i));
                     assert (restoredSamRecord.getAlignmentStart() == samRecords.get(i).getAlignmentStart());
                     assert (restoredSamRecord.getReferenceName().equals(samRecords.get(i).getReferenceName()));
-                    assert (restoredSamRecord.getReadString().equals(samRecords.get(i).getReadString()));
+
+                    if (!restoredSamRecord.getReadString().equals(samRecords.get(i).getReadString())) {
+                        // try to fix the original read bases by normalizing them to BAM set:
+                        final byte[] originalReadBases = samRecords.get(i).getReadString().getBytes();
+                        final String originalReadBasesUpperCaseIupacNoDot = new String(SequenceUtil.toBamReadBasesInPlace(originalReadBases));
+                        assert (restoredSamRecord.getReadString().equals(originalReadBasesUpperCaseIupacNoDot));
+                    }
                     assert (restoredSamRecord.getBaseQualityString().equals(samRecords.get(i).getBaseQualityString()));
                 }
             }
diff --git a/src/main/java/htsjdk/samtools/CRAMFileReader.java b/src/main/java/htsjdk/samtools/CRAMFileReader.java
index 9a29d36..a7a4088 100644
--- a/src/main/java/htsjdk/samtools/CRAMFileReader.java
+++ b/src/main/java/htsjdk/samtools/CRAMFileReader.java
@@ -1,5 +1,5 @@
 /*******************************************************************************
- * Copyright 2013 EMBL-EBI
+ * Copyright 2013-2016 EMBL-EBI
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -466,9 +466,8 @@ public class CRAMFileReader extends SamReader.ReaderImplementation implements Sa
             iterator.setFileSource(enabled ? reader : null);
     }
 
-    private class CRAMIntervalIterator
-            extends BAMQueryMultipleIntervalsIteratorFilter
-            implements SAMRecordIterator {
+    private class CRAMIntervalIterator extends BAMQueryMultipleIntervalsIteratorFilter
+            implements CloseableIterator<SAMRecord> {
 
         // the granularity of this iterator is the container, so the records returned
         // by it must still be filtered to find those matching the filter criteria
@@ -507,11 +506,6 @@ public class CRAMFileReader extends SamReader.ReaderImplementation implements Sa
         }
 
         @Override
-        public SAMRecordIterator assertSorted(final SortOrder sortOrder) {
-            return null;
-        }
-
-        @Override
         public void close() {
             if (unfilteredIterator != null) {
                 unfilteredIterator.close();
diff --git a/src/main/java/htsjdk/samtools/CRAMFileWriter.java b/src/main/java/htsjdk/samtools/CRAMFileWriter.java
index 5d3f2e2..2da917f 100644
--- a/src/main/java/htsjdk/samtools/CRAMFileWriter.java
+++ b/src/main/java/htsjdk/samtools/CRAMFileWriter.java
@@ -18,8 +18,8 @@ package htsjdk.samtools;
 import htsjdk.samtools.cram.lossy.PreservationPolicy;
 import htsjdk.samtools.cram.ref.CRAMReferenceSource;
 import htsjdk.samtools.cram.ref.ReferenceSource;
+import htsjdk.samtools.util.BufferedLineReader;
 import htsjdk.samtools.util.Log;
-import htsjdk.samtools.util.StringLineReader;
 
 import java.io.OutputStream;
 import java.util.List;
@@ -117,7 +117,7 @@ public class CRAMFileWriter extends SAMFileWriterImpl {
     @Override
     protected void writeHeader(final String textHeader) {
         cramContainerStream.writeHeader(
-                new SAMTextHeaderCodec().decode(new StringLineReader(textHeader),fileName != null ? fileName : null));
+                new SAMTextHeaderCodec().decode(BufferedLineReader.fromString(textHeader),fileName != null ? fileName : null));
     }
 
     @Override
diff --git a/src/main/java/htsjdk/samtools/CRAMIterator.java b/src/main/java/htsjdk/samtools/CRAMIterator.java
index f8179e6..33492df 100644
--- a/src/main/java/htsjdk/samtools/CRAMIterator.java
+++ b/src/main/java/htsjdk/samtools/CRAMIterator.java
@@ -1,5 +1,5 @@
 /*******************************************************************************
- * Copyright 2013 EMBL-EBI
+ * Copyright 2013-2016 EMBL-EBI
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -292,7 +292,7 @@ public class CRAMIterator implements SAMRecordIterator {
 
     @Override
     public SAMRecordIterator assertSorted(final SortOrder sortOrder) {
-        throw new RuntimeException("Not implemented.");
+        return SamReader.AssertingIterator.of(this).assertSorted(sortOrder);
     }
 
     public SamReader getFileSource() {
diff --git a/src/main/java/htsjdk/samtools/CachingBAMFileIndex.java b/src/main/java/htsjdk/samtools/CachingBAMFileIndex.java
index 8010ce5..5597832 100644
--- a/src/main/java/htsjdk/samtools/CachingBAMFileIndex.java
+++ b/src/main/java/htsjdk/samtools/CachingBAMFileIndex.java
@@ -61,6 +61,7 @@ class CachingBAMFileIndex extends AbstractBAMFileIndex implements BrowseableBAMI
      *         in a range that can be scanned to find SAMRecords that overlap the given positions.
      *         May return null if there is no content overlapping the region.
      */
+    @Override
     public BAMFileSpan getSpanOverlapping(final int referenceIndex, final int startPos, final int endPos) {
         final BAMIndexContent queryResults = getQueryResults(referenceIndex);
 
@@ -80,6 +81,7 @@ class CachingBAMFileIndex extends AbstractBAMFileIndex implements BrowseableBAMI
      * @param endPos 1-based end of the desired interval, inclusive
      * @return a list of bins that contain relevant data.
      */
+    @Override
     public BinList getBinsOverlapping(final int referenceIndex, final int startPos, final int endPos) {
         final BitSet regionBins = GenomicIndexUtil.regionToBins(startPos, endPos);
         if (regionBins == null) {
@@ -93,6 +95,7 @@ class CachingBAMFileIndex extends AbstractBAMFileIndex implements BrowseableBAMI
      * @param bin The bin over which to perform an overlapping query.
      * @return The file pointers
      */
+    @Override
     public BAMFileSpan getSpanOverlapping(final Bin bin) {
         if(bin == null)
             return null;
@@ -138,6 +141,7 @@ class CachingBAMFileIndex extends AbstractBAMFileIndex implements BrowseableBAMI
      * @param referenceIndex The reference to load.  CachingBAMFileIndex only stores index data for entire references. 
      * @return The index information for this reference.
      */
+    @Override
     protected BAMIndexContent getQueryResults(final int referenceIndex) {
         // WeakHashMap is a bit weird in that its lookups are done via equals() equality, but expirations must be
         // handled via == equality.  This implementation jumps through a few hoops to make sure that == equality still
diff --git a/src/main/java/htsjdk/samtools/Chunk.java b/src/main/java/htsjdk/samtools/Chunk.java
index 0d77b0c..dbe27c6 100644
--- a/src/main/java/htsjdk/samtools/Chunk.java
+++ b/src/main/java/htsjdk/samtools/Chunk.java
@@ -38,6 +38,7 @@ public class Chunk implements Cloneable, Serializable,Comparable<Chunk> {
         mChunkEnd = end;
     }
 
+    @Override
     public Chunk clone() {
         return new Chunk(mChunkStart,mChunkEnd);
     }
@@ -58,6 +59,7 @@ public class Chunk implements Cloneable, Serializable,Comparable<Chunk> {
         mChunkEnd = value;
     }
 
+    @Override
     public int compareTo(final Chunk chunk) {
         int result = Long.signum(mChunkStart - chunk.mChunkStart);
         if (result == 0) {
diff --git a/src/main/java/htsjdk/samtools/CigarElement.java b/src/main/java/htsjdk/samtools/CigarElement.java
index c645e6c..016956c 100644
--- a/src/main/java/htsjdk/samtools/CigarElement.java
+++ b/src/main/java/htsjdk/samtools/CigarElement.java
@@ -36,6 +36,7 @@ public class CigarElement implements Serializable {
     private final CigarOperator operator;
 
     public CigarElement(final int length, final CigarOperator operator) {
+        if (length < 0) throw new IllegalArgumentException(String.format("Cigar element being constructed with negative length: %d and operation: %s" , length, operator.name()));
         this.length = length;
         this.operator = operator;
     }
diff --git a/src/main/java/htsjdk/samtools/ComparableSamRecordIterator.java b/src/main/java/htsjdk/samtools/ComparableSamRecordIterator.java
index 06186a1..cb2da89 100644
--- a/src/main/java/htsjdk/samtools/ComparableSamRecordIterator.java
+++ b/src/main/java/htsjdk/samtools/ComparableSamRecordIterator.java
@@ -63,6 +63,7 @@ class ComparableSamRecordIterator extends PeekableIterator<SAMRecord> implements
      * @param that another iterator to compare to
      * @return a negative, 0 or positive number as described in the Comparator interface
      */
+    @Override
     public int compareTo(final ComparableSamRecordIterator that) {
         if (this.comparator.getClass() != that.comparator.getClass()) {
             throw new IllegalStateException("Attempt to compare two ComparableSAMRecordIterators that " +
diff --git a/src/main/java/htsjdk/samtools/CoordinateSortedPairInfoMap.java b/src/main/java/htsjdk/samtools/CoordinateSortedPairInfoMap.java
index d892d65..37c200c 100644
--- a/src/main/java/htsjdk/samtools/CoordinateSortedPairInfoMap.java
+++ b/src/main/java/htsjdk/samtools/CoordinateSortedPairInfoMap.java
@@ -202,6 +202,7 @@ public class CoordinateSortedPairInfoMap<KEY, REC> implements Iterable<Map.Entry
      * or removed from map when iteration is in progress, nor may a second iteration be started.
      * Iterator must be closed in order to allow normal access to the map.
      */
+    @Override
     public CloseableIterator<Map.Entry<KEY, REC>> iterator() {
         if (iterationInProgress) throw new IllegalStateException("Cannot be called when iteration is in progress");
         iterationInProgress = true;
@@ -238,11 +239,13 @@ public class CoordinateSortedPairInfoMap<KEY, REC> implements Iterable<Map.Entry
             currentReferenceIterator = mapInRam.entrySet().iterator();
         }
 
+        @Override
         public void close() {
             closed = true;
             iterationInProgress = false;
         }
 
+        @Override
         public boolean hasNext() {
             if (closed) throw new IllegalStateException("Iterator has been closed");
             if (currentReferenceIterator != null && !currentReferenceIterator.hasNext())
@@ -250,6 +253,7 @@ public class CoordinateSortedPairInfoMap<KEY, REC> implements Iterable<Map.Entry
             return currentReferenceIterator != null;
         }
 
+        @Override
         public Map.Entry<KEY, REC> next() {
             if (closed) throw new IllegalStateException("Iterator has been closed");
             if (!hasNext()) throw new NoSuchElementException();
@@ -258,6 +262,7 @@ public class CoordinateSortedPairInfoMap<KEY, REC> implements Iterable<Map.Entry
             return ret;
         }
 
+        @Override
         public void remove() {
             throw new UnsupportedOperationException();
         }
diff --git a/src/main/java/htsjdk/samtools/DefaultSAMRecordFactory.java b/src/main/java/htsjdk/samtools/DefaultSAMRecordFactory.java
index 7e3848e..707cc6e 100644
--- a/src/main/java/htsjdk/samtools/DefaultSAMRecordFactory.java
+++ b/src/main/java/htsjdk/samtools/DefaultSAMRecordFactory.java
@@ -14,6 +14,7 @@ public class DefaultSAMRecordFactory implements SAMRecordFactory {
     }
 
     /** Create a new SAMRecord to be filled in */
+    @Override
     public SAMRecord createSAMRecord(final SAMFileHeader header) {
         return new SAMRecord(header);
     }
@@ -23,6 +24,7 @@ public class DefaultSAMRecordFactory implements SAMRecordFactory {
      * any value other than NO_ALIGNMENT_REFERENCE_INDEX, the values must be resolvable against the sequence
      * dictionary in the header argument.
      */
+    @Override
     public BAMRecord createBAMRecord (final SAMFileHeader header,
                                       final int referenceSequenceIndex,
                                       final int alignmentStart,
diff --git a/src/main/java/htsjdk/samtools/Defaults.java b/src/main/java/htsjdk/samtools/Defaults.java
index 5e3f6da..af3e439 100644
--- a/src/main/java/htsjdk/samtools/Defaults.java
+++ b/src/main/java/htsjdk/samtools/Defaults.java
@@ -14,7 +14,7 @@ import java.util.TreeMap;
  * @author Tim Fennell
  */
 public class Defaults {
-    private static Log log = Log.getInstance(Defaults.class);
+      private static final Log log = Log.getInstance(Defaults.class);
     
     /** Should BAM index files be created when writing out coordinate sorted BAM files?  Default = false. */
     public static final boolean CREATE_INDEX;
@@ -85,6 +85,16 @@ public class Defaults {
     public static final boolean SRA_LIBRARIES_DOWNLOAD;
 
 
+    /**
+     * the name of the system property that disables snappy
+     */
+    public static final String DISABLE_SNAPPY_PROPERTY_NAME = "snappy.disable";
+
+    /**
+     * Disable use of the Snappy compressor
+     */
+    public static final boolean DISABLE_SNAPPY_COMPRESSOR;
+
     static {
         CREATE_INDEX = getBooleanProperty("create_index", false);
         CREATE_MD5 = getBooleanProperty("create_md5", false);
@@ -104,6 +114,7 @@ public class Defaults {
         CUSTOM_READER_FACTORY = getStringProperty("custom_reader", "");
         SAM_FLAG_FIELD_FORMAT = SamFlagField.valueOf(getStringProperty("sam_flag_field_format", SamFlagField.DECIMAL.name()));
         SRA_LIBRARIES_DOWNLOAD = getBooleanProperty("sra_libraries_download", false);
+        DISABLE_SNAPPY_COMPRESSOR = getBooleanProperty(DISABLE_SNAPPY_PROPERTY_NAME, false);
     }
 
     /**
@@ -126,6 +137,7 @@ public class Defaults {
         result.put("EBI_REFERENCE_SERVICE_URL_MASK", EBI_REFERENCE_SERVICE_URL_MASK);
         result.put("CUSTOM_READER_FACTORY", CUSTOM_READER_FACTORY);
         result.put("SAM_FLAG_FIELD_FORMAT", SAM_FLAG_FIELD_FORMAT);
+        result.put("DISABLE_SNAPPY_COMPRESSOR", DISABLE_SNAPPY_COMPRESSOR);
         return Collections.unmodifiableSortedMap(result);
     }
 
diff --git a/src/main/java/htsjdk/samtools/DiskBasedBAMFileIndex.java b/src/main/java/htsjdk/samtools/DiskBasedBAMFileIndex.java
index b5d6f59..1eddddd 100644
--- a/src/main/java/htsjdk/samtools/DiskBasedBAMFileIndex.java
+++ b/src/main/java/htsjdk/samtools/DiskBasedBAMFileIndex.java
@@ -56,6 +56,7 @@ public class DiskBasedBAMFileIndex extends AbstractBAMFileIndex
      * positions. The last position in each pair is a virtual file pointer to the first SAMRecord beyond
      * the range that may contain the indicated SAMRecords.
      */
+    @Override
     public BAMFileSpan getSpanOverlapping(final int referenceIndex, final int startPos, final int endPos) {
         final BAMIndexContent queryResults = query(referenceIndex,startPos,endPos);
 
@@ -69,6 +70,7 @@ public class DiskBasedBAMFileIndex extends AbstractBAMFileIndex
         return new BAMFileSpan(chunkList);
     }
 
+     @Override
      protected BAMIndexContent getQueryResults(final int reference){
          throw new UnsupportedOperationException();
          // todo: there ought to be a way to support this using the first startPos for the reference and the last
diff --git a/src/main/java/htsjdk/samtools/DuplicateScoringStrategy.java b/src/main/java/htsjdk/samtools/DuplicateScoringStrategy.java
index 1abd514..26c83a5 100644
--- a/src/main/java/htsjdk/samtools/DuplicateScoringStrategy.java
+++ b/src/main/java/htsjdk/samtools/DuplicateScoringStrategy.java
@@ -36,7 +36,7 @@ public class DuplicateScoringStrategy {
     public enum ScoringStrategy {
         SUM_OF_BASE_QUALITIES,
         TOTAL_MAPPED_REFERENCE_LENGTH,
-        RANDOM,
+        RANDOM
     }
 
     /** Hash used for the RANDOM scoring strategy. */
@@ -46,8 +46,8 @@ public class DuplicateScoringStrategy {
     private static enum Attr { DuplicateScore }
 
     /** Calculates a score for the read which is the sum of scores over Q15. */
-    private static short getSumOfBaseQualities(final SAMRecord rec) {
-        short score = 0;
+    private static int getSumOfBaseQualities(final SAMRecord rec) {
+        int score = 0;
         for (final byte b : rec.getBaseQualities()) {
             if (b >= 15) score += b;
         }
@@ -64,6 +64,8 @@ public class DuplicateScoringStrategy {
 
     /**
      * Returns the duplicate score computed from the given fragment.
+     * value should be capped by Short.MAX_VALUE/2 since the score from two reads will be
+     * added and an overflow will be
      *
      * If true is given to assumeMateCigar, then any score that can use the mate cigar to compute the mate's score will return the score
      * computed on both ends.
@@ -72,24 +74,40 @@ public class DuplicateScoringStrategy {
         Short storedScore = (Short) record.getTransientAttribute(Attr.DuplicateScore);
 
         if (storedScore == null) {
-            short score = 0;
-
+            short score=0;
             switch (scoringStrategy) {
                 case SUM_OF_BASE_QUALITIES:
-                    score += getSumOfBaseQualities(record);
+                    // two (very) long reads worth of high-quality bases can go over Short.MAX_VALUE/2
+                    // and risk overflow.
+                    score += (short) Math.min(getSumOfBaseQualities(record), Short.MAX_VALUE / 2);
                     break;
                 case TOTAL_MAPPED_REFERENCE_LENGTH:
                     if (!record.getReadUnmappedFlag()) {
-                        score += record.getCigar().getReferenceLength();
+                        // no need to remember the score since this scoring mechanism is symmetric
+                        score = (short) Math.min(record.getCigar().getReferenceLength(), Short.MAX_VALUE / 2);
                     }
                     if (assumeMateCigar && record.getReadPairedFlag() && !record.getMateUnmappedFlag()) {
-                        score += SAMUtils.getMateCigar(record).getReferenceLength();
+                        score += (short) Math.min(SAMUtils.getMateCigar(record).getReferenceLength(), Short.MAX_VALUE / 2);
                     }
                     break;
+                // The RANDOM score gives the same score to both reads so that they get filtered together.
+                // it's not critical do use the readName since the scores from both ends get added, but it seem
+                // to be clearer this way.
                 case RANDOM:
-                    score += (short) (hasher.hashUnencodedChars(record.getReadName()) >> 16);
+                    // start with a random number between Short.MIN_VALUE/4 and Short.MAX_VALUE/4
+                    score += (short) (hasher.hashUnencodedChars(record.getReadName()) & 0b11_1111_1111_1111);
+                    // subtract Short.MIN_VALUE/4 from it to end up with a number between
+                    // 0 and Short.MAX_VALUE/2. This number can be then discounted in case the read is
+                    // not passing filters. We need to stay far from overflow so that when we add the two
+                    // scores from the two read mates we do not overflow since that could cause us to chose a
+                    // failing read-pair instead of a passing one.
+                    score -= Short.MIN_VALUE / 4;
             }
 
+            // make sure that filter-failing records are heavily discounted. (the discount can happen twice, once
+            // for each mate, so need to make sure we do not subtract more than Short.MIN_VALUE overall.)
+            score += record.getReadFailsVendorQualityCheckFlag() ? (short) (Short.MIN_VALUE / 2) : 0;
+
             storedScore = score;
             record.setTransientAttribute(Attr.DuplicateScore, storedScore);
         }
@@ -110,7 +128,7 @@ public class DuplicateScoringStrategy {
         int cmp;
 
         // always prefer paired over non-paired
-        if (rec1.getReadPairedFlag() != rec2.getReadPairedFlag()) return rec1.getReadPairedFlag() ? 1 : -1;
+        if (rec1.getReadPairedFlag() != rec2.getReadPairedFlag()) return rec1.getReadPairedFlag() ? -1 : 1;
 
         cmp = computeDuplicateScore(rec2, scoringStrategy, assumeMateCigar) - computeDuplicateScore(rec1, scoringStrategy, assumeMateCigar);
 
@@ -125,7 +143,7 @@ public class DuplicateScoringStrategy {
     }
 
     /**
-     * Compare two records based on their duplicate scores.  The duplicate scores for each record is assume to be
+     * Compare two records based on their duplicate scores.  The duplicate scores for each record is assumed to be
      * pre-computed by computeDuplicateScore and stored in the "DS" tag.  If the scores are equal, we break
      * ties based on mapping quality (added to the mate's mapping quality if paired and mapped), then library/read name.
      *
diff --git a/src/main/java/htsjdk/samtools/DuplicateSetIterator.java b/src/main/java/htsjdk/samtools/DuplicateSetIterator.java
index 9a0c6f1..6e83303 100644
--- a/src/main/java/htsjdk/samtools/DuplicateSetIterator.java
+++ b/src/main/java/htsjdk/samtools/DuplicateSetIterator.java
@@ -114,12 +114,13 @@ public class DuplicateSetIterator implements CloseableIterator<DuplicateSet> {
     }
 
     @Deprecated
-    /** Do not use this method as the first duplicate set will not be compared with this scoring strategy.
+    /** @deprecated Do not use this method as the first duplicate set will not be compared with this scoring strategy.
       * Instead, provide a comparator to the constructor that has the scoring strategy set. */
     public void setScoringStrategy(final DuplicateScoringStrategy.ScoringStrategy scoringStrategy) {
         this.comparator.setScoringStrategy(scoringStrategy);
     }
 
+    @Override
     public DuplicateSet next() {
         DuplicateSet duplicateSet = null;
 
@@ -161,12 +162,15 @@ public class DuplicateSetIterator implements CloseableIterator<DuplicateSet> {
         return duplicateSet;
     }
 
+    @Override
     public void close() { wrappedIterator.close(); }
 
+    @Override
     public boolean hasNext() {
         return (!duplicateSet.isEmpty() || wrappedIterator.hasNext());
     }
 
     // Does nothing!
+    @Override
     public void remove() { }
 }
diff --git a/src/main/java/htsjdk/samtools/FixBAMFile.java b/src/main/java/htsjdk/samtools/FixBAMFile.java
index 6bea56e..ab8a131 100755
--- a/src/main/java/htsjdk/samtools/FixBAMFile.java
+++ b/src/main/java/htsjdk/samtools/FixBAMFile.java
@@ -27,6 +27,10 @@ import htsjdk.samtools.util.CloserUtil;
 
 import java.io.File;
 
+/**
+ * @deprecated since 07/2017. This tool is undocumented and untested.
+ */
+ at Deprecated
 public class FixBAMFile {
     public static void main(String[] args) {
         File inputFile = new File(args[0]);
diff --git a/src/main/java/htsjdk/samtools/MergingSamRecordIterator.java b/src/main/java/htsjdk/samtools/MergingSamRecordIterator.java
index a294752..45d002e 100644
--- a/src/main/java/htsjdk/samtools/MergingSamRecordIterator.java
+++ b/src/main/java/htsjdk/samtools/MergingSamRecordIterator.java
@@ -107,6 +107,7 @@ public class MergingSamRecordIterator implements CloseableIterator<SAMRecord> {
     /**
      * Close down all open iterators.
      */
+    @Override
     public void close() {
         // Iterators not in the priority queue have already been closed; only close down the iterators that are still in the priority queue.
         for (CloseableIterator<SAMRecord> iterator : pq)
@@ -114,12 +115,14 @@ public class MergingSamRecordIterator implements CloseableIterator<SAMRecord> {
     }
 
     /** Returns true if any of the underlying iterators has more records, otherwise false. */
+    @Override
     public boolean hasNext() {
         startIterationIfRequired();
         return !this.pq.isEmpty();
     }
 
     /** Returns the next record from the top most iterator during merging. */
+    @Override
     public SAMRecord next() {
         startIterationIfRequired();
 
@@ -163,6 +166,7 @@ public class MergingSamRecordIterator implements CloseableIterator<SAMRecord> {
     }
 
     /** Unsupported operation. */
+    @Override
     public void remove() {
         throw new UnsupportedOperationException("MergingSAMRecorderIterator.remove()");
     }
@@ -176,10 +180,12 @@ public class MergingSamRecordIterator implements CloseableIterator<SAMRecord> {
         // For unsorted build a fake comparator that compares based on object ID
         if (this.sortOrder == SAMFileHeader.SortOrder.unsorted) {
             return new SAMRecordComparator() {
+                @Override
                 public int fileOrderCompare(final SAMRecord lhs, final SAMRecord rhs) {
                     return System.identityHashCode(lhs) - System.identityHashCode(rhs);
                 }
 
+                @Override
                 public int compare(final SAMRecord lhs, final SAMRecord rhs) {
                     return fileOrderCompare(lhs, rhs);
                 }
@@ -206,6 +212,7 @@ public class MergingSamRecordIterator implements CloseableIterator<SAMRecord> {
     private class MergedSequenceDictionaryCoordinateOrderComparator extends SAMRecordCoordinateComparator implements Serializable {
         private static final long serialVersionUID = 1L;
 
+        @Override
         public int fileOrderCompare(final SAMRecord samRecord1, final SAMRecord samRecord2) {
             final int referenceIndex1 = getReferenceIndex(samRecord1);
             final int referenceIndex2 = getReferenceIndex(samRecord2);
diff --git a/src/main/java/htsjdk/samtools/QueryInterval.java b/src/main/java/htsjdk/samtools/QueryInterval.java
index bdfb52c..dfda209 100644
--- a/src/main/java/htsjdk/samtools/QueryInterval.java
+++ b/src/main/java/htsjdk/samtools/QueryInterval.java
@@ -29,6 +29,7 @@ public class QueryInterval implements Comparable<QueryInterval> {
     }
 
 
+    @Override
     public int compareTo(final QueryInterval other) {
         int comp = this.referenceIndex - other.referenceIndex;
         if (comp != 0) return comp;
@@ -41,10 +42,10 @@ public class QueryInterval implements Comparable<QueryInterval> {
     }
 
     /**
-     * @return true if both are on same reference, and other starts exactly where this ends.
+     * @return true if both are on same reference, and other starts exactly before this ends.
      */
-    public boolean abuts(final QueryInterval other) {
-        return this.referenceIndex == other.referenceIndex && this.end == other.start;
+    public boolean endsAtStartOf(final QueryInterval other) {
+        return this.referenceIndex == other.referenceIndex && this.end + 1 == other.start;
     }
 
     /**
@@ -80,7 +81,7 @@ public class QueryInterval implements Comparable<QueryInterval> {
 
         for (int i = 1; i < inputIntervals.length; ++i) {
             final QueryInterval next = inputIntervals[i];
-            if (previous.abuts(next) || previous.overlaps(next)) {
+            if (previous.endsAtStartOf(next) || previous.overlaps(next)) {
                 final int newEnd = ((previous.end == 0 || next.end == 0) ? 0 : Math.max(previous.end, next.end));
                 previous = new QueryInterval(previous.referenceIndex, previous.start, newEnd);
             } else {
@@ -93,4 +94,22 @@ public class QueryInterval implements Comparable<QueryInterval> {
 
         return unique.toArray(EMPTY_QUERY_INTERVAL_ARRAY);
     }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+
+        QueryInterval that = (QueryInterval) o;
+
+        return this.compareTo(that) == 0;
+    }
+
+    @Override
+    public int hashCode() {
+        int result = referenceIndex;
+        result = 31 * result + start;
+        result = 31 * result + end;
+        return result;
+    }
 }
diff --git a/src/main/java/htsjdk/samtools/SAMFileHeader.java b/src/main/java/htsjdk/samtools/SAMFileHeader.java
index 47543c2..789ece0 100644
--- a/src/main/java/htsjdk/samtools/SAMFileHeader.java
+++ b/src/main/java/htsjdk/samtools/SAMFileHeader.java
@@ -24,7 +24,9 @@
 package htsjdk.samtools;
 
 
-import htsjdk.samtools.util.StringLineReader;
+import htsjdk.samtools.util.BufferedLineReader;
+import htsjdk.samtools.util.CollectionUtil;
+import htsjdk.samtools.util.Log;
 
 import java.io.StringWriter;
 import java.lang.reflect.Constructor;
@@ -47,15 +49,19 @@ public class SAMFileHeader extends AbstractSAMHeaderRecord
     public static final String SORT_ORDER_TAG = "SO";
     public static final String GROUP_ORDER_TAG = "GO";
     public static final String CURRENT_VERSION = "1.5";
-    public static final Set<String> ACCEPTABLE_VERSIONS =
-            new HashSet<String>(Arrays.asList("1.0", "1.3", "1.4", "1.5"));
+    public static final Set<String> ACCEPTABLE_VERSIONS = CollectionUtil.makeSet("1.0", "1.3", "1.4", "1.5");
 
+    private SortOrder sortOrder = null;
+    private GroupOrder groupOrder = null;
+
+    private static final Log log = Log.getInstance(SAMFileHeader.class);
     /**
      * These tags are of known type, so don't need a type field in the text representation.
      */
     public static final Set<String> STANDARD_TAGS =
-            new HashSet<String>(Arrays.asList(VERSION_TAG, SORT_ORDER_TAG, GROUP_ORDER_TAG));
+            new HashSet<>(Arrays.asList(VERSION_TAG, SORT_ORDER_TAG, GROUP_ORDER_TAG));
 
+    @Override
     Set<String> getStandardTags() {
         return STANDARD_TAGS;
     }
@@ -64,11 +70,11 @@ public class SAMFileHeader extends AbstractSAMHeaderRecord
      * Ways in which a SAM or BAM may be sorted.
      */
     public enum SortOrder {
-
         unsorted(null),
         queryname(SAMRecordQueryNameComparator.class),
         coordinate(SAMRecordCoordinateComparator.class),
-        duplicate(SAMRecordDuplicateComparator.class); // NB: this is not in the SAM spec!
+        duplicate(SAMRecordDuplicateComparator.class), // NB: this is not in the SAM spec!
+        unknown(null);
 
         private final Class<? extends SAMRecordComparator> comparator;
 
@@ -105,16 +111,14 @@ public class SAMFileHeader extends AbstractSAMHeaderRecord
         none, query, reference
     }
 
-    private List<SAMReadGroupRecord> mReadGroups =
-        new ArrayList<SAMReadGroupRecord>();
-    private List<SAMProgramRecord> mProgramRecords = new ArrayList<SAMProgramRecord>();
-    private final Map<String, SAMReadGroupRecord> mReadGroupMap =
-        new HashMap<String, SAMReadGroupRecord>();
-    private final Map<String, SAMProgramRecord> mProgramRecordMap = new HashMap<String, SAMProgramRecord>();
+    private List<SAMReadGroupRecord> mReadGroups = new ArrayList<>();
+    private List<SAMProgramRecord> mProgramRecords = new ArrayList<>();
+    private final Map<String, SAMReadGroupRecord> mReadGroupMap = new HashMap<>();
+    private final Map<String, SAMProgramRecord> mProgramRecordMap = new HashMap<>();
     private SAMSequenceDictionary mSequenceDictionary = new SAMSequenceDictionary();
-    final private List<String> mComments = new ArrayList<String>();
+    final private List<String> mComments = new ArrayList<>();
     private String textHeader;
-    private final List<SAMValidationError> mValidationErrors = new ArrayList<SAMValidationError>();
+    private final List<SAMValidationError> mValidationErrors = new ArrayList<>();
 
     public SAMFileHeader() {
         setAttribute(VERSION_TAG, CURRENT_VERSION);
@@ -127,11 +131,11 @@ public class SAMFileHeader extends AbstractSAMHeaderRecord
     }
 
     public String getVersion() {
-        return (String) getAttribute("VN");
+        return getAttribute(VERSION_TAG);
     }
 
     public String getCreator() {
-        return (String) getAttribute("CR");
+        return getAttribute("CR");
     }
 
     public SAMSequenceDictionary getSequenceDictionary() {
@@ -248,26 +252,82 @@ public class SAMFileHeader extends AbstractSAMHeaderRecord
     }
 
     public SortOrder getSortOrder() {
-        final String so = getAttribute("SO");
-        if (so == null || so.equals("unknown")) {
-            return SortOrder.unsorted;
+        if (sortOrder == null) {
+            final String so = getAttribute(SORT_ORDER_TAG);
+            if (so == null) {
+                sortOrder = SortOrder.unsorted;
+            } else {
+                try {
+                    return SortOrder.valueOf(so);
+                } catch (IllegalArgumentException e) {
+                    log.warn("Found non conforming header SO tag: " + so + ". Treating as 'unknown'.");
+                    sortOrder = SortOrder.unknown;
+                }
+            }
         }
-        return SortOrder.valueOf((String) so);
+        return sortOrder;
     }
 
     public void setSortOrder(final SortOrder so) {
-        setAttribute("SO", so.name());
+        sortOrder = so;
+        super.setAttribute(SORT_ORDER_TAG, so.name());
     }
 
     public GroupOrder getGroupOrder() {
-        if (getAttribute("GO") == null) {
-            return GroupOrder.none;
+        if (groupOrder == null) {
+            final String go = getAttribute(GROUP_ORDER_TAG);
+            if (go == null) {
+                groupOrder = GroupOrder.none;
+            } else {
+                try {
+                    return GroupOrder.valueOf(go);
+                } catch (IllegalArgumentException e) {
+                    log.warn("Found non conforming header GO tag: " + go + ". Treating as 'none'.");
+                    groupOrder = GroupOrder.none;
+                }
+            }
         }
-        return GroupOrder.valueOf((String)getAttribute("GO"));
+        return groupOrder;
     }
 
     public void setGroupOrder(final GroupOrder go) {
-        setAttribute("GO", go.name());
+        groupOrder = go;
+        super.setAttribute(GROUP_ORDER_TAG, go.name());
+    }
+
+
+    /**
+     * Set the given value for the attribute named 'key'.  Replaces an existing value, if any.
+     * If value is null, the attribute is removed.
+     * Otherwise, the value will be converted to a String with toString.
+     * @param key attribute name
+     * @param value attribute value
+     * @deprecated Use {@link #setAttribute(String, String) instead
+     */
+    @Deprecated
+    @Override
+    public void setAttribute(final String key, final Object value) {
+        if (key.equals(SORT_ORDER_TAG) || key.equals(GROUP_ORDER_TAG)) {
+            this.setAttribute(key, value.toString());
+        } else {
+            super.setAttribute(key, value);
+        }
+    }
+
+    /**
+     * Set the given value for the attribute named 'key'.  Replaces an existing value, if any.
+     * If value is null, the attribute is removed.
+     * @param key attribute name
+     * @param value attribute value
+     */
+    @Override
+    public void setAttribute(final String key, final String value) {
+        if (key.equals(SORT_ORDER_TAG)) {
+            this.sortOrder = null;
+        } else if (key.equals(GROUP_ORDER_TAG)) {
+            this.groupOrder = null;
+        }
+        super.setAttribute(key, value);
     }
 
     /**
@@ -353,19 +413,25 @@ public class SAMFileHeader extends AbstractSAMHeaderRecord
         return result;
     }
 
+    @Override
     public final SAMFileHeader clone() {
         final SAMTextHeaderCodec codec = new SAMTextHeaderCodec();
         codec.setValidationStringency(ValidationStringency.SILENT);
+        return codec.decode(BufferedLineReader.fromString(getSAMString()), "SAMFileHeader.clone");
+    }
+
+    @Override
+    public String getSAMString() {
         final StringWriter stringWriter = new StringWriter();
-        codec.encode(stringWriter, this);
-        return codec.decode(new StringLineReader(stringWriter.toString()), "SAMFileHeader.clone");
+        new SAMTextHeaderCodec().encode(stringWriter, this);
+        return stringWriter.toString();
     }
 
     /** Little class to generate program group IDs */
     public static class PgIdGenerator {
         private int recordCounter;
 
-        private final Set<String> idsThatAreAlreadyTaken = new HashSet<String>();
+        private final Set<String> idsThatAreAlreadyTaken = new HashSet<>();
 
         public PgIdGenerator(final SAMFileHeader header) {
             for (final SAMProgramRecord pgRecord : header.getProgramRecords()) {
@@ -393,7 +459,6 @@ public class SAMFileHeader extends AbstractSAMHeaderRecord
                 idsThatAreAlreadyTaken.add(newId);
                 return newId;
             }
-
         }
     }
 }
diff --git a/src/main/java/htsjdk/samtools/SAMFileWriter.java b/src/main/java/htsjdk/samtools/SAMFileWriter.java
index fe99591..24936a0 100644
--- a/src/main/java/htsjdk/samtools/SAMFileWriter.java
+++ b/src/main/java/htsjdk/samtools/SAMFileWriter.java
@@ -46,5 +46,6 @@ public interface SAMFileWriter extends Closeable {
     /**
      * Must be called to flush or file will likely be defective. 
      */
+    @Override
     void close();
 }
diff --git a/src/main/java/htsjdk/samtools/SAMFileWriterFactory.java b/src/main/java/htsjdk/samtools/SAMFileWriterFactory.java
index 61f1c9c..5829d56 100644
--- a/src/main/java/htsjdk/samtools/SAMFileWriterFactory.java
+++ b/src/main/java/htsjdk/samtools/SAMFileWriterFactory.java
@@ -35,6 +35,8 @@ import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
 import java.util.zip.Deflater;
 
 /**
@@ -86,6 +88,14 @@ public class SAMFileWriterFactory implements Cloneable {
     }
 
     /**
+     * Gets the default for whether to create md5Files for BAM files this factory.
+     * @see #setDefaultCreateMd5File(boolean)
+     */
+    public static boolean getDefaultCreateMd5File() {
+        return defaultCreateMd5File;
+    }
+
+    /**
      * Sets whether to create md5Files for BAMs from this factory.
      */
     public SAMFileWriterFactory setCreateMd5File(final boolean createMd5File) {
@@ -128,6 +138,14 @@ public class SAMFileWriterFactory implements Cloneable {
     }
 
     /**
+     * Gets the default for subsequent SAMFileWriterFactories that do not specify whether to create an index.
+     * @see #setDefaultCreateIndexWhileWriting
+     */
+    public static boolean getDefaultCreateIndexWhileWriting() {
+        return defaultCreateIndexWhileWriting;
+    }
+
+    /**
      * Convenience method allowing newSAMFileWriterFactory().setCreateIndex(true);
      * Equivalent to SAMFileWriterFactory.setDefaultCreateIndexWhileWriting(true); newSAMFileWriterFactory();
      * If a BAM or CRAM (not SAM) file is created, the setting is true, and the file header specifies coordinate order,
@@ -158,6 +176,14 @@ public class SAMFileWriterFactory implements Cloneable {
     }
 
     /**
+     * Gets the maximum number of records held in RAM before spilling to disk during sorting.
+     * @see #setMaxRecordsInRam(int)
+     */
+    public int getMaxRecordsInRam() {
+        return maxRecordsInRam;
+    }
+
+    /**
      * Turn on or off the use of asynchronous IO for writing output SAM and BAM files.  If true then
      * each SAMFileWriter creates a dedicated thread which is used for compression and IO activities.
      */
@@ -195,6 +221,14 @@ public class SAMFileWriterFactory implements Cloneable {
     }
 
     /**
+     * Gets the temporary directory that will be used when sorting data.
+     * @see #setTempDirectory(File)
+     */
+    public File getTempDirectory() {
+        return tmpDir;
+    }
+
+    /**
      * Set the flag output format only when writing text.
      * Default value: [[htsjdk.samtools.SAMTextWriter.samFlagFieldOutput.DECIMAL]]
      */
@@ -216,6 +250,17 @@ public class SAMFileWriterFactory implements Cloneable {
     }
 
     /**
+     * Create a BAMFileWriter that is ready to receive SAMRecords.  Uses default compression level.
+     *
+     * @param header     entire header. Sort order is determined by the sortOrder property of this arg.
+     * @param presorted  if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
+     * @param outputPath where to write the output.
+     */
+    public SAMFileWriter makeBAMWriter(final SAMFileHeader header, final boolean presorted, final Path outputPath) {
+        return makeBAMWriter(header, presorted, outputPath, this.getCompressionLevel());
+    }
+
+    /**
      * Create a BAMFileWriter that is ready to receive SAMRecords.
      *
      * @param header           entire header. Sort order is determined by the sortOrder property of this arg.
@@ -225,25 +270,37 @@ public class SAMFileWriterFactory implements Cloneable {
      */
     public SAMFileWriter makeBAMWriter(final SAMFileHeader header, final boolean presorted, final File outputFile,
                                        final int compressionLevel) {
+        return makeBAMWriter(header, presorted, outputFile.toPath(), compressionLevel);
+    }
+
+    /**
+     * Create a BAMFileWriter that is ready to receive SAMRecords.
+     *
+     * @param header           entire header. Sort order is determined by the sortOrder property of this arg.
+     * @param presorted        if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
+     * @param outputPath       where to write the output.
+     * @param compressionLevel Override default compression level with the given value, between 0 (fastest) and 9 (smallest).
+     */
+    public SAMFileWriter makeBAMWriter(final SAMFileHeader header, final boolean presorted, final Path outputPath,
+        final int compressionLevel) {
         try {
-            final boolean createMd5File = this.createMd5File && IOUtil.isRegularPath(outputFile);
+            final boolean createMd5File = this.createMd5File && IOUtil.isRegularPath(outputPath);
             if (this.createMd5File && !createMd5File) {
-                log.warn("Cannot create MD5 file for BAM because output file is not a regular file: " + outputFile.getAbsolutePath());
+                log.warn("Cannot create MD5 file for BAM because output file is not a regular file: " + outputPath.toUri());
             }
-            OutputStream os = IOUtil.maybeBufferOutputStream(new FileOutputStream(outputFile, false), bufferSize);
-            if (createMd5File) os = new Md5CalculatingOutputStream(os, new File(outputFile.getAbsolutePath() + ".md5"));
-            final BAMFileWriter ret = new BAMFileWriter(os, outputFile, compressionLevel, deflaterFactory);
-            final boolean createIndex = this.createIndex && IOUtil.isRegularPath(outputFile);
+            OutputStream os = IOUtil.maybeBufferOutputStream(Files.newOutputStream(outputPath), bufferSize);
+            if (createMd5File) os = new Md5CalculatingOutputStream(os, IOUtil.addExtension(outputPath,".md5"));
+            final BAMFileWriter ret = new BAMFileWriter(os, outputPath.toUri().toString(), compressionLevel, deflaterFactory);
+            final boolean createIndex = this.createIndex && IOUtil.isRegularPath(outputPath);
             if (this.createIndex && !createIndex) {
-                log.warn("Cannot create index for BAM because output file is not a regular file: " + outputFile.getAbsolutePath());
+                log.warn("Cannot create index for BAM because output file is not a regular file: " + outputPath.toUri());
             }
-            if (this.tmpDir != null) ret.setTempDirectory(this.tmpDir);
             initializeBAMWriter(ret, header, presorted, createIndex);
 
             if (this.useAsyncIo) return new AsyncSAMFileWriter(ret, this.asyncOutputBufferSize);
             else return ret;
         } catch (final IOException ioe) {
-            throw new RuntimeIOException("Error opening file: " + outputFile.getAbsolutePath());
+            throw new RuntimeIOException("Error opening file: " + outputPath.toUri());
         }
     }
 
@@ -252,6 +309,7 @@ public class SAMFileWriterFactory implements Cloneable {
         if (maxRecordsInRam != null) {
             writer.setMaxRecordsInRam(maxRecordsInRam);
         }
+        if (this.tmpDir != null) writer.setTempDirectory(this.tmpDir);
         writer.setHeader(header);
         if (createIndex && writer.getSortOrder().equals(SAMFileHeader.SortOrder.coordinate)) {
             writer.enableBamIndexConstruction();
@@ -266,6 +324,17 @@ public class SAMFileWriterFactory implements Cloneable {
      * @param outputFile where to write the output.
      */
     public SAMFileWriter makeSAMWriter(final SAMFileHeader header, final boolean presorted, final File outputFile) {
+        return makeSAMWriter(header, presorted, outputFile.toPath());
+    }
+
+    /**
+     * Create a SAMTextWriter that is ready to receive SAMRecords.
+     *
+     * @param header     entire header. Sort order is determined by the sortOrder property of this arg.
+     * @param presorted  if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
+     * @param outputPath where to write the output.
+     */
+    public SAMFileWriter makeSAMWriter(final SAMFileHeader header, final boolean presorted, final Path outputPath) {
         /**
          * Use the value specified from Defaults.SAM_FLAG_FIELD_FORMAT when samFlagFieldOutput value has not been set.  This should
          * be SamFlagField.DECIMAL when the user has not set Defaults.SAM_FLAG_FIELD_FORMAT.
@@ -275,19 +344,15 @@ public class SAMFileWriterFactory implements Cloneable {
         }
         try {
             final SAMTextWriter ret = this.createMd5File
-                    ? new SAMTextWriter(new Md5CalculatingOutputStream(new FileOutputStream(outputFile, false),
-                    new File(outputFile.getAbsolutePath() + ".md5")), samFlagFieldOutput)
-                    : new SAMTextWriter(outputFile, samFlagFieldOutput);
-            ret.setSortOrder(header.getSortOrder(), presorted);
-            if (maxRecordsInRam != null) {
-                ret.setMaxRecordsInRam(maxRecordsInRam);
-            }
-            ret.setHeader(header);
-
-            if (this.useAsyncIo) return new AsyncSAMFileWriter(ret, this.asyncOutputBufferSize);
-            else return ret;
+                    ? new SAMTextWriter(new Md5CalculatingOutputStream(Files.newOutputStream(outputPath),
+                          IOUtil.addExtension(outputPath, ".md5")), samFlagFieldOutput)
+                    : new SAMTextWriter(null == outputPath
+                                        ? null
+                                        : Files.newOutputStream(outputPath),
+                                        samFlagFieldOutput);
+            return initWriter(header, presorted, ret);
         } catch (final IOException ioe) {
-            throw new RuntimeIOException("Error opening file: " + outputFile.getAbsolutePath());
+            throw new RuntimeIOException("Error opening file: " + outputPath.toUri());
         }
     }
 
@@ -308,7 +373,7 @@ public class SAMFileWriterFactory implements Cloneable {
         if (samFlagFieldOutput == SamFlagField.NONE) {
             samFlagFieldOutput = Defaults.SAM_FLAG_FIELD_FORMAT;
         }
-        return initWriter(header, presorted, false, new SAMTextWriter(stream, samFlagFieldOutput));
+        return initWriter(header, presorted, new SAMTextWriter(stream, samFlagFieldOutput));
     }
 
     /**
@@ -322,24 +387,23 @@ public class SAMFileWriterFactory implements Cloneable {
      */
 
     public SAMFileWriter makeBAMWriter(final SAMFileHeader header, final boolean presorted, final OutputStream stream) {
-        return initWriter(header, presorted, true, new BAMFileWriter(stream, null, this.getCompressionLevel(), this.deflaterFactory));
+        return initWriter(header, presorted, new BAMFileWriter(stream, (File)null, this.getCompressionLevel(), this.deflaterFactory));
     }
 
     /**
      * Initialize SAMTextWriter or a BAMFileWriter and possibly wrap in AsyncSAMFileWriter
-     *
      * @param header    entire header. Sort order is determined by the sortOrder property of this arg.
      * @param presorted if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
-     * @param binary    do we want to generate a BAM or a SAM
      * @param writer    SAM or BAM writer to initialize and maybe wrap.
      */
 
-    private SAMFileWriter initWriter(final SAMFileHeader header, final boolean presorted, final boolean binary,
+    private SAMFileWriter initWriter(final SAMFileHeader header, final boolean presorted,
                                      final SAMFileWriterImpl writer) {
         writer.setSortOrder(header.getSortOrder(), presorted);
         if (maxRecordsInRam != null) {
             writer.setMaxRecordsInRam(maxRecordsInRam);
         }
+        if (this.tmpDir != null) writer.setTempDirectory(this.tmpDir);
         writer.setHeader(header);
 
         if (this.useAsyncIo) return new AsyncSAMFileWriter(writer, this.asyncOutputBufferSize);
@@ -355,14 +419,26 @@ public class SAMFileWriterFactory implements Cloneable {
      * @return SAM or BAM writer based on file extension of outputFile.
      */
     public SAMFileWriter makeSAMOrBAMWriter(final SAMFileHeader header, final boolean presorted, final File outputFile) {
-        final String filename = outputFile.getName();
+       return makeSAMOrBAMWriter(header, presorted, outputFile.toPath());
+    }
+
+    /**
+     * Create either a SAM or a BAM writer based on examination of the outputPath extension.
+     *
+     * @param header     entire header. Sort order is determined by the sortOrder property of this arg.
+     * @param presorted  presorted if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
+     * @param outputPath where to write the output.  Must end with .sam or .bam.
+     * @return SAM or BAM writer based on file extension of outputPath.
+     */
+    public SAMFileWriter makeSAMOrBAMWriter(final SAMFileHeader header, final boolean presorted, final Path outputPath) {
+        final String filename = outputPath.getFileName().toString();
         if (filename.endsWith(BamFileIoUtils.BAM_FILE_EXTENSION)) {
-            return makeBAMWriter(header, presorted, outputFile);
+            return makeBAMWriter(header, presorted, outputPath);
         }
         if (filename.endsWith(".sam")) {
-            return makeSAMWriter(header, presorted, outputFile);
+            return makeSAMWriter(header, presorted, outputPath);
         }
-        return makeBAMWriter(header, presorted, outputFile);
+        return makeBAMWriter(header, presorted, outputPath);
     }
 
     /**
@@ -377,11 +453,26 @@ public class SAMFileWriterFactory implements Cloneable {
      *
      */
     public SAMFileWriter makeWriter(final SAMFileHeader header, final boolean presorted, final File outputFile, final File referenceFasta) {
-        if (outputFile.getName().endsWith(SamReader.Type.CRAM_TYPE.fileExtension())) {
-            return makeCRAMWriter(header, presorted, outputFile, referenceFasta);
+        return makeWriter(header, presorted, null == outputFile ? null : outputFile.toPath(), referenceFasta);
+    }
+
+    /**
+     *
+     * Create a SAM, BAM or CRAM writer based on examination of the outputPath extension.
+     *
+     * @param header header. Sort order is determined by the sortOrder property of this arg.
+     * @param presorted if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
+     * @param outputPath where to write the output.  Must end with .sam, .bam or .cram.
+     * @param referenceFasta reference sequence file
+     * @return SAMFileWriter appropriate for the file type specified in outputPath
+     *
+     */
+    public SAMFileWriter makeWriter(final SAMFileHeader header, final boolean presorted, final Path outputPath, final File referenceFasta) {
+        if (null != outputPath && outputPath.toString().endsWith(SamReader.Type.CRAM_TYPE.fileExtension())) {
+            return makeCRAMWriter(header, presorted, outputPath, referenceFasta);
         }
         else {
-            return makeSAMOrBAMWriter(header, presorted, outputFile);
+            return makeSAMOrBAMWriter(header, presorted, outputPath);
         }
     }
 
@@ -416,7 +507,23 @@ public class SAMFileWriterFactory implements Cloneable {
      *
      */
     public CRAMFileWriter makeCRAMWriter(final SAMFileHeader header, final File outputFile, final File referenceFasta) {
-        return createCRAMWriterWithSettings(header, true, outputFile, referenceFasta);
+        return createCRAMWriterWithSettings(header, true, outputFile.toPath(), referenceFasta);
+    }
+
+    /**
+     * Create a CRAMFileWriter on an output file. Requires input record to be presorted to match the
+     * sort order defined by the input header.
+     *
+     * Note: does not honor factory settings for USE_ASYNC_IO.
+     *
+     * @param header entire header. Sort order is determined by the sortOrder property of this arg.
+     * @param outputPath where to write the output.  Must end with .sam, .bam or .cram.
+     * @param referenceFasta reference sequence file
+     * @return CRAMFileWriter
+     *
+     */
+    public CRAMFileWriter makeCRAMWriter(final SAMFileHeader header, final Path outputPath, final File referenceFasta) {
+        return createCRAMWriterWithSettings(header, true, outputPath, referenceFasta);
     }
 
     /**
@@ -432,7 +539,24 @@ public class SAMFileWriterFactory implements Cloneable {
      *
      */
     public CRAMFileWriter makeCRAMWriter(final SAMFileHeader header, final boolean presorted, final File outputFile, final File referenceFasta) {
-        return createCRAMWriterWithSettings(header, presorted, outputFile, referenceFasta);
+        return makeCRAMWriter(header, presorted, outputFile.toPath(), referenceFasta);
+    }
+
+
+    /**
+     * Create a CRAMFileWriter on an output file.
+     *
+     * Note: does not honor factory setting for USE_ASYNC_IO.
+     *
+     * @param header entire header. Sort order is determined by the sortOrder property of this arg.
+     * @param presorted  if true, SAMRecords must be added to the SAMFileWriter in order that agrees with header.sortOrder.
+     * @param output where to write the output.  Must end with .sam, .bam or .cram.
+     * @param referenceFasta reference sequence file
+     * @return CRAMFileWriter
+     *
+     */
+    public CRAMFileWriter makeCRAMWriter(final SAMFileHeader header, final boolean presorted, final Path output, final File referenceFasta) {
+        return createCRAMWriterWithSettings(header, presorted, output, referenceFasta);
     }
 
     /**
@@ -449,40 +573,41 @@ public class SAMFileWriterFactory implements Cloneable {
     private CRAMFileWriter createCRAMWriterWithSettings(
             final SAMFileHeader header,
             final boolean presorted,
-            final File outputFile,
+            final Path outputFile,
             final File referenceFasta) {
         OutputStream cramOS = null;
         OutputStream indexOS = null ;
 
         if (createIndex) {
             if (!IOUtil.isRegularPath(outputFile)) {
-                log.warn("Cannot create index for CRAM because output file is not a regular file: " + outputFile.getAbsolutePath());
+                log.warn("Cannot create index for CRAM because output file is not a regular file: " + outputFile.toUri());
             }
             else {
+                final Path indexPath = IOUtil.addExtension(outputFile, BAMIndex.BAMIndexSuffix);
                 try {
-                    final File indexFile = new File(outputFile.getAbsolutePath() + BAMIndex.BAMIndexSuffix) ;
-                    indexOS = new FileOutputStream(indexFile) ;
+                    indexOS = Files.newOutputStream(indexPath);
                 }
                 catch (final IOException ioe) {
-                    throw new RuntimeIOException("Error creating index file for: " + outputFile.getAbsolutePath()+ BAMIndex.BAMIndexSuffix);
+                    throw new RuntimeIOException("Error creating index file for: " + indexPath.toUri());
                 }
             }
         }
 
         try {
-            cramOS = IOUtil.maybeBufferOutputStream(new FileOutputStream(outputFile, false), bufferSize);
+            cramOS = IOUtil.maybeBufferOutputStream(Files.newOutputStream(outputFile), bufferSize);
         }
         catch (final IOException ioe) {
-            throw new RuntimeIOException("Error creating CRAM file: " + outputFile.getAbsolutePath());
+            throw new RuntimeIOException("Error creating CRAM file: " + outputFile.toUri());
         }
 
+        final Path md5Path = IOUtil.addExtension(outputFile, ".md5");
         final CRAMFileWriter writer = new CRAMFileWriter(
-                createMd5File ? new Md5CalculatingOutputStream(cramOS, new File(outputFile.getAbsolutePath() + ".md5")) : cramOS,
+                createMd5File ? new Md5CalculatingOutputStream(cramOS, md5Path) : cramOS,
                 indexOS,
                 presorted,
                 new ReferenceSource(referenceFasta),
                 header,
-                outputFile.getAbsolutePath());
+                outputFile.toUri().toString());
         setCRAMWriterDefaults(writer);
 
         return writer;
diff --git a/src/main/java/htsjdk/samtools/SAMFileWriterImpl.java b/src/main/java/htsjdk/samtools/SAMFileWriterImpl.java
index 130ecea..31a8604 100644
--- a/src/main/java/htsjdk/samtools/SAMFileWriterImpl.java
+++ b/src/main/java/htsjdk/samtools/SAMFileWriterImpl.java
@@ -75,6 +75,7 @@ public abstract class SAMFileWriterImpl implements SAMFileWriter
      * Sets the progress logger used by this implementation. Setting this lets this writer emit log
      * messages as SAM records in a SortingCollection are being written to disk.
      */
+    @Override
     public void setProgressLogger(final ProgressLoggerInterface progress) {
         this.progressLogger = progress;
     }
@@ -110,7 +111,11 @@ public abstract class SAMFileWriterImpl implements SAMFileWriter
         }
         this.maxRecordsInRam = maxRecordsInRam;
     }
-    
+
+    int getMaxRecordsInRam() {
+        return maxRecordsInRam;
+    }
+
     /**
      * When writing records that are not presorted, specify the path of the temporary directory 
      * for spilling to disk.  Must be called before setHeader().
@@ -122,6 +127,10 @@ public abstract class SAMFileWriterImpl implements SAMFileWriter
         }
     }
 
+    File getTempDirectory() {
+        return tmpDir;
+    }
+
     /**
      * Must be called before addAlignment. Header cannot be null.
      */
@@ -153,6 +162,7 @@ public abstract class SAMFileWriterImpl implements SAMFileWriter
         }
     }
 
+    @Override
     public SAMFileHeader getFileHeader() {
         return header;
     }
@@ -180,6 +190,7 @@ public abstract class SAMFileWriterImpl implements SAMFileWriter
      * @throws IllegalArgumentException if the record's reference or mate reference indices cannot be
      * resolved against the writer's header using the current reference and mate reference names
      */
+    @Override
     public void addAlignment(final SAMRecord alignment)
     {
         alignment.setHeaderStrict(header); // re-establish the record header and resolve reference indices
@@ -206,6 +217,7 @@ public abstract class SAMFileWriterImpl implements SAMFileWriter
     /**
      * Must be called or else file will likely be defective.
      */
+    @Override
     public final void close()
     {
         if (!isClosed) {
diff --git a/src/main/java/htsjdk/samtools/SAMProgramRecord.java b/src/main/java/htsjdk/samtools/SAMProgramRecord.java
index 3bbecf9..f5ddd96 100644
--- a/src/main/java/htsjdk/samtools/SAMProgramRecord.java
+++ b/src/main/java/htsjdk/samtools/SAMProgramRecord.java
@@ -57,6 +57,7 @@ public class SAMProgramRecord extends AbstractSAMHeaderRecord {
         }
     }
 
+    @Override
     public String getId() {
         return getProgramGroupId();
     }
@@ -126,7 +127,14 @@ public class SAMProgramRecord extends AbstractSAMHeaderRecord {
         return result;
     }
 
+    @Override
     Set<String> getStandardTags() {
         return STANDARD_TAGS;
     }
+
+
+    @Override
+    public String getSAMString() {
+        return new SAMTextHeaderCodec().getPGLine(this);
+    }
 }
diff --git a/src/main/java/htsjdk/samtools/SAMReadGroupRecord.java b/src/main/java/htsjdk/samtools/SAMReadGroupRecord.java
index fd81852..14f1c50 100644
--- a/src/main/java/htsjdk/samtools/SAMReadGroupRecord.java
+++ b/src/main/java/htsjdk/samtools/SAMReadGroupRecord.java
@@ -74,6 +74,7 @@ public class SAMReadGroupRecord extends AbstractSAMHeaderRecord
         }
     }
 
+    @Override
     public String getId() { return getReadGroupId();  }
     public String getReadGroupId() { return mReadGroupId; }
 
@@ -158,8 +159,14 @@ public class SAMReadGroupRecord extends AbstractSAMHeaderRecord
         return mReadGroupId.hashCode();
     }
 
+    @Override
     Set<String> getStandardTags() {
         return STANDARD_TAGS;
     }
+
+    @Override
+    public String getSAMString() {
+      return new SAMTextHeaderCodec().getRGLine(this);
+    }
 }
 
diff --git a/src/main/java/htsjdk/samtools/SAMRecord.java b/src/main/java/htsjdk/samtools/SAMRecord.java
index 13ec386..b049602 100644
--- a/src/main/java/htsjdk/samtools/SAMRecord.java
+++ b/src/main/java/htsjdk/samtools/SAMRecord.java
@@ -238,7 +238,9 @@ public class SAMRecord implements Cloneable, Locatable, Serializable {
             mReadBases = NULL_SEQUENCE;
         } else {
             final byte[] bases = StringUtil.stringToBytes(value);
-            SAMUtils.normalizeBases(bases);
+            if (bases != null) {
+                SAMUtils.normalizeBases(bases);
+            }
             setReadBases(bases);
         }
     }
@@ -262,7 +264,8 @@ public class SAMRecord implements Cloneable, Locatable, Serializable {
      * @return number of bases in the read.
      */
     public int getReadLength() {
-        return getReadBases().length;
+        final byte[] readBases = getReadBases();
+        return readBases == null ? 0 : readBases.length;
     }
 
     /**
@@ -572,14 +575,16 @@ public class SAMRecord implements Cloneable, Locatable, Serializable {
     }
 
     /**
-     * @return 1-based inclusive leftmost position of the clipped sequence, or 0 if there is no position.
+     * @return 1-based inclusive leftmost position of the sequence remaining after clipping, or 0 
+     * if there is no position, e.g. for unmapped read.
      */
     public int getAlignmentStart() {
         return mAlignmentStart;
     }
 
     /**
-     * @param value 1-based inclusive leftmost position of the clipped sequence, or 0 if there is no position.
+     * @param value 1-based inclusive leftmost position of the sequence remaining after clipping or 0 
+     * if there is no position, e.g. for unmapped read.
      */
     public void setAlignmentStart(final int value) {
         mAlignmentStart = value;
@@ -590,7 +595,8 @@ public class SAMRecord implements Cloneable, Locatable, Serializable {
     }
 
     /**
-     * @return 1-based inclusive rightmost position of the clipped sequence, or 0 read if unmapped.
+     * @return 1-based inclusive rightmost position of the sequence remaining after clipping or 0 
+     * if there is no position, e.g. for unmapped read.
      */
     public int getAlignmentEnd() {
         if (getReadUnmappedFlag()) {
@@ -627,38 +633,44 @@ public class SAMRecord implements Cloneable, Locatable, Serializable {
 
 
     /**
-     * @param offset 1-based location within the unclipped sequence or 0 if there is no position.
-     * <p/>
      * Non static version of the static function with the same name.
-     * @return 1-based inclusive reference position of the unclipped sequence at a given offset,
+     *
+     * @param position 1-based location within the unclipped sequence
+     * @return 1-based reference position of the unclipped sequence at a given read position,
+     *         or 0 if there is no position.
      */
-    public int getReferencePositionAtReadPosition(final int offset) {
-        return getReferencePositionAtReadPosition(this, offset);
+    public int getReferencePositionAtReadPosition(final int position) {
+        return getReferencePositionAtReadPosition(this, position);
     }
 
     /**
-     * @param rec record to use
-     * @param offset 1-based location within the unclipped sequence
-     * @return 1-based inclusive reference position of the unclipped sequence at a given offset,
-     * or 0 if there is no position.
+     * Returns the 1-based reference position for the provided 1-based position in read.
+     *
      * For example, given the sequence NNNAAACCCGGG, cigar 3S9M, and an alignment start of 1,
-     * and a (1-based)offset 10 (start of GGG) it returns 7 (1-based offset starting after the soft clip.
+     * and a (1-based) position of 10 (start of GGG) it returns 7 (1-based position starting after
+     * the soft clip.
+     *
      * For example: given the sequence AAACCCGGGTTT, cigar 4M1D6M, an alignment start of 1,
-     * an offset of 4 returns reference position 4, an offset of 5 returns reference position 6.
+     * a position of 4, returns reference position 4, a position of 5 returns reference position 6.
+     *
      * Another example: given the sequence AAACCCGGGTTT, cigar 4M1I6M, an alignment start of 1,
-     * an offset of 4 returns reference position 4, an offset of 5 returns 0.
+     * a position of 4 returns reference position 4, an position of 5 returns 0.
+     *
+     * @param rec record to use
+     * @param position 1-based location within the unclipped sequence
+     * @return 1-based  reference position of the unclipped sequence at a given read position,
+     *                  or 0 if there is no position.
      */
-    public static int getReferencePositionAtReadPosition(final SAMRecord rec, final int offset) {
-
-        if (offset == 0) return 0;
+    public static int getReferencePositionAtReadPosition(final SAMRecord rec, final int position) {
+        if (position == 0) return 0;
 
         for (final AlignmentBlock alignmentBlock : rec.getAlignmentBlocks()) {
-            if (CoordMath.getEnd(alignmentBlock.getReadStart(), alignmentBlock.getLength()) < offset) {
+            if (CoordMath.getEnd(alignmentBlock.getReadStart(), alignmentBlock.getLength()) < position) {
                 continue;
-            } else if (offset < alignmentBlock.getReadStart()) {
+            } else if (position < alignmentBlock.getReadStart()) {
                 return 0;
             } else {
-                return alignmentBlock.getReferenceStart() + offset - alignmentBlock.getReadStart();
+                return alignmentBlock.getReferenceStart() + position - alignmentBlock.getReadStart();
             }
         }
         return 0; // offset not located in an alignment block
@@ -666,8 +678,9 @@ public class SAMRecord implements Cloneable, Locatable, Serializable {
 
 
     /**
+     * Returns the 1-based position in the read of the 1-based reference position provided.
+     *
      * @param pos 1-based reference position
-     * return the offset
      * @return 1-based (to match getReferencePositionAtReadPosition behavior) inclusive position into the
      * unclipped sequence at a given reference position, or 0 if there is no such position.
      *
@@ -678,37 +691,43 @@ public class SAMRecord implements Cloneable, Locatable, Serializable {
     }
 
     /**
+     * Non-static version of static function with the same name. See examples below.
+     *
      * @param pos 1-based reference position
-     * @param returnLastBaseIfDeleted if positive, and reference position matches a deleted base in the read, function will
-     * return the offset
+     * @param returnLastBaseIfDeleted if positive, and reference position matches a deleted base in the read,
+     *                                function will return the offset
      * @return 1-based (to match getReferencePositionAtReadPosition behavior) inclusive position into the
-     * unclipped sequence at a given reference position,
-     * or 0 if there is no such position. If returnLastBaseIfDeleted is true deletions are assumed to "live" on the last read base
-     * in the preceding block.
-     *
-     * Non-static version of static function with the same name. See examples below.
+     *         unclipped sequence at a given reference position, or 0 if there is no such position. If
+     *         returnLastBaseIfDeleted is true deletions are assumed to "live" on the last read base
+     *         in the preceding block.
      */
     public int getReadPositionAtReferencePosition(final int pos, final boolean returnLastBaseIfDeleted) {
         return getReadPositionAtReferencePosition(this, pos, returnLastBaseIfDeleted);
     }
 
     /**
-     * @param rec record to use
-     * @param pos 1-based reference position
-     * @param returnLastBaseIfDeleted if positive, and reference position matches a deleted base in the read, function will
-     * return the offset
-     * @return 1-based (to match getReferencePositionAtReadPosition behavior) inclusive position into the
-     * unclipped sequence at a given reference position,
-     * or 0 if there is no such position. If returnLastBaseIfDeleted is true deletions are assumed to "live" on the last read base
-     * in the preceding block.
+     * Returns the 1-based position in the read of the provided reference position, or 0 if no
+     * such position exists.
+     *
      * For example, given the sequence NNNAAACCCGGG, cigar 3S9M, and an alignment start of 1,
-     * and a (1-based)pos of 7 (start of GGG) it returns 10 (1-based offset including the soft clip.
+     * and a (1-based) pos of 7 (start of GGG) it returns 10 (1-based position including the soft clip).
+     *
      * For example: given the sequence AAACCCGGGT, cigar 4M1D6M, an alignment start of 1,
-     * a reference position of 4 returns offset of 4, a reference of 5 also returns an offset 4 (using "left aligning") if returnLastBaseIfDeleted
-     * and 0 otherwise.
+     * a reference position of 4 returns read position 4, a reference position of 5 also returns a read
+     * position of 4 if returnLastBaseIfDeleted and 0 otherwise.
+     *
      * For example: given the sequence AAACtCGGGTT, cigar 4M1I6M, an alignment start of 1,
-     * a position 4 returns an offset 5, a position of 5 returns 6 (the inserted base is the 5th offset), a position of 11 returns 0 since
-     * that position in the reference doesn't overlap the read at all.
+     * a position 4 returns a position of 5, a position of 5 returns 6 (the inserted base is the 5th read position),
+     * a position of 11 returns 0 since that position in the reference doesn't overlap the read at all.
+     *
+     * @param rec record to use
+     * @param pos 1-based reference position
+     * @param returnLastBaseIfDeleted if positive, and reference position matches a deleted base in the read,
+     *                                function will return the position of the last non-deleted base
+     * @return 1-based (to match getReferencePositionAtReadPosition behavior) inclusive position into the
+     *         unclipped sequence at a given reference position, or 0 if there is no such position. If
+     *         returnLastBaseIfDeleted is true deletions are assumed to "live" on the last read base
+     *         in the preceding block.
      *
      */
     public static int getReadPositionAtReferencePosition(final SAMRecord rec, final int pos, final boolean returnLastBaseIfDeleted) {
@@ -1098,6 +1117,13 @@ public class SAMRecord implements Cloneable, Locatable, Serializable {
     }
 
     /**
+     * @return {@code true} if the SAM record has the requested attribute set, {@code false} otherwise.
+     */
+    public boolean hasAttribute(final String tag) {
+        return getAttribute(tag)!=null;
+    }
+
+    /**
      * Get the value for a SAM tag.
      * WARNING: Some value types (e.g. byte[]) are mutable.  It is dangerous to change one of these values in
      * place, because some SAMRecord implementations keep track of when attributes have been changed.  If you
@@ -1500,7 +1526,7 @@ public class SAMRecord implements Cloneable, Locatable, Serializable {
      */
     public List<SAMTagAndValue> getAttributes() {
         SAMBinaryTagAndValue binaryAttributes = getBinaryAttributes();
-        final List<SAMTagAndValue> ret = new ArrayList<SAMTagAndValue>();
+        final List<SAMTagAndValue> ret = new ArrayList<>();
         while (binaryAttributes != null) {
             ret.add(new SAMTagAndValue(SAMTagUtil.getSingleton().makeStringTag(binaryAttributes.tag),
                     binaryAttributes.value));
@@ -1750,7 +1776,7 @@ public class SAMRecord implements Cloneable, Locatable, Serializable {
     /**
      * Run all validations of CIGAR.  These include validation that the CIGAR makes sense independent of
      * placement, plus validation that CIGAR + placement yields all bases with M operator within the range of the reference.
-     * @param recordNumber For error reporting.  -1 if not known.
+     * @param recordNumber For error reporting, the record number in the SAM/BAM file.  -1 if not known.
      * @return List of errors, or null if no errors.
      */
     public List<SAMValidationError> validateCigar(final long recordNumber) {
@@ -1859,35 +1885,40 @@ public class SAMRecord implements Cloneable, Locatable, Serializable {
         ArrayList<SAMValidationError> ret = null;
         if (!getReadPairedFlag()) {
             if (getProperPairFlagUnchecked()) {
-                if (ret == null) ret = new ArrayList<SAMValidationError>();
+                if (ret == null) ret = new ArrayList<>();
                 ret.add(new SAMValidationError(SAMValidationError.Type.INVALID_FLAG_PROPER_PAIR, "Proper pair flag should not be set for unpaired read.", getReadName()));
                 if (firstOnly) return ret;
             }
             if (getMateUnmappedFlagUnchecked()) {
-                if (ret == null) ret = new ArrayList<SAMValidationError>();
+                if (ret == null) ret = new ArrayList<>();
                 ret.add(new SAMValidationError(SAMValidationError.Type.INVALID_FLAG_MATE_UNMAPPED, "Mate unmapped flag should not be set for unpaired read.", getReadName()));
                 if (firstOnly) return ret;
             }
             if (getMateNegativeStrandFlagUnchecked()) {
-                if (ret == null) ret = new ArrayList<SAMValidationError>();
+                if (ret == null) ret = new ArrayList<>();
                 ret.add(new SAMValidationError(SAMValidationError.Type.INVALID_FLAG_MATE_NEG_STRAND, "Mate negative strand flag should not be set for unpaired read.", getReadName()));
                 if (firstOnly) return ret;
             }
             if (getFirstOfPairFlagUnchecked()) {
-                if (ret == null) ret = new ArrayList<SAMValidationError>();
+                if (ret == null) ret = new ArrayList<>();
                 ret.add(new SAMValidationError(SAMValidationError.Type.INVALID_FLAG_FIRST_OF_PAIR, "First of pair flag should not be set for unpaired read.", getReadName()));
                 if (firstOnly) return ret;
             }
             if (getSecondOfPairFlagUnchecked()) {
-                if (ret == null) ret = new ArrayList<SAMValidationError>();
+                if (ret == null) ret = new ArrayList<>();
                 ret.add(new SAMValidationError(SAMValidationError.Type.INVALID_FLAG_SECOND_OF_PAIR, "Second of pair flag should not be set for unpaired read.", getReadName()));
                 if (firstOnly) return ret;
             }
             if (null != getHeader() && getMateReferenceIndex() != NO_ALIGNMENT_REFERENCE_INDEX) {
-                if (ret == null) ret = new ArrayList<SAMValidationError>();
+                if (ret == null) ret = new ArrayList<>();
                 ret.add(new SAMValidationError(SAMValidationError.Type.INVALID_MATE_REF_INDEX, "MRNM should not be set for unpaired read.", getReadName()));
                 if (firstOnly) return ret;
             }
+            if (!getMateReferenceName().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME)) {
+                if (ret == null) ret = new ArrayList<>();
+                ret.add(new SAMValidationError(SAMValidationError.Type.INVALID_UNPAIRED_MATE_REFERENCE, "Unpaired read mate reference is " + getMateReferenceName() + " not " + SAMRecord.NO_ALIGNMENT_REFERENCE_NAME + " for unpaired read", getReadName()));
+                if (firstOnly) return ret;
+            }
         } else {
             final List<SAMValidationError> errors = isValidReferenceIndexAndPosition(mMateReferenceIndex, mMateReferenceName,
                     getMateAlignmentStart(), true, firstOnly);
@@ -1918,23 +1949,23 @@ public class SAMRecord implements Cloneable, Locatable, Serializable {
 */
         }
         if (getInferredInsertSize() > MAX_INSERT_SIZE || getInferredInsertSize() < -MAX_INSERT_SIZE) {
-            if (ret == null) ret = new ArrayList<SAMValidationError>();
+            if (ret == null) ret = new ArrayList<>();
             ret.add(new SAMValidationError(SAMValidationError.Type.INVALID_INSERT_SIZE, "Insert size out of range", getReadName()));
             if (firstOnly) return ret;
         }
         if (getReadUnmappedFlag()) {
             if (getNotPrimaryAlignmentFlag()) {
-                if (ret == null) ret = new ArrayList<SAMValidationError>();
+                if (ret == null) ret = new ArrayList<>();
                 ret.add(new SAMValidationError(SAMValidationError.Type.INVALID_FLAG_NOT_PRIM_ALIGNMENT, "Not primary alignment flag should not be set for unmapped read.", getReadName()));
                 if (firstOnly) return ret;
             }
             if (getSupplementaryAlignmentFlag()) {
-                if (ret == null) ret = new ArrayList<SAMValidationError>();
+                if (ret == null) ret = new ArrayList<>();
                 ret.add(new SAMValidationError(SAMValidationError.Type.INVALID_FLAG_SUPPLEMENTARY_ALIGNMENT, "Supplementary alignment flag should not be set for unmapped read.", getReadName()));
                 if (firstOnly) return ret;
             }
             if (getMappingQuality() != 0) {
-                if (ret == null) ret = new ArrayList<SAMValidationError>();
+                if (ret == null) ret = new ArrayList<>();
                 ret.add(new SAMValidationError(SAMValidationError.Type.INVALID_MAPPING_QUALITY, "MAPQ should be 0 for unmapped read.", getReadName()));
                 if (firstOnly) return ret;
             }
@@ -1943,22 +1974,22 @@ public class SAMRecord implements Cloneable, Locatable, Serializable {
             TODO: PIC-97 This validation should be enabled, but probably at this point there are too many
             BAM files that have the proper pair flag set when read or mate is unmapped.
             if (getProperPairFlagUnchecked()) {
-                if (ret == null) ret = new ArrayList<SAMValidationError>();
+                if (ret == null) ret = new ArrayList<>();
                 ret.add(new SAMValidationError(SAMValidationError.Type.INVALID_FLAG_PROPER_PAIR, "Proper pair flag should not be set for unmapped read.", getReadName()));
             }
 */
         } else {
             if (getMappingQuality() >= 256) {
-                if (ret == null) ret = new ArrayList<SAMValidationError>();
+                if (ret == null) ret = new ArrayList<>();
                 ret.add(new SAMValidationError(SAMValidationError.Type.INVALID_MAPPING_QUALITY, "MAPQ should be < 256.", getReadName()));
                 if (firstOnly) return ret;
             }
             if (getCigarLength() == 0) {
-                if (ret == null) ret = new ArrayList<SAMValidationError>();
+                if (ret == null) ret = new ArrayList<>();
                 ret.add(new SAMValidationError(SAMValidationError.Type.INVALID_CIGAR, "CIGAR should have > zero elements for mapped read.", getReadName()));
             /* todo - will uncomment once unit tests are added
             } else if (getCigar().getReadLength() != getReadLength()) {
-                if (ret == null) ret = new ArrayList<SAMValidationError>();
+                if (ret == null) ret = new ArrayList<>();
                 ret.add(new SAMValidationError(SAMValidationError.Type.INVALID_CIGAR, "CIGAR read length " + getCigar().getReadLength() + " doesn't match read length " + getReadLength(), getReadName()));
             */
                 if (firstOnly) return ret;
@@ -1969,7 +2000,7 @@ public class SAMRecord implements Cloneable, Locatable, Serializable {
                 if (firstOnly) return ret;
             }
             if (!hasReferenceName()) {
-                if (ret == null) ret = new ArrayList<SAMValidationError>();
+                if (ret == null) ret = new ArrayList<>();
                 ret.add(new SAMValidationError(SAMValidationError.Type.INVALID_FLAG_READ_UNMAPPED, "Mapped read should have valid reference name", getReadName()));
                 if (firstOnly) return ret;
             }
@@ -1987,14 +2018,14 @@ public class SAMRecord implements Cloneable, Locatable, Serializable {
         // Validate the RG ID is found in header
         final String rgId = (String)getAttribute(SAMTagUtil.getSingleton().RG);
         if (rgId != null && getHeader() != null && getHeader().getReadGroup(rgId) == null) {
-                if (ret == null) ret = new ArrayList<SAMValidationError>();
+                if (ret == null) ret = new ArrayList<>();
                 ret.add(new SAMValidationError(SAMValidationError.Type.READ_GROUP_NOT_FOUND,
                         "RG ID on SAMRecord not found in header: " + rgId, getReadName()));
                 if (firstOnly) return ret;
         }
         final List<SAMValidationError> errors = isValidReferenceIndexAndPosition(mReferenceIndex, mReferenceName, getAlignmentStart(), false);
         if (errors != null) {
-            if (ret == null) ret = new ArrayList<SAMValidationError>();
+            if (ret == null) ret = new ArrayList<>();
             ret.addAll(errors);
             if (firstOnly) return ret;
         }
@@ -2005,7 +2036,7 @@ public class SAMRecord implements Cloneable, Locatable, Serializable {
                 final String cq = (String)getAttribute(SAMTagUtil.getSingleton().CQ);
                 final String cs = (String)getAttribute(SAMTagUtil.getSingleton().CS);
                 if (cq == null || cq.isEmpty() || cs == null || cs.isEmpty()) {
-                    if (ret == null) ret = new ArrayList<SAMValidationError>();
+                    if (ret == null) ret = new ArrayList<>();
                     ret.add(new SAMValidationError(SAMValidationError.Type.EMPTY_READ,
                             "Zero-length read without FZ, CS or CQ tag", getReadName()));
                     if (firstOnly) return ret;
@@ -2019,7 +2050,7 @@ public class SAMRecord implements Cloneable, Locatable, Serializable {
                         }
                     }
                     if (!hasIndel) {
-                        if (ret == null) ret = new ArrayList<SAMValidationError>();
+                        if (ret == null) ret = new ArrayList<>();
                         ret.add(new SAMValidationError(SAMValidationError.Type.EMPTY_READ,
                                 "Colorspace read with zero-length bases but no indel", getReadName()));
                         if (firstOnly) return ret;
@@ -2028,7 +2059,7 @@ public class SAMRecord implements Cloneable, Locatable, Serializable {
             }
         }
         if (this.getReadLength() != getBaseQualities().length &&  !Arrays.equals(getBaseQualities(), NULL_QUALS)) {
-            if (ret == null) ret = new ArrayList<SAMValidationError>();
+            if (ret == null) ret = new ArrayList<>();
             ret.add(new SAMValidationError(SAMValidationError.Type.MISMATCH_READ_LENGTH_AND_QUALS_LENGTH,
                     "Read length does not match quals length", getReadName()));
             if (firstOnly) return ret;
@@ -2036,13 +2067,39 @@ public class SAMRecord implements Cloneable, Locatable, Serializable {
 
         if (this.getAlignmentStart() != NO_ALIGNMENT_START && this.getIndexingBin() != null &&
                 this.computeIndexingBin() != this.getIndexingBin()) {
-            if (ret == null) ret = new ArrayList<SAMValidationError>();
+            if (ret == null) ret = new ArrayList<>();
             ret.add(new SAMValidationError(SAMValidationError.Type.INVALID_INDEXING_BIN,
                     "bin field of BAM record does not equal value computed based on alignment start and end, and length of sequence to which read is aligned",
                     getReadName()));
             if (firstOnly) return ret;
         }
 
+        if (getMateReferenceName().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME) &&
+                getMateAlignmentStart() != SAMRecord.NO_ALIGNMENT_START) {
+            if (ret == null) ret = new ArrayList<>();
+            ret.add(new SAMValidationError(SAMValidationError.Type.INVALID_UNALIGNED_MATE_START,
+                    "The unaligned mate start position is " + getAlignmentStart() + ", should be " + SAMRecord.NO_ALIGNMENT_START,
+                    getReadName()));
+            if (firstOnly) return ret;
+        }
+
+        if (getCigar().getReadLength() != 0 && getCigar().getReadLength() != getReadLength()) {
+            if (ret == null) ret = new ArrayList<>();
+            ret.add(new SAMValidationError(SAMValidationError.Type.MISMATCH_CIGAR_SEQ_LENGTH,
+                    "CIGAR covers " + getCigar().getReadLength() + " bases but the sequence is " + getReadLength() + " read bases ",
+                    getReadName()));
+            if (firstOnly) return ret;
+        }
+
+        if (getBaseQualities().length != 0 && getReadLength() != getBaseQualities().length) {
+            if (ret == null) ret = new ArrayList<>();
+            ret.add(new SAMValidationError(
+                    SAMValidationError.Type.MISMATCH_SEQ_QUAL_LENGTH,
+                    "Read length is  " + getReadLength() + " bases but have " +  mBaseQualities.length + " qualities ",
+                    getReadName()));
+            if (firstOnly) return ret;
+        }
+
         if (ret == null || ret.isEmpty()) {
             return null;
         }
@@ -2080,13 +2137,13 @@ public class SAMRecord implements Cloneable, Locatable, Serializable {
         ArrayList<SAMValidationError> ret = null;
         if (!hasReference) {
             if (alignmentStart != 0) {
-                if (ret == null) ret = new ArrayList<SAMValidationError>();
+                if (ret == null) ret = new ArrayList<>();
                 ret.add(new SAMValidationError(SAMValidationError.Type.INVALID_ALIGNMENT_START, buildMessage("Alignment start should be 0 because reference name = *.", isMate), getReadName()));
                 if (firstOnly) return ret;
             }
         } else {
             if (alignmentStart == 0) {
-                if (ret == null) ret = new ArrayList<SAMValidationError>();
+                if (ret == null) ret = new ArrayList<>();
                 ret.add(new SAMValidationError(SAMValidationError.Type.INVALID_ALIGNMENT_START, buildMessage("Alignment start should != 0 because reference name != *.", isMate), getReadName()));
                 if (firstOnly) return ret;
             }
@@ -2094,12 +2151,12 @@ public class SAMRecord implements Cloneable, Locatable, Serializable {
                 final SAMSequenceRecord sequence =
                         (referenceIndex != null? getHeader().getSequence(referenceIndex): getHeader().getSequence(referenceName));
                 if (sequence == null) {
-                    if (ret == null) ret = new ArrayList<SAMValidationError>();
+                    if (ret == null) ret = new ArrayList<>();
                     ret.add(new SAMValidationError(SAMValidationError.Type.INVALID_REFERENCE_INDEX, buildMessage("Reference sequence not found in sequence dictionary.", isMate), getReadName()));
                     if (firstOnly) return ret;
                 } else {
                     if (alignmentStart > sequence.getSequenceLength()) {
-                        if (ret == null) ret = new ArrayList<SAMValidationError>();
+                        if (ret == null) ret = new ArrayList<>();
                         ret.add(new SAMValidationError(SAMValidationError.Type.INVALID_ALIGNMENT_START, buildMessage("Alignment start (" + alignmentStart + ") must be <= reference sequence length (" +
                                 sequence.getSequenceLength() + ") on reference " + sequence.getSequenceName(), isMate), getReadName()));
                         if (firstOnly) return ret;
diff --git a/src/main/java/htsjdk/samtools/SAMRecordCoordinateComparator.java b/src/main/java/htsjdk/samtools/SAMRecordCoordinateComparator.java
index e8887bc..fe054b4 100644
--- a/src/main/java/htsjdk/samtools/SAMRecordCoordinateComparator.java
+++ b/src/main/java/htsjdk/samtools/SAMRecordCoordinateComparator.java
@@ -43,6 +43,7 @@ import java.io.Serializable;
 public class SAMRecordCoordinateComparator implements SAMRecordComparator, Serializable {
     private static final long serialVersionUID = 1L;
 
+    @Override
     public int compare(final SAMRecord samRecord1, final SAMRecord samRecord2) {
         int cmp = fileOrderCompare(samRecord1, samRecord2);
         if (cmp != 0) {
@@ -83,6 +84,7 @@ public class SAMRecordCoordinateComparator implements SAMRecordComparator, Seria
      *
      * @return negative if samRecord1 < samRecord2,  0 if equal, else positive
      */
+    @Override
     public int fileOrderCompare(final SAMRecord samRecord1, final SAMRecord samRecord2) {
 
         if (null == samRecord1.getHeader() || null == samRecord2.getHeader()) {
diff --git a/src/main/java/htsjdk/samtools/SAMRecordDuplicateComparator.java b/src/main/java/htsjdk/samtools/SAMRecordDuplicateComparator.java
index 4ed2bb5..436ba3c 100644
--- a/src/main/java/htsjdk/samtools/SAMRecordDuplicateComparator.java
+++ b/src/main/java/htsjdk/samtools/SAMRecordDuplicateComparator.java
@@ -220,6 +220,7 @@ public class SAMRecordDuplicateComparator implements SAMRecordComparator, Serial
      * If both reads are paired and both ends mapped, always prefer the first end over the second end.  This is needed to
      * properly choose the first end for optical duplicate identification when both ends are mapped to the same position etc. 
      */ 
+    @Override
     public int compare(final SAMRecord samRecord1, final SAMRecord samRecord2) {
         populateTransientAttributes(samRecord1, samRecord2);
         int cmp;
@@ -357,6 +358,7 @@ public class SAMRecordDuplicateComparator implements SAMRecordComparator, Serial
     /**
      * Less stringent than duplicateSetCompare, such that two records are equal enough such that their ordering in a sorted SAM file would be arbitrary.
      */
+    @Override
     public int fileOrderCompare(final SAMRecord samRecord1, final SAMRecord samRecord2) {
         return fileOrderCompare(samRecord1, samRecord2, false, true);
     }
diff --git a/src/main/java/htsjdk/samtools/SAMRecordQueryNameComparator.java b/src/main/java/htsjdk/samtools/SAMRecordQueryNameComparator.java
index 7fd97f5..d2f7cde 100644
--- a/src/main/java/htsjdk/samtools/SAMRecordQueryNameComparator.java
+++ b/src/main/java/htsjdk/samtools/SAMRecordQueryNameComparator.java
@@ -31,6 +31,7 @@ import java.io.Serializable;
 public class SAMRecordQueryNameComparator implements SAMRecordComparator, Serializable {
     private static final long serialVersionUID = 1L;
 
+    @Override
     public int compare(final SAMRecord samRecord1, final SAMRecord samRecord2) {
         int cmp = fileOrderCompare(samRecord1, samRecord2);
         if (cmp != 0) {
@@ -75,6 +76,7 @@ public class SAMRecordQueryNameComparator implements SAMRecordComparator, Serial
      *
      * @return negative if samRecord1 < samRecord2,  0 if equal, else positive
      */
+    @Override
     public int fileOrderCompare(final SAMRecord samRecord1, final SAMRecord samRecord2) {
         return compareReadNames(samRecord1.getReadName(), samRecord2.getReadName());
     }
diff --git a/src/main/java/htsjdk/samtools/SAMRecordSetBuilder.java b/src/main/java/htsjdk/samtools/SAMRecordSetBuilder.java
index 2af91c3..b55265f 100644
--- a/src/main/java/htsjdk/samtools/SAMRecordSetBuilder.java
+++ b/src/main/java/htsjdk/samtools/SAMRecordSetBuilder.java
@@ -193,16 +193,21 @@ public class SAMRecordSetBuilder implements Iterable<SAMRecord> {
     }
 
     /** Returns a CloseableIterator over the collection of SAMRecords. */
+    @Override
     public CloseableIterator<SAMRecord> iterator() {
         return new CloseableIterator<SAMRecord>() {
             private final Iterator<SAMRecord> iterator = records.iterator();
 
+            @Override
             public void close() { /** Do nothing. */}
 
+            @Override
             public boolean hasNext() { return this.iterator.hasNext(); }
 
+            @Override
             public SAMRecord next() { return this.iterator.next(); }
 
+            @Override
             public void remove() { this.iterator.remove(); }
         };
     }
@@ -358,13 +363,8 @@ public class SAMRecordSetBuilder implements Iterable<SAMRecord> {
         end1.setMappingQuality(255);
         end1.setReadPairedFlag(true);
         end1.setProperPairFlag(true);
-        end1.setMateReferenceIndex(contig);
-        end1.setAttribute(SAMTag.MC.name(), readLength + "M");
-        end1.setMateAlignmentStart(start2);
-        end1.setMateNegativeStrandFlag(true);
         end1.setFirstOfPairFlag(end1IsFirstOfPair);
         end1.setSecondOfPairFlag(!end1IsFirstOfPair);
-        end1.setInferredInsertSize((int) CoordMath.getLength(start1, CoordMath.getEnd(start2, this.readLength)));
         end1.setAttribute(SAMTag.RG.name(), READ_GROUP_ID);
         if (programRecord != null) {
             end1.setAttribute(SAMTag.PG.name(), programRecord.getProgramGroupId());
@@ -383,13 +383,8 @@ public class SAMRecordSetBuilder implements Iterable<SAMRecord> {
         end2.setMappingQuality(255);
         end2.setReadPairedFlag(true);
         end2.setProperPairFlag(true);
-        end2.setMateReferenceIndex(contig);
-        end2.setAttribute(SAMTag.MC.name(), readLength + "M");
-        end2.setMateAlignmentStart(start1);
-        end2.setMateNegativeStrandFlag(false);
         end2.setFirstOfPairFlag(!end1IsFirstOfPair);
         end2.setSecondOfPairFlag(end1IsFirstOfPair);
-        end2.setInferredInsertSize(end1.getInferredInsertSize());
         end2.setAttribute(SAMTag.RG.name(), READ_GROUP_ID);
         if (programRecord != null) {
             end2.setAttribute(SAMTag.PG.name(), programRecord.getProgramGroupId());
@@ -399,6 +394,9 @@ public class SAMRecordSetBuilder implements Iterable<SAMRecord> {
         }
         fillInBasesAndQualities(end2);
 
+        // set mate info
+        SamPairUtil.setMateInfo(end1, end2, true);
+
         this.records.add(end1);
         this.records.add(end2);
     }
@@ -487,7 +485,7 @@ public class SAMRecordSetBuilder implements Iterable<SAMRecord> {
             end1.setAttribute(SAMTag.PG.name(), programRecord.getProgramGroupId());
         }
         if (this.unmappedHasBasesAndQualities) {
-        fillInBasesAndQualities(end1);
+            fillInBasesAndQualities(end1);
         }
 
         end2.setReadName(name);
@@ -503,7 +501,7 @@ public class SAMRecordSetBuilder implements Iterable<SAMRecord> {
             end2.setAttribute(SAMTag.PG.name(), programRecord.getProgramGroupId());
         }
         if (this.unmappedHasBasesAndQualities) {
-        fillInBasesAndQualities(end2);
+            fillInBasesAndQualities(end2);
         }
 
         this.records.add(end1);
diff --git a/src/main/java/htsjdk/samtools/SAMRecordUtil.java b/src/main/java/htsjdk/samtools/SAMRecordUtil.java
index d778789..9435934 100644
--- a/src/main/java/htsjdk/samtools/SAMRecordUtil.java
+++ b/src/main/java/htsjdk/samtools/SAMRecordUtil.java
@@ -23,23 +23,28 @@
  */
 package htsjdk.samtools;
 
-import htsjdk.samtools.util.SequenceUtil;
-import htsjdk.samtools.util.StringUtil;
-
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
 
 /**
  *
- * Use {@link SAMRecord#reverseComplement()} instead, which defaults to making a copy of attributes for reverse
- * complement rather than changing them in-place.
- *
  * @author alecw at broadinstitute.org
+ *
+ * @deprecated 10/27/2016 Use {@link SAMRecord} constants and functions
  */
 @Deprecated
 public class SAMRecordUtil {
+    /**
+     * @deprecated 6/5/2017 Use {@link SAMRecord#TAGS_TO_REVERSE_COMPLEMENT}
+     */
+    @Deprecated
     public static List<String> TAGS_TO_REVERSE_COMPLEMENT = Arrays.asList(SAMTag.E2.name(), SAMTag.SQ.name());
+
+    /**
+     * @deprecated 6/5/2017 Use {@link SAMRecord#TAGS_TO_REVERSE}
+     */
+    @Deprecated
     public static List<String> TAGS_TO_REVERSE            = Arrays.asList(SAMTag.OQ.name(), SAMTag.U2.name());
 
     /**
@@ -48,7 +53,11 @@ public class SAMRecordUtil {
      * or attributes. If a copy is needed use {@link #reverseComplement(SAMRecord, boolean)}.
      * See {@link #TAGS_TO_REVERSE_COMPLEMENT} {@link #TAGS_TO_REVERSE}
      * for the default set of tags that are handled.
+     *
+     * @deprecated  6/5/2017 Use {@link SAMRecord#reverseComplement} but note that the default behavior there is different
+     * It will default to making a copy, not reverse-complementing in-place!
      */
+    @Deprecated
     public static void reverseComplement(final SAMRecord rec) {
         rec.reverseComplement(TAGS_TO_REVERSE_COMPLEMENT, TAGS_TO_REVERSE, true);
     }
@@ -61,7 +70,10 @@ public class SAMRecordUtil {
      *
      * @param rec Record to reverse complement.
      * @param inplace Setting this to false will clone all attributes, bases and qualities before changing the values.
+     *
+     * @deprecated  6/5/2017 Use {@link SAMRecord#reverseComplement}
      */
+    @Deprecated
     public static void reverseComplement(final SAMRecord rec, boolean inplace) {
         rec.reverseComplement(TAGS_TO_REVERSE_COMPLEMENT, TAGS_TO_REVERSE, inplace);
     }
@@ -70,7 +82,10 @@ public class SAMRecordUtil {
      * Reverse complement bases and reverse quality scores. In addition reverse complement any
      * non-null attributes specified by tagsToRevcomp and reverse and non-null attributes
      * specified by tagsToReverse.
+     *
+     * @deprecated  6/5/2017 Use {@link SAMRecord#reverseComplement}
      */
+    @Deprecated
     public static void reverseComplement(final SAMRecord rec, final Collection<String> tagsToRevcomp, final Collection<String> tagsToReverse, boolean inplace) {
         rec.reverseComplement(tagsToRevcomp, tagsToReverse, inplace);
     }
diff --git a/src/main/java/htsjdk/samtools/SAMSequenceDictionary.java b/src/main/java/htsjdk/samtools/SAMSequenceDictionary.java
index b7744d7..86ffa6c 100644
--- a/src/main/java/htsjdk/samtools/SAMSequenceDictionary.java
+++ b/src/main/java/htsjdk/samtools/SAMSequenceDictionary.java
@@ -29,7 +29,6 @@ import java.io.Serializable;
 import java.math.BigInteger;
 import java.security.MessageDigest;
 import java.util.*;
-import java.util.stream.Collector;
 import java.util.stream.Collectors;
 
 import javax.xml.bind.annotation.XmlElement;
@@ -50,8 +49,8 @@ public class SAMSequenceDictionary implements Serializable {
     getter because the later wraps the list into an unmodifiable List 
     see http://tech.joshuacummings.com/2010/10/problems-with-defensive-collection.html */
     @XmlElement(name="Reference")
-    private List<SAMSequenceRecord> mSequences = new ArrayList<SAMSequenceRecord>();
-    private final Map<String, SAMSequenceRecord> mSequenceMap = new HashMap<String, SAMSequenceRecord>();
+    private List<SAMSequenceRecord> mSequences = new ArrayList<>();
+    private final Map<String, SAMSequenceRecord> mSequenceMap = new HashMap<>();
 
     public SAMSequenceDictionary() {
     }
@@ -150,7 +149,7 @@ public class SAMSequenceDictionary implements Serializable {
     private static String DICT_MISMATCH_TEMPLATE = "SAM dictionaries are not the same: %s.";
     /**
      * Non-comprehensive {@link #equals(Object)}-assertion: instead of calling {@link SAMSequenceRecord#equals(Object)} on constituent
-     * {@link SAMSequenceRecord}s in this dictionary against its pair in the target dictionary, in order,  call
+     * {@link SAMSequenceRecord}s in this dictionary against its pair in the target dictionary, in order, call
      * {@link SAMSequenceRecord#isSameSequence(SAMSequenceRecord)}.
      * Aliases are ignored.
      *
@@ -161,20 +160,49 @@ public class SAMSequenceDictionary implements Serializable {
 
         final Iterator<SAMSequenceRecord> thatSequences = that.mSequences.iterator();
         for (final SAMSequenceRecord thisSequence : mSequences) {
-            if (!thatSequences.hasNext())
+            if (!thatSequences.hasNext()) {
                 throw new AssertionError(String.format(DICT_MISMATCH_TEMPLATE, thisSequence + " is present in only one dictionary"));
-            else {
+            } else {
                 final SAMSequenceRecord thatSequence = thatSequences.next();
-                if(!thatSequence.isSameSequence(thisSequence))
+                if(!thatSequence.isSameSequence(thisSequence)) {
                     throw new AssertionError(
                             String.format(DICT_MISMATCH_TEMPLATE, thatSequence + " was found when " + thisSequence + " was expected")
                     );
+                }
             }
         }
         if (thatSequences.hasNext())
             throw new AssertionError(String.format(DICT_MISMATCH_TEMPLATE, thatSequences.next() + " is present in only one dictionary"));
     }
 
+    /**
+     * Non-comprehensive {@link #equals(Object)}-validation: instead of calling {@link SAMSequenceRecord#equals(Object)} on constituent
+     * {@link SAMSequenceRecord}s in this dictionary against its pair in the target dictionary, in order, call
+     * {@link SAMSequenceRecord#isSameSequence(SAMSequenceRecord)}.
+     *
+     * @param that {@link SAMSequenceDictionary} to compare against
+     * @return true if the dictionaries are the same, false otherwise
+     *
+     */
+    public boolean isSameDictionary(final SAMSequenceDictionary that) {
+        if (that == null || that.mSequences == null) return false;
+        if (this == that) return true;
+
+        final Iterator<SAMSequenceRecord> thatSequences = that.mSequences.iterator();
+        for (final SAMSequenceRecord thisSequence : mSequences) {
+            if (!thatSequences.hasNext()) {
+                return false;
+            } else {
+                final SAMSequenceRecord thatSequence = thatSequences.next();
+                if (!thatSequence.isSameSequence(thisSequence)) {
+                    return false;
+                }
+            }
+        }
+
+        return !thatSequences.hasNext();
+    }
+
     /** returns true if the two dictionaries are the same, aliases are NOT considered */
     @Override
     public boolean equals(Object o) {
@@ -183,9 +211,7 @@ public class SAMSequenceDictionary implements Serializable {
 
         SAMSequenceDictionary that = (SAMSequenceDictionary) o;
 
-        if (!mSequences.equals(that.mSequences)) return false;
-
-        return true;
+       return mSequences.equals(that.mSequences);
     }
 
     /**
@@ -318,8 +344,8 @@ public class SAMSequenceDictionary implements Serializable {
             finalDict.addSequence(sMerged);
 
             final Set<String> allTags = new HashSet<>();
-            s1.getAttributes().stream().forEach(a -> allTags.add(a.getKey()));
-            s2.getAttributes().stream().forEach(a -> allTags.add(a.getKey()));
+            s1.getAttributes().forEach(a -> allTags.add(a.getKey()));
+            s2.getAttributes().forEach(a -> allTags.add(a.getKey()));
 
             for (final String tag : allTags) {
                 final String value1 = s1.getAttribute(tag);
diff --git a/src/main/java/htsjdk/samtools/SAMSequenceRecord.java b/src/main/java/htsjdk/samtools/SAMSequenceRecord.java
index 6bca979..a4b4df2 100644
--- a/src/main/java/htsjdk/samtools/SAMSequenceRecord.java
+++ b/src/main/java/htsjdk/samtools/SAMSequenceRecord.java
@@ -23,6 +23,9 @@
  */
 package htsjdk.samtools;
 
+import javax.xml.bind.annotation.XmlAttribute;
+import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.XmlValue;
 import java.math.BigInteger;
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -32,10 +35,6 @@ import java.util.Map;
 import java.util.Set;
 import java.util.regex.Pattern;
 
-import javax.xml.bind.annotation.XmlAttribute;
-import javax.xml.bind.annotation.XmlRootElement;
-import javax.xml.bind.annotation.XmlValue;
-
 /**
  * Header information about a reference sequence.  Corresponds to @SQ header record in SAM text header.
  */
@@ -194,10 +193,12 @@ public class SAMSequenceRecord extends AbstractSAMHeaderRecord implements Clonea
         return mSequenceName != null ? mSequenceName.hashCode() : 0;
     }
 
+    @Override
     Set<String> getStandardTags() {
         return STANDARD_TAGS;
     }
 
+    @Override
     public final SAMSequenceRecord clone() {
         final SAMSequenceRecord ret = new SAMSequenceRecord(this.mSequenceName, this.mSequenceLength);
         ret.mSequenceIndex = this.mSequenceIndex;
@@ -244,5 +245,10 @@ public class SAMSequenceRecord extends AbstractSAMHeaderRecord implements Clonea
                 getAssembly()
         );
     }
+
+    @Override
+    public String getSAMString() {
+        return new SAMTextHeaderCodec().getSQLine(this);
+    }
 }
 
diff --git a/src/main/java/htsjdk/samtools/SAMTestUtil.java b/src/main/java/htsjdk/samtools/SAMTestUtil.java
index 83766f3..ec85ce2 100644
--- a/src/main/java/htsjdk/samtools/SAMTestUtil.java
+++ b/src/main/java/htsjdk/samtools/SAMTestUtil.java
@@ -23,6 +23,8 @@
  */
 package htsjdk.samtools;
 
+import java.util.List;
+
 /**
  * Misc methods for SAM-related unit tests.  These are in the src tree rather than the tests tree
  * so that they will be included in sam.jar, and therefore can be used by tests outside of htsjdk.samtools.
@@ -55,47 +57,21 @@ public class SAMTestUtil {
     }
 
     /**
-     * Basic sanity check for a SAMRecord.
-     * @throws SanityCheckFailedException if the sanity check failed
+     * Basic sanity check for a SAMRecord. Print errors to screen.
+     * @param read SAM record
+     * @throws IllegalArgumentException if read is null
+     * @throws SanityCheckFailedException if errors
      */
-    public void assertReadValid(final SAMRecord read) throws SanityCheckFailedException {
-        assertEquals(read.getReadBases().length, read.getBaseQualities().length);
-        // Note that it is possible to have an unmapped read that has a coordinate
-        if (read.getReferenceName().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME)) {
-            assertEquals(read.getAlignmentStart(), SAMRecord.NO_ALIGNMENT_START);
-            assertTrue(read.getReadUnmappedFlag());
-        } else {
-            assertNotSame(read.getAlignmentStart(), SAMRecord.NO_ALIGNMENT_START);
-        }
-        if (read.getReadUnmappedFlag()) {
-            assertEquals(read.getMappingQuality(), SAMRecord.NO_MAPPING_QUALITY);
-            assertEquals(read.getCigar().getCigarElements().size(), 0);
-        } else {
-            assertNotSame(read.getCigar().getCigarElements(), 0);
+    public static void assertReadValid(final SAMRecord read) throws SanityCheckFailedException {
+        if (read == null) {
+            throw new IllegalArgumentException("SAMRecord is null");
         }
-        if (read.getReadPairedFlag()) {
-            if (read.getMateReferenceName().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME)) {
-                assertEquals(read.getMateAlignmentStart(), SAMRecord.NO_ALIGNMENT_START);
-                assertTrue(read.getMateUnmappedFlag());
-            } else {
-                // Even if the mate is unmapped, if it has a reference name, it should have a position.
-                assertNotSame(read.getMateAlignmentStart(), SAMRecord.NO_ALIGNMENT_START);
-            }
-            if (read.getReadUnmappedFlag() || read.getMateUnmappedFlag() ||
-                    !read.getReferenceName().equals(read.getMateReferenceName())) {
-                assertEquals(read.getInferredInsertSize(), 0);
-            } else {
-                assertNotSame(read.getInferredInsertSize(), 0);
-            }
-            if (!read.getReadUnmappedFlag() && !read.getMateUnmappedFlag()) {
-                assertNotSame(read.getReadNegativeStrandFlag(), read.getMateNegativeStrandFlag());
-                assertNotSame(read.getMateNegativeStrandFlag(),
-                        read.getReadName());
-            }
 
-        } else {
-            assertEquals(read.getInferredInsertSize(), 0);
+        final List<SAMValidationError> errors = read.isValid(false);
+        if ( errors != null) {
+            errors.forEach(v -> System.out.println(v.toString()));
         }
+        assertTrue(errors.isEmpty());
     }
 
     private static <T> void assertEquals(T a, T b) {
diff --git a/src/main/java/htsjdk/samtools/SAMTextHeaderCodec.java b/src/main/java/htsjdk/samtools/SAMTextHeaderCodec.java
index fb4b02a..908e836 100644
--- a/src/main/java/htsjdk/samtools/SAMTextHeaderCodec.java
+++ b/src/main/java/htsjdk/samtools/SAMTextHeaderCodec.java
@@ -228,6 +228,25 @@ public class SAMTextHeaderCodec {
         if (!parsedHeaderLine.requireTag(SAMFileHeader.VERSION_TAG)) {
             return;
         }
+
+        final String soString = parsedHeaderLine.getValue(SAMFileHeader.SORT_ORDER_TAG);
+        try {
+            if (soString != null) SAMFileHeader.SortOrder.valueOf(soString);
+        } catch (IllegalArgumentException e) {
+            reportErrorParsingLine(HEADER_LINE_START + parsedHeaderLine.getHeaderRecordType() +
+                            " line has non-conforming SO tag value: "+ soString + ".",
+                    SAMValidationError.Type.HEADER_TAG_NON_CONFORMING_VALUE, null);
+        }
+
+        final String goString = parsedHeaderLine.getValue(SAMFileHeader.GROUP_ORDER_TAG);
+        try {
+            if (goString != null) SAMFileHeader.GroupOrder.valueOf(goString);
+        } catch (IllegalArgumentException e) {
+            reportErrorParsingLine(HEADER_LINE_START + parsedHeaderLine.getHeaderRecordType() +
+                            " line has non-conforming GO tag value: "+ goString + ".",
+                    SAMValidationError.Type.HEADER_TAG_NON_CONFORMING_VALUE, null);
+        }
+
         transferAttributes(mFileHeader, parsedHeaderLine.mKeyValuePairs);
     }
 
@@ -429,22 +448,27 @@ public class SAMTextHeaderCodec {
     }
 
     private void writePGLine(final SAMProgramRecord programRecord) {
-        if (programRecord == null) {
-            return;
-        }
+        println(getPGLine(programRecord));
+    }
+
+    protected String getPGLine(final SAMProgramRecord programRecord) {
         final String[] fields = new String[2 + programRecord.getAttributes().size()];
         fields[0] = HEADER_LINE_START + HeaderRecordType.PG;
         fields[1] = SAMProgramRecord.PROGRAM_GROUP_ID_TAG + TAG_KEY_VALUE_SEPARATOR + programRecord.getProgramGroupId();
         encodeTags(programRecord, fields, 2);
-        println(StringUtil.join(FIELD_SEPARATOR, fields));
+        return StringUtil.join(FIELD_SEPARATOR, fields);
     }
 
     private void writeRGLine(final SAMReadGroupRecord readGroup) {
-        final String[] fields = new String[2 + readGroup.getAttributes().size()];
-        fields[0] = HEADER_LINE_START + HeaderRecordType.RG;
-        fields[1] = SAMReadGroupRecord.READ_GROUP_ID_TAG + TAG_KEY_VALUE_SEPARATOR + readGroup.getReadGroupId();
-        encodeTags(readGroup, fields, 2);
-        println(StringUtil.join(FIELD_SEPARATOR, fields));
+        println(getRGLine(readGroup));
+    }
+    
+    protected String getRGLine(final SAMReadGroupRecord readGroup) {
+      final String[] fields = new String[2 + readGroup.getAttributes().size()];
+      fields[0] = HEADER_LINE_START + HeaderRecordType.RG;
+      fields[1] = SAMReadGroupRecord.READ_GROUP_ID_TAG + TAG_KEY_VALUE_SEPARATOR + readGroup.getReadGroupId();
+      encodeTags(readGroup, fields, 2);
+      return StringUtil.join(FIELD_SEPARATOR, fields);
     }
 
     private void writeHDLine(final boolean keepExistingVersionNumber) {
@@ -470,13 +494,17 @@ public class SAMTextHeaderCodec {
     }
 
     private void writeSQLine(final SAMSequenceRecord sequenceRecord) {
+        println(getSQLine(sequenceRecord));
+    }
+
+    protected String getSQLine(final SAMSequenceRecord sequenceRecord) {
         final int numAttributes = sequenceRecord.getAttributes() != null ? sequenceRecord.getAttributes().size() : 0;
         final String[] fields = new String[3 + numAttributes];
         fields[0] = HEADER_LINE_START + HeaderRecordType.SQ;
         fields[1] = SAMSequenceRecord.SEQUENCE_NAME_TAG + TAG_KEY_VALUE_SEPARATOR + sequenceRecord.getSequenceName();
         fields[2] = SAMSequenceRecord.SEQUENCE_LENGTH_TAG + TAG_KEY_VALUE_SEPARATOR + Integer.toString(sequenceRecord.getSequenceLength());
         encodeTags(sequenceRecord, fields, 3);
-        println(StringUtil.join(FIELD_SEPARATOR, fields));
+        return StringUtil.join(FIELD_SEPARATOR, fields);
     }
 
     /**
diff --git a/src/main/java/htsjdk/samtools/SAMTextReader.java b/src/main/java/htsjdk/samtools/SAMTextReader.java
index 3968f1c..62f8717 100644
--- a/src/main/java/htsjdk/samtools/SAMTextReader.java
+++ b/src/main/java/htsjdk/samtools/SAMTextReader.java
@@ -79,22 +79,27 @@ class SAMTextReader extends SamReader.ReaderImplementation {
      *
      * @param enabled true to write source information into each SAMRecord.
      */
+    @Override
     public void enableFileSource(final SamReader reader, final boolean enabled) {
         this.mParentReader = enabled ? reader : null;
     }
 
+    @Override
     void enableIndexCaching(final boolean enabled) {
         throw new UnsupportedOperationException("Cannot enable index caching for a SAM text reader");
     }
 
+    @Override
     void enableIndexMemoryMapping(final boolean enabled) {
         throw new UnsupportedOperationException("Cannot enable index memory mapping for a SAM text reader");
     }
 
+    @Override
     void enableCrcChecking(final boolean enabled) {
         // Do nothing - this has no meaning for SAM reading
     }
 
+    @Override
     void setSAMRecordFactory(final SAMRecordFactory factory) {
         this.samRecordFactory = factory;
     }
@@ -104,14 +109,17 @@ class SAMTextReader extends SamReader.ReaderImplementation {
         return SamReader.Type.SAM_TYPE;
     }
 
+    @Override
     public boolean hasIndex() {
         return false;
     }
 
+    @Override
     public BAMIndex getIndex() {
         throw new UnsupportedOperationException();
     }
 
+    @Override
     public void close() {
         if (mReader != null) {
             try {
@@ -122,14 +130,17 @@ class SAMTextReader extends SamReader.ReaderImplementation {
         }
     }
 
+    @Override
     public SAMFileHeader getFileHeader() {
         return mFileHeader;
     }
 
+    @Override
     public ValidationStringency getValidationStringency() {
         return validationStringency;
     }
 
+    @Override
     public void setValidationStringency(final ValidationStringency stringency) {
         this.validationStringency = stringency;
     }
@@ -141,6 +152,7 @@ class SAMTextReader extends SamReader.ReaderImplementation {
      *
      * @return Iterator of SAMRecords in file order.
      */
+    @Override
     public CloseableIterator<SAMRecord> getIterator() {
         if (mReader == null) {
             throw new IllegalStateException("File reader is closed");
@@ -158,6 +170,7 @@ class SAMTextReader extends SamReader.ReaderImplementation {
      * @param fileSpan The file span.
      * @return An iterator over the given file span.
      */
+    @Override
     public CloseableIterator<SAMRecord> getIterator(final SAMFileSpan fileSpan) {
         throw new UnsupportedOperationException("Cannot directly iterate over regions within SAM text files.");
     }
@@ -167,6 +180,7 @@ class SAMTextReader extends SamReader.ReaderImplementation {
      *
      * @return An pointer to the first read in the file.
      */
+    @Override
     public SAMFileSpan getFilePointerSpanningReads() {
         throw new UnsupportedOperationException("Cannot retrieve file pointers within SAM text files.");
     }
@@ -186,10 +200,12 @@ class SAMTextReader extends SamReader.ReaderImplementation {
     /**
      * Unsupported for SAM text files.
      */
+    @Override
     public CloseableIterator<SAMRecord> queryAlignmentStart(final String sequence, final int start) {
         throw new UnsupportedOperationException("Cannot query SAM text files");
     }
 
+    @Override
     public CloseableIterator<SAMRecord> queryUnmapped() {
         throw new UnsupportedOperationException("Cannot query SAM text files");
     }
@@ -220,14 +236,17 @@ class SAMTextReader extends SamReader.ReaderImplementation {
             }
         }
 
+        @Override
         public void close() {
             SAMTextReader.this.close();
         }
 
+        @Override
         public boolean hasNext() {
             return mCurrentLine != null;
         }
 
+        @Override
         public SAMRecord next() {
             if (!hasNext()) {
                 throw new IllegalStateException("Cannot call next() on exhausted iterator");
@@ -239,6 +258,7 @@ class SAMTextReader extends SamReader.ReaderImplementation {
             }
         }
 
+        @Override
         public void remove() {
             throw new UnsupportedOperationException("Not supported: remove");
         }
diff --git a/src/main/java/htsjdk/samtools/SAMTextWriter.java b/src/main/java/htsjdk/samtools/SAMTextWriter.java
index 0786d67..70dd4a2 100644
--- a/src/main/java/htsjdk/samtools/SAMTextWriter.java
+++ b/src/main/java/htsjdk/samtools/SAMTextWriter.java
@@ -122,6 +122,7 @@ public class SAMTextWriter extends SAMFileWriterImpl {
      *
      * @param alignment SAMRecord.
      */
+    @Override
     public void writeAlignment(final SAMRecord alignment) {
         try {
             out.write(alignment.getReadName());
@@ -188,6 +189,7 @@ public class SAMTextWriter extends SAMFileWriterImpl {
      *
      * @param textHeader String containing the text to write.
      */
+    @Override
     public void writeHeader(final String textHeader) {
         try {
             out.write(textHeader);
@@ -199,6 +201,7 @@ public class SAMTextWriter extends SAMFileWriterImpl {
     /**
      * Do any required flushing here.
      */
+    @Override
     public void finish() {
         try {
             out.close();
@@ -212,6 +215,7 @@ public class SAMTextWriter extends SAMFileWriterImpl {
      *
      * @return Output filename, or null if there isn't one.
      */
+    @Override
     public String getFilename() {
         if (file == null) {
             return null;
diff --git a/src/main/java/htsjdk/samtools/SAMTools.java b/src/main/java/htsjdk/samtools/SAMTools.java
index 551f846..911198e 100644
--- a/src/main/java/htsjdk/samtools/SAMTools.java
+++ b/src/main/java/htsjdk/samtools/SAMTools.java
@@ -31,7 +31,10 @@ import java.io.File;
 
 /**
  * Command line utility for manipulating SAM/BAM files.
+ * @deprecated since 07/2017. This class does not add anything to the HTSJDK library except an example of how to iterate over a SAM/BAM file.
+ * In addition, it is not tested.
  */
+ at Deprecated
 public class SAMTools {
     private String mCommand = null;
     private File mInputFile = null;
diff --git a/src/main/java/htsjdk/samtools/SAMUtils.java b/src/main/java/htsjdk/samtools/SAMUtils.java
index 25b6799..5b81de9 100644
--- a/src/main/java/htsjdk/samtools/SAMUtils.java
+++ b/src/main/java/htsjdk/samtools/SAMUtils.java
@@ -43,14 +43,17 @@ import java.util.Map;
 import java.util.TreeMap;
 import java.util.regex.Pattern;
 
-
 /**
  * Utilty methods.
  */
 public final class SAMUtils {
-    /** regex for semicolon, used in {@link SAMUtils#getOtherCanonicalAlignments(SAMRecord)} */
+    /**
+     * regex for semicolon, used in {@link SAMUtils#getOtherCanonicalAlignments(SAMRecord)}
+     */
     private static final Pattern SEMICOLON_PAT = Pattern.compile("[;]");
-    /** regex for comma, used in {@link SAMUtils#getOtherCanonicalAlignments(SAMRecord)} */
+    /**
+     * regex for comma, used in {@link SAMUtils#getOtherCanonicalAlignments(SAMRecord)}
+     */
     private static final Pattern COMMA_PAT = Pattern.compile("[,]");
 
     // Representation of bases, one for when in low-order nybble, one for when in high-order nybble.
@@ -87,32 +90,31 @@ public final class SAMUtils {
     private static final byte COMPRESSED_K_HIGH = (byte) (COMPRESSED_K_LOW << 4);
     private static final byte COMPRESSED_D_HIGH = (byte) (COMPRESSED_D_LOW << 4);
     private static final byte COMPRESSED_B_HIGH = (byte) (COMPRESSED_B_LOW << 4);
-    
-    private static final byte [] COMPRESSED_LOOKUP_TABLE = 
-            new byte[]{
-                '=',
-                'A',
-                'C',
-                'M',
-                'G',
-                'R',
-                'S',
-                'V',
-                'T',
-                'W',
-                'Y',
-                'H',
-                'K',
-                'D',
-                'B',
-                'N'
-            };
-    
+
+    private static final byte[] COMPRESSED_LOOKUP_TABLE = {
+            '=',
+            'A',
+            'C',
+            'M',
+            'G',
+            'R',
+            'S',
+            'V',
+            'T',
+            'W',
+            'Y',
+            'H',
+            'K',
+            'D',
+            'B',
+            'N'
+    };
+
     public static final int MAX_PHRED_SCORE = 93;
 
     /**
-     * Convert from a byte array containing =AaCcGgTtNn represented as ASCII, to a byte array half as long,
-     * with =, A, C, G, T converted to 0, 1, 2, 4, 8, 15.
+     * Convert from a byte array containing =AaCcGgTtNnMmRrSsVvWwYyHhKkDdBb represented as ASCII, to a byte array half as long,
+     * with for example, =, A, C, G, T converted to 0, 1, 2, 4, 8, 15.
      *
      * @param readBases Bases as ASCII bytes.
      * @return New byte array with bases represented as nybbles, in BAM binary format.
@@ -126,17 +128,17 @@ public final class SAMUtils {
         }
         // Last nybble
         if (i == readBases.length) {
-            compressedBases[i / 2] = charToCompressedBaseHigh((char) readBases[i - 1]);
+            compressedBases[i / 2] = charToCompressedBaseHigh(readBases[i - 1]);
         }
         return compressedBases;
     }
 
     /**
-     * Convert from a byte array with basese stored in nybbles, with =, A, C, G, T represented as 0, 1, 2, 4, 8, 15,
+     * Convert from a byte array with bases stored in nybbles, with for example,=, A, C, G, T, N represented as 0, 1, 2, 4, 8, 15,
      * to a a byte array containing =AaCcGgTtNn represented as ASCII.
      *
-     * @param length Number of bases (not bytes) to convert.
-     * @param compressedBases Bases represented as nybbles, in BAM binary format.
+     * @param length           Number of bases (not bytes) to convert.
+     * @param compressedBases  Bases represented as nybbles, in BAM binary format.
      * @param compressedOffset Byte offset in compressedBases to start.
      * @return New byte array with bases as ASCII bytes.
      */
@@ -158,10 +160,11 @@ public final class SAMUtils {
     /**
      * Convert from ASCII byte to BAM nybble representation of a base in low-order nybble.
      *
-     * @param base One of =AaCcGgTtNn.
+     * @param base One of =AaCcGgTtNnMmRrSsVvWwYyHhKkDdBb.
      * @return Low-order nybble-encoded equivalent.
+     * @throws IllegalArgumentException if the base is not one of =AaCcGgTtNnMmRrSsVvWwYyHhKkDdBb.
      */
-    private static byte charToCompressedBaseLow(final int base) {
+    private static byte charToCompressedBaseLow(final byte base) {
         switch (base) {
             case '=':
                 return COMPRESSED_EQUAL_LOW;
@@ -214,17 +217,18 @@ public final class SAMUtils {
             case 'b':
                 return COMPRESSED_B_LOW;
             default:
-                throw new IllegalArgumentException("Bad  byte passed to charToCompressedBase: " + base);
+                throw new IllegalArgumentException("Bad base passed to charToCompressedBaseLow: " + Character.toString((char) base) + "(" + base + ")");
         }
     }
 
     /**
      * Convert from ASCII byte to BAM nybble representation of a base in high-order nybble.
      *
-     * @param base One of =AaCcGgTtNn.
+     * @param base One of =AaCcGgTtNnMmRrSsVvWwYyHhKkDdBb.
      * @return High-order nybble-encoded equivalent.
+     * @throws IllegalArgumentException if the base is not one of =AaCcGgTtNnMmRrSsVvWwYyHhKkDdBb.
      */
-    private static byte charToCompressedBaseHigh(final int base) {
+    private static byte charToCompressedBaseHigh(final byte base) {
         switch (base) {
             case '=':
                 return COMPRESSED_EQUAL_HIGH;
@@ -277,20 +281,22 @@ public final class SAMUtils {
             case 'b':
                 return COMPRESSED_B_HIGH;
             default:
-                throw new IllegalArgumentException("Bad  byte passed to charToCompressedBase: " + base);
+                throw new IllegalArgumentException("Bad base passed to charToCompressedBaseHigh: " + Character.toString((char) base) + "(" + base + ")");
         }
     }
-    
+
     /**
      * Returns the byte corresponding to a certain nybble
+     *
      * @param base One of COMPRESSED_*_LOW, a low-order nybble encoded base.
-     * @return ASCII base, one of ACGTN=.
+     * @return ASCII base, one of =ACGTNMRSVWYHKDB.
+     * @throws IllegalArgumentException if the base is not one of =ACGTNMRSVWYHKDB.
      */
-    private static byte compressedBaseToByte(byte base){
-        try{
+    private static byte compressedBaseToByte(byte base) {
+        try {
             return COMPRESSED_LOOKUP_TABLE[base];
-        }catch(IndexOutOfBoundsException e){
-            throw new IllegalArgumentException("Bad  byte passed to charToCompressedBase: " + base);
+        } catch (IndexOutOfBoundsException e) {
+            throw new IllegalArgumentException("Bad base passed to charToCompressedBase: " + Character.toString((char) base) + "(" + base + ")");
         }
     }
 
@@ -301,7 +307,7 @@ public final class SAMUtils {
      * @return ASCII base, one of ACGTN=.
      */
     private static byte compressedBaseToByteLow(final int base) {
-        return compressedBaseToByte((byte)(base & 0xf));
+        return compressedBaseToByte((byte) (base & 0xf));
     }
 
     /**
@@ -311,13 +317,13 @@ public final class SAMUtils {
      * @return ASCII base, one of ACGTN=.
      */
     private static byte compressedBaseToByteHigh(final int base) {
-        return compressedBaseToByte((byte)((base >> 4) & 0xf));
+        return compressedBaseToByte((byte) ((base >> 4) & 0xf));
     }
 
     /**
      * Convert bases in place into canonical form, upper case, and with no-call represented as N.
      *
-     * @param bases
+     * @param bases byte array of bases to "normalize", in place.
      */
     static void normalizeBases(final byte[] bases) {
         for (int i = 0; i < bases.length; ++i) {
@@ -431,11 +437,11 @@ public final class SAMUtils {
     /**
      * Handle a list of validation errors according to the validation stringency.
      *
-     * @param validationErrors List of errors to report, or null if there are no errors.
-     * @param samRecordIndex Record number of the SAMRecord corresponding to the validation errors, or -1 if
-     * the record number is not known.
+     * @param validationErrors     List of errors to report, or null if there are no errors.
+     * @param samRecordIndex       Record number of the SAMRecord corresponding to the validation errors, or -1 if
+     *                             the record number is not known.
      * @param validationStringency If STRICT, throw a SAMFormatException.  If LENIENT, print the validation
-     * errors to stderr.  If SILENT, do nothing.
+     *                             errors to stderr.  If SILENT, do nothing.
      */
     public static void processValidationErrors(final List<SAMValidationError> validationErrors,
                                                final long samRecordIndex,
@@ -461,11 +467,10 @@ public final class SAMUtils {
         } else if (validationStringency == ValidationStringency.LENIENT) {
             System.err.println("Ignoring SAM validation error: " + validationError);
         }
-
     }
 
     private static final SAMHeaderRecordComparator<SAMReadGroupRecord> HEADER_RECORD_COMPARATOR =
-            new SAMHeaderRecordComparator<SAMReadGroupRecord>(
+            new SAMHeaderRecordComparator<>(
                     SAMReadGroupRecord.PLATFORM_UNIT_TAG,
                     SAMReadGroupRecord.LIBRARY_TAG,
                     SAMReadGroupRecord.DATE_RUN_PRODUCED_TAG,
@@ -473,7 +478,8 @@ public final class SAMUtils {
                     SAMReadGroupRecord.SEQUENCING_CENTER_TAG,
                     SAMReadGroupRecord.PLATFORM_TAG,
                     SAMReadGroupRecord.DESCRIPTION_TAG,
-                    SAMReadGroupRecord.READ_GROUP_ID_TAG    // We don't actually want to compare with ID but it's suitable
+                    SAMReadGroupRecord.READ_GROUP_ID_TAG
+                    // We don't actually want to compare with ID but it's suitable
                     // "just in case" since it's the only one that's actually required
             );
 
@@ -494,11 +500,11 @@ public final class SAMUtils {
 
         // Sort the read group records by their first
         final SamReader reader = SamReaderFactory.makeDefault().referenceSequence(referenceFasta).open(input);
-        final List<SAMReadGroupRecord> sortedRecords = new ArrayList<SAMReadGroupRecord>(reader.getFileHeader().getReadGroups());
+        final List<SAMReadGroupRecord> sortedRecords = new ArrayList<>(reader.getFileHeader().getReadGroups());
         Collections.sort(sortedRecords, HEADER_RECORD_COMPARATOR);
 
         for (final SAMReadGroupRecord rgRecord : sortedRecords) {
-            final TreeMap<String, String> sortedAttributes = new TreeMap<String, String>();
+            final TreeMap<String, String> sortedAttributes = new TreeMap<>();
             for (final Map.Entry<String, String> attributeEntry : rgRecord.getAttributes()) {
                 sortedAttributes.put(attributeEntry.getKey(), attributeEntry.getValue());
             }
@@ -536,7 +542,7 @@ public final class SAMUtils {
 
         final List<SAMProgramRecord> pgs = header.getProgramRecords();
         if (!pgs.isEmpty()) {
-            final List<String> referencedIds = new ArrayList<String>();
+            final List<String> referencedIds = new ArrayList<>();
             for (final SAMProgramRecord pg : pgs) {
                 if (pg.getPreviousProgramGroupId() != null) {
                     referencedIds.add(pg.getPreviousProgramGroupId());
@@ -557,7 +563,7 @@ public final class SAMUtils {
 
     /**
      * Strip mapping information from a SAMRecord.
-     *
+     * <p>
      * WARNING: by clearing the secondary and supplementary flags,
      * this may have the affect of producing multiple distinct records with the
      * same read name and flags, which may lead to invalid SAM/BAM output.
@@ -565,7 +571,7 @@ public final class SAMUtils {
      */
     public static void makeReadUnmapped(final SAMRecord rec) {
         if (rec.getReadNegativeStrandFlag()) {
-            SAMRecordUtil.reverseComplement(rec);
+            rec.reverseComplement(true);
             rec.setReadNegativeStrandFlag(false);
         }
         rec.setDuplicateReadFlag(false);
@@ -619,13 +625,13 @@ public final class SAMUtils {
     /**
      * Tests if the provided record is mapped entirely beyond the end of the reference (i.e., the alignment start is greater than the
      * length of the sequence to which the record is mapped).
+     *
      * @param record must not have a null SamFileHeader
      */
     public static boolean recordMapsEntirelyBeyondEndOfReference(final SAMRecord record) {
         if (record.getHeader() == null) {
             throw new SAMException("A non-null SAMHeader is required to resolve the mapping position: " + record.getReadName());
-        }
-        else {
+        } else {
             return record.getHeader().getSequence(record.getReferenceIndex()).getSequenceLength() < record.getAlignmentStart();
         }
     }
@@ -643,7 +649,6 @@ public final class SAMUtils {
         else return mapq1 - mapq2;
     }
 
-
     /**
      * Hokey algorithm for combining two MAPQs into values that are comparable, being cognizant of the fact
      * that in MAPQ world, 1 > 255 > 0. In this algorithm, 255 is treated as if it were 0.01, so that
@@ -652,11 +657,17 @@ public final class SAMUtils {
      * invocations of this method.
      */
     public static int combineMapqs(int m1, int m2) {
-        if (m1 == 255) m1 = 1;
-        else m1 *= 100;
+        if (m1 == 255) {
+            m1 = 1;
+        } else {
+            m1 *= 100;
+        }
 
-        if (m2 == 255) m2 = 1;
-        else m2 *= 100;
+        if (m2 == 255) {
+            m2 = 1;
+        } else {
+            m2 *= 100;
+        }
 
         return m1 + m2;
 
@@ -679,15 +690,15 @@ public final class SAMUtils {
      * reference sequence. Note that clipped portions, and inserted and deleted bases (vs. the reference)
      * are not represented in the alignment blocks.
      *
-     * @param cigar The cigar containing the alignment information
+     * @param cigar          The cigar containing the alignment information
      * @param alignmentStart The start (1-based) of the alignment
-     * @param cigarTypeName The type of cigar passed - for error logging.
+     * @param cigarTypeName  The type of cigar passed - for error logging.
      * @return List of alignment blocks
      */
     public static List<AlignmentBlock> getAlignmentBlocks(final Cigar cigar, final int alignmentStart, final String cigarTypeName) {
         if (cigar == null) return Collections.emptyList();
 
-        final List<AlignmentBlock> alignmentBlocks = new ArrayList<AlignmentBlock>();
+        final List<AlignmentBlock> alignmentBlocks = new ArrayList<>();
         int readBase = 1;
         int refBase = alignmentStart;
 
@@ -718,7 +729,7 @@ public final class SAMUtils {
                     refBase += length;
                     break;
                 default:
-                    throw new IllegalStateException("Case statement didn't deal with " + cigarTypeName + " op: " + e.getOperator());
+                    throw new IllegalStateException("Case statement didn't deal with " + cigarTypeName + " op: " + e.getOperator() + "in CIGAR: " + cigar);
             }
         }
         return Collections.unmodifiableList(alignmentBlocks);
@@ -726,7 +737,7 @@ public final class SAMUtils {
 
     /**
      * @param alignmentStart The start (1-based) of the alignment
-     * @param cigar The cigar containing the alignment information
+     * @param cigar          The cigar containing the alignment information
      * @return the alignment start (1-based, inclusive) adjusted for clipped bases.  For example if the read
      * has an alignment start of 100 but the first 4 bases were clipped (hard or soft clipped)
      * then this method will return 96.
@@ -750,7 +761,7 @@ public final class SAMUtils {
 
     /**
      * @param alignmentEnd The end (1-based) of the alignment
-     * @param cigar The cigar containing the alignment information
+     * @param cigar        The cigar containing the alignment information
      * @return the alignment end (1-based, inclusive) adjusted for clipped bases.  For example if the read
      * has an alignment end of 100 but the last 7 bases were clipped (hard or soft clipped)
      * then this method will return 107.
@@ -788,7 +799,7 @@ public final class SAMUtils {
     /**
      * Returns the Mate Cigar or null if there is none.
      *
-     * @param rec the SAM record
+     * @param rec            the SAM record
      * @param withValidation true if we are to validate the mate cigar before returning, false otherwise.
      * @return Cigar object for the read's mate, or null if there is none.
      */
@@ -832,11 +843,11 @@ public final class SAMUtils {
      */
     public static int getMateAlignmentEnd(final SAMRecord rec) {
         if (rec.getMateUnmappedFlag()) {
-            throw new RuntimeException("getMateAlignmentEnd called on an unmapped mate.");
+            throw new RuntimeException("getMateAlignmentEnd called on an unmapped mate: " + rec);
         }
         final Cigar mateCigar = SAMUtils.getMateCigar(rec);
         if (mateCigar == null) {
-            throw new SAMException("Mate CIGAR (Tag MC) not found.");
+            throw new SAMException("Mate CIGAR (Tag MC) not found:" + rec);
         }
         return CoordMath.getEnd(rec.getMateAlignmentStart(), mateCigar.getReferenceLength());
     }
@@ -851,15 +862,14 @@ public final class SAMUtils {
      */
     public static int getMateUnclippedStart(final SAMRecord rec) {
         if (rec.getMateUnmappedFlag())
-            throw new RuntimeException("getMateUnclippedStart called on an unmapped mate.");
+            throw new RuntimeException("getMateUnclippedStart called on an unmapped mate: " + rec);
         final Cigar mateCigar = getMateCigar(rec);
         if (mateCigar == null) {
-            throw new SAMException("Mate CIGAR (Tag MC) not found.");
+            throw new SAMException("Mate CIGAR (Tag MC) not found: " + rec);
         }
         return SAMUtils.getUnclippedStart(rec.getMateAlignmentStart(), mateCigar);
     }
 
-
     /**
      * @param rec the SAM record
      * @return the mate alignment end (1-based, inclusive) adjusted for clipped bases.  For example if the mate
@@ -870,20 +880,20 @@ public final class SAMUtils {
      */
     public static int getMateUnclippedEnd(final SAMRecord rec) {
         if (rec.getMateUnmappedFlag()) {
-            throw new RuntimeException("getMateUnclippedEnd called on an unmapped mate.");
+            throw new RuntimeException("getMateUnclippedEnd called on an unmapped mate: " + rec);
         }
         final Cigar mateCigar = SAMUtils.getMateCigar(rec);
         if (mateCigar == null) {
-            throw new SAMException("Mate CIGAR (Tag MC) not found.");
+            throw new SAMException("Mate CIGAR (Tag MC) not found: " + rec);
         }
         return SAMUtils.getUnclippedEnd(getMateAlignmentEnd(rec), mateCigar);
     }
 
     /**
      * @param rec the SAM record
-     * Returns blocks of the mate sequence that have been aligned directly to the
-     * reference sequence. Note that clipped portions of the mate and inserted and
-     * deleted bases (vs. the reference) are not represented in the alignment blocks.
+     *            Returns blocks of the mate sequence that have been aligned directly to the
+     *            reference sequence. Note that clipped portions of the mate and inserted and
+     *            deleted bases (vs. the reference) are not represented in the alignment blocks.
      */
     public static List<AlignmentBlock> getMateAlignmentBlocks(final SAMRecord rec) {
         return getAlignmentBlocks(getMateCigar(rec), rec.getMateAlignmentStart(), "mate cigar");
@@ -893,12 +903,12 @@ public final class SAMUtils {
      * Run all validations of the mate's CIGAR.  These include validation that the CIGAR makes sense independent of
      * placement, plus validation that CIGAR + placement yields all bases with M operator within the range of the reference.
      *
-     * @param rec the SAM record
-     * @param cigar The cigar containing the alignment information
-     * @param referenceIndex The reference index
+     * @param rec             the SAM record
+     * @param cigar           The cigar containing the alignment information
+     * @param referenceIndex  The reference index
      * @param alignmentBlocks The alignment blocks (parsed from the cigar)
-     * @param recordNumber For error reporting.  -1 if not known.
-     * @param cigarTypeName For error reporting.  "Read CIGAR" or "Mate Cigar"
+     * @param recordNumber    For error reporting.  -1 if not known.
+     * @param cigarTypeName   For error reporting.  "Read CIGAR" or "Mate Cigar"
      * @return List of errors, or null if no errors.
      */
 
@@ -913,16 +923,15 @@ public final class SAMUtils {
         if (referenceIndex != SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX) {
             SAMFileHeader samHeader = rec.getHeader();
             if (null == samHeader) {
-                if (ret == null) ret = new ArrayList<SAMValidationError>();
+                if (ret == null) ret = new ArrayList<>();
                 ret.add(new SAMValidationError(SAMValidationError.Type.MISSING_HEADER,
                         cigarTypeName + " A non-null SAMHeader is required to validate cigar elements for: ", rec.getReadName(), recordNumber));
-            }
-            else {
+            } else {
                 final SAMSequenceRecord sequence = samHeader.getSequence(referenceIndex);
                 final int referenceSequenceLength = sequence.getSequenceLength();
                 for (final AlignmentBlock alignmentBlock : alignmentBlocks) {
                     if (alignmentBlock.getReferenceStart() + alignmentBlock.getLength() - 1 > referenceSequenceLength) {
-                        if (ret == null) ret = new ArrayList<SAMValidationError>();
+                        if (ret == null) ret = new ArrayList<>();
                         ret.add(new SAMValidationError(SAMValidationError.Type.CIGAR_MAPS_OFF_REFERENCE,
                                 cigarTypeName + " M operator maps off end of reference", rec.getReadName(), recordNumber));
                         break;
@@ -937,7 +946,7 @@ public final class SAMUtils {
      * Run all validations of the mate's CIGAR.  These include validation that the CIGAR makes sense independent of
      * placement, plus validation that CIGAR + placement yields all bases with M operator within the range of the reference.
      *
-     * @param rec the SAM record
+     * @param rec          the SAM record
      * @param recordNumber For error reporting.  -1 if not known.
      * @return List of errors, or null if no errors.
      */
@@ -951,7 +960,7 @@ public final class SAMUtils {
                 }
             } else {
                 if (getMateCigarString(rec) != null) {
-                    ret = new ArrayList<SAMValidationError>();
+                    ret = new ArrayList<>();
                     if (!rec.getReadPairedFlag()) {
                         // If the read is not paired, and the Mate Cigar String (MC Attribute) exists, that is a validation error
                         ret.add(new SAMValidationError(SAMValidationError.Type.MATE_CIGAR_STRING_INVALID_PRESENCE,
@@ -981,11 +990,11 @@ public final class SAMUtils {
     }
 
     /**
-     * Returns a string that is the the read group ID and read name separated by a colon.  This is meant to cannonically
+     * Returns a string that is the the read group ID and read name separated by a colon.  This is meant to canonically
      * identify a given record within a set of records.
      *
-     * @param record
-     * @return
+     * @param record SAMRecord for which "canonical" read name is requested
+     * @return The record's readgroup-id (if non-null) and the read name, separated by a colon, ':'
      */
     public static String getCanonicalRecordName(final SAMRecord record) {
         String name = record.getStringAttribute(ReservedTagConstants.READ_GROUP_ID);
@@ -999,7 +1008,7 @@ public final class SAMUtils {
      * or the given record's start position is greater than its mate's start position, zero is automatically returned.
      * NB: This method assumes that the record's mate is not contained within the given record's alignment.
      *
-     * @param rec
+     * @param rec SAMRecord that needs clipping due to overlapping pairs.
      * @return the number of bases at the end of the read that need to be clipped such that there would be no overlapping bases with its mate.
      * Read bases include only those from insertion, match, or mismatch Cigar operators.
      */
@@ -1010,7 +1019,8 @@ public final class SAMUtils {
 
         // Only clip records that are left-most in genomic order and overlapping.
         if (rec.getMateAlignmentStart() < rec.getAlignmentStart()) return 0; // right-most, so ignore.
-        else if (rec.getMateAlignmentStart() == rec.getAlignmentStart() && rec.getFirstOfPairFlag()) return 0; // same start, so pick the first end
+        else if (rec.getMateAlignmentStart() == rec.getAlignmentStart() && rec.getFirstOfPairFlag())
+            return 0; // same start, so pick the first end
 
         // Find the number of read bases after the given mate's alignment start.
         int numBasesToClip = 0;
@@ -1023,12 +1033,11 @@ public final class SAMUtils {
             if (refStartPos <= refPos + refBasesLength - 1) { // add to clipped bases
                 if (operator == CigarOperator.MATCH_OR_MISMATCH) { // M
                     if (refStartPos < refPos) numBasesToClip += refBasesLength; // use all of the bases
-                    else numBasesToClip += (refPos + refBasesLength) - refStartPos;  // since the mate's alignment start can be in the middle of a cigar element
-                }
-                else if (operator == CigarOperator.SOFT_CLIP || operator == CigarOperator.HARD_CLIP || operator == CigarOperator.PADDING || operator == CigarOperator.SKIPPED_REGION) {
+                    else
+                        numBasesToClip += (refPos + refBasesLength) - refStartPos;  // since the mate's alignment start can be in the middle of a cigar element
+                } else if (operator == CigarOperator.SOFT_CLIP || operator == CigarOperator.HARD_CLIP || operator == CigarOperator.PADDING || operator == CigarOperator.SKIPPED_REGION) {
                     // ignore
-                }
-                else { // ID
+                } else { // ID
                     numBasesToClip += operator.consumesReadBases() ? el.getLength() : 0; // clip all the bases in the read from this operator
                 }
             }
@@ -1041,14 +1050,14 @@ public final class SAMUtils {
     }
 
     /**
-     * Returns a (possibly new) record that has been clipped if isa  mapped paired and has overlapping bases with its mate.
+     * Returns a (possibly new) record that has been clipped if input is a mapped paired and has overlapping bases with its mate.
      * See {@link #getNumOverlappingAlignedBasesToClip(SAMRecord)} for how the number of overlapping bases is computed.
      * NB: this does not properly consider a cigar like: 100M20S10H.
      * NB: This method assumes that the record's mate is not contained within the given record's alignment.
      *
-     * @param record the record from which to clip bases.
+     * @param record        the record from which to clip bases.
      * @param noSideEffects if true a modified clone of the original record is returned, otherwise we modify the record directly.
-     * @return
+     * @return a (possibly new) record that has been clipped
      */
     public static SAMRecord clipOverlappingAlignedBases(final SAMRecord record, final boolean noSideEffects) {
         return clipOverlappingAlignedBases(record, getNumOverlappingAlignedBasesToClip(record), noSideEffects);
@@ -1060,18 +1069,20 @@ public final class SAMUtils {
      * NB: this does not properly consider a cigar like: 100M20S10H.
      * NB: This method assumes that the record's mate is not contained within the given record's alignment.
      *
-     * @param record the record from which to clip bases.
+     * @param record                    the record from which to clip bases.
      * @param numOverlappingBasesToClip the number of bases to clip at the end of the read.
-     * @param noSideEffects if true a modified clone of the original record is returned, otherwise we modify the record directly.
-     * @return
+     * @param noSideEffects             if true a modified clone of the original record is returned, otherwise we modify the record directly.
+     * @return Returns a (possibly new) SAMRecord with the given number of bases soft-clipped
      */
     public static SAMRecord clipOverlappingAlignedBases(final SAMRecord record, final int numOverlappingBasesToClip, final boolean noSideEffects) {
         // NB: ignores how to handle supplemental records when present for both ends by just using the mate information in the record.
 
-        if (numOverlappingBasesToClip <= 0 || record.getReadUnmappedFlag() || record.getMateUnmappedFlag()) return record;
+        if (numOverlappingBasesToClip <= 0 || record.getReadUnmappedFlag() || record.getMateUnmappedFlag()) {
+            return record;
+        }
 
         try {
-            final SAMRecord rec = noSideEffects ? ((SAMRecord)record.clone()) : record;
+            final SAMRecord rec = noSideEffects ? ((SAMRecord) record.clone()) : record;
 
             // watch out for when the second read overlaps all of the first read
             if (rec.getMateAlignmentStart() <= rec.getAlignmentStart()) { // make it unmapped
@@ -1082,7 +1093,7 @@ public final class SAMUtils {
             // 1-based index of first base in read to clip.
             int clipFrom = rec.getReadLength() - numOverlappingBasesToClip + 1;
             // we have to check if the last cigar element is soft-clipping, so we can subtract that from clipFrom
-            final CigarElement cigarElement = rec.getCigar().getCigarElement(rec.getCigarLength()-1);
+            final CigarElement cigarElement = rec.getCigar().getCigarElement(rec.getCigarLength() - 1);
             if (CigarOperator.SOFT_CLIP == cigarElement.getOperator()) clipFrom -= cigarElement.getLength();
             // FIXME: does not properly consider a cigar like: 100M20S10H
 
@@ -1108,100 +1119,102 @@ public final class SAMUtils {
      * Extract a List of 'other canonical alignments' from a SAM record. Those alignments are stored as a string in the 'SA' tag as defined
      * in the SAM specification.
      * The name, sequence and qualities, mate data are copied from the original record.
+     *
      * @param record must be non null and must have a non-null associated header.
      * @return a list of 'other canonical alignments' SAMRecords. The list is empty if the 'SA' attribute is missing.
      */
     public static List<SAMRecord> getOtherCanonicalAlignments(final SAMRecord record) {
-        if( record == null ) throw new IllegalArgumentException("record is null");
-        if( record.getHeader() == null ) throw new IllegalArgumentException("record.getHeader() is null");
+        if (record == null) throw new IllegalArgumentException("record is null");
+        if (record.getHeader() == null) throw new IllegalArgumentException("record.getHeader() is null");
         /* extract value of SA tag */
-        final Object saValue = record.getAttribute( SAMTagUtil.getSingleton().SA );
-        if( saValue == null ) return Collections.emptyList();
-        if( ! (saValue instanceof String) ) throw new SAMException(
-                "Expected a String for attribute 'SA' but got " + saValue.getClass() );
+        final Object saValue = record.getAttribute(SAMTagUtil.getSingleton().SA);
+        if (saValue == null) return Collections.emptyList();
+        if (!(saValue instanceof String)) throw new SAMException(
+                "Expected a String for attribute 'SA' but got " + saValue.getClass() + ". Record: " + record);
 
         final SAMRecordFactory samReaderFactory = new DefaultSAMRecordFactory();
 
         /* the spec says: "Other canonical alignments in a chimeric alignment, formatted as a
          * semicolon-delimited list: (rname,pos,strand,CIGAR,mapQ,NM;)+.
          * Each element in the list represents a part of the chimeric alignment.
-         * Conventionally, at a supplementary line, the  1rst element points to the primary line.
+         * Conventionally, at a supplementary line, the 1st element points to the primary line.
          */
 
         /* break string using semicolon */
-        final String semiColonStrs[] = SEMICOLON_PAT.split((String)saValue);
+        final String semiColonStrs[] = SEMICOLON_PAT.split((String) saValue);
 
         /* the result list */
-        final List<SAMRecord> alignments = new ArrayList<>( semiColonStrs.length );
+        final List<SAMRecord> alignments = new ArrayList<>(semiColonStrs.length);
 
         /* base SAM flag */
-        int record_flag = record.getFlags() ;
+        int record_flag = record.getFlags();
         record_flag &= ~SAMFlag.PROPER_PAIR.flag;
         record_flag &= ~SAMFlag.SUPPLEMENTARY_ALIGNMENT.flag;
         record_flag &= ~SAMFlag.READ_REVERSE_STRAND.flag;
 
-
-        for(int i=0; i< semiColonStrs.length;++i  ) {
+        for (int i = 0; i < semiColonStrs.length; ++i) {
             final String semiColonStr = semiColonStrs[i];
             /* ignore empty string */
-            if( semiColonStr.isEmpty() ) continue;
+            if (semiColonStr.isEmpty()) continue;
 
             /* break string using comma */
             final String commaStrs[] = COMMA_PAT.split(semiColonStr);
-            if( commaStrs.length != 6 )  throw new SAMException("Bad 'SA' attribute in " + semiColonStr);
+            if (commaStrs.length != 6)
+                throw new SAMException("Bad 'SA' attribute in " + semiColonStr + ". Record: " + record);
 
             /* create the new record */
-            final SAMRecord otherRec = samReaderFactory.createSAMRecord( record.getHeader() );
+            final SAMRecord otherRec = samReaderFactory.createSAMRecord(record.getHeader());
 
             /* copy fields from the original record */
-            otherRec.setReadName( record.getReadName() );
-            otherRec.setReadBases( record.getReadBases() );
-            otherRec.setBaseQualities( record.getBaseQualities() );
-            if( record.getReadPairedFlag() && !record.getMateUnmappedFlag()) {
-                otherRec.setMateReferenceIndex( record.getMateReferenceIndex() );
-                otherRec.setMateAlignmentStart( record.getMateAlignmentStart() );
+            otherRec.setReadName(record.getReadName());
+            otherRec.setReadBases(record.getReadBases());
+            otherRec.setBaseQualities(record.getBaseQualities());
+            if (record.getReadPairedFlag() && !record.getMateUnmappedFlag()) {
+                otherRec.setMateReferenceIndex(record.getMateReferenceIndex());
+                otherRec.setMateAlignmentStart(record.getMateAlignmentStart());
             }
 
 
             /* get reference sequence */
-            final int tid = record.getHeader().getSequenceIndex( commaStrs[0] );
-            if( tid == -1 ) throw new SAMException("Unknown contig in " + semiColonStr);
-            otherRec.setReferenceIndex( tid );
+            final int tid = record.getHeader().getSequenceIndex(commaStrs[0]);
+            if (tid == -1)
+                throw new SAMException("Unknown contig in " + semiColonStr + ". Record: " + record);
+            otherRec.setReferenceIndex(tid);
 
             /* fill POS */
             final int alignStart;
             try {
                 alignStart = Integer.parseInt(commaStrs[1]);
-            } catch( final NumberFormatException err ) {
-                throw new SAMException("bad POS in "+semiColonStr, err);
+            } catch (final NumberFormatException err) {
+                throw new SAMException("bad POS in " + semiColonStr + ". Record: " + record, err);
             }
 
-            otherRec.setAlignmentStart( alignStart );
+            otherRec.setAlignmentStart(alignStart);
 
             /* set TLEN */
-            if( record.getReadPairedFlag() &&
-                !record.getMateUnmappedFlag() &&
-                record.getMateReferenceIndex() == tid ) {
-                otherRec.setInferredInsertSize( record.getMateAlignmentStart() - alignStart );
+            if (record.getReadPairedFlag() &&
+                    !record.getMateUnmappedFlag() &&
+                    record.getMateReferenceIndex() == tid) {
+                otherRec.setInferredInsertSize(record.getMateAlignmentStart() - alignStart);
             }
 
             /* set FLAG */
-           int other_flag = record_flag;
-           other_flag |= (commaStrs[2].equals("+") ? 0 : SAMFlag.READ_REVERSE_STRAND.flag) ;
+            int other_flag = record_flag;
+            other_flag |= (commaStrs[2].equals("+") ? 0 : SAMFlag.READ_REVERSE_STRAND.flag);
            /* spec: Conventionally, at a supplementary line, the  1st element points to the primary line */
-           if( !( record.getSupplementaryAlignmentFlag() && i==0 ) ) {
-               other_flag |= SAMFlag.SUPPLEMENTARY_ALIGNMENT.flag;
-           }
-           otherRec.setFlags(other_flag);
+            if (!(record.getSupplementaryAlignmentFlag() && i == 0)) {
+                other_flag |= SAMFlag.SUPPLEMENTARY_ALIGNMENT.flag;
+            }
+            otherRec.setFlags(other_flag);
 
            /* set CIGAR */
-           otherRec.setCigar( TextCigarCodec.decode( commaStrs[3] ) );
+            otherRec.setCigar(TextCigarCodec.decode(commaStrs[3]));
 
             /* set MAPQ */
             try {
-                otherRec.setMappingQuality( Integer.parseInt(commaStrs[4]) );
+                otherRec.setMappingQuality(Integer.parseInt(commaStrs[4]));
             } catch (final NumberFormatException err) {
-                throw new SAMException("bad MAPQ in "+semiColonStr, err);
+                throw new SAMException("bad MAPQ in " + semiColonStr + ". Record: " + record, err);
             }
 
             /* fill NM */
@@ -1210,16 +1223,16 @@ public final class SAMUtils {
                     otherRec.setAttribute(SAMTagUtil.getSingleton().NM, Integer.parseInt(commaStrs[5]));
                 }
             } catch (final NumberFormatException err) {
-                throw new SAMException("bad NM in "+semiColonStr, err);
+                throw new SAMException("bad NM in " + semiColonStr + ". Record: " + record, err);
             }
 
             /* if strand is not the same: reverse-complement */
-            if( otherRec.getReadNegativeStrandFlag() != record.getReadNegativeStrandFlag() ) {
-                SAMRecordUtil.reverseComplement(otherRec);
+            if (otherRec.getReadNegativeStrandFlag() != record.getReadNegativeStrandFlag()) {
+                otherRec.reverseComplement(true);
             }
 
             /* add the alignment */
-            alignments.add( otherRec );
+            alignments.add(otherRec);
         }
         return alignments;
     }
diff --git a/src/main/java/htsjdk/samtools/SAMValidationError.java b/src/main/java/htsjdk/samtools/SAMValidationError.java
index d560b11..edd49c1 100644
--- a/src/main/java/htsjdk/samtools/SAMValidationError.java
+++ b/src/main/java/htsjdk/samtools/SAMValidationError.java
@@ -171,6 +171,9 @@ public class SAMValidationError implements Serializable {
 
         HEADER_RECORD_MISSING_REQUIRED_TAG,
 
+        /** Header tag contains illegal value */
+        HEADER_TAG_NON_CONFORMING_VALUE,
+
         /** Date string is not ISO-8601 */
         INVALID_DATE_STRING(Severity.WARNING),
 
@@ -205,7 +208,22 @@ public class SAMValidationError implements Serializable {
         MISMATCH_MATE_CIGAR_STRING,
 
         /** There is a Cigar String (stored in the MC Tag) for a read whose mate is NOT mapped. */
-        MATE_CIGAR_STRING_INVALID_PRESENCE;
+        MATE_CIGAR_STRING_INVALID_PRESENCE,
+
+        /** The mate reference of the unpaired read should be "*" */
+        INVALID_UNPAIRED_MATE_REFERENCE,
+
+        /** The unaligned mate read start position should be 0 */
+        INVALID_UNALIGNED_MATE_START,
+
+        /** Mismatch between the number of bases covered by the CIGAR and sequence */
+        MISMATCH_CIGAR_SEQ_LENGTH,
+
+        /** Mismatch between the sequence and quality length */
+        MISMATCH_SEQ_QUAL_LENGTH,
+
+        /** Mismatch between file and sequence dictionaries */
+        MISMATCH_FILE_SEQ_DICT;
 
         public final Severity severity;
 
diff --git a/src/main/java/htsjdk/samtools/SamFileHeaderMerger.java b/src/main/java/htsjdk/samtools/SamFileHeaderMerger.java
index b3f588c..d3cf16a 100644
--- a/src/main/java/htsjdk/samtools/SamFileHeaderMerger.java
+++ b/src/main/java/htsjdk/samtools/SamFileHeaderMerger.java
@@ -98,6 +98,7 @@ public class SamFileHeaderMerger {
 
     //HeaderRecordFactory that creates SAMReadGroupRecord instances.
     private static final HeaderRecordFactory<SAMReadGroupRecord> READ_GROUP_RECORD_FACTORY = new HeaderRecordFactory<SAMReadGroupRecord>() {
+        @Override
         public SAMReadGroupRecord createRecord(final String id, final SAMReadGroupRecord srcReadGroupRecord) {
             return new SAMReadGroupRecord(id, srcReadGroupRecord);
         }
@@ -105,6 +106,7 @@ public class SamFileHeaderMerger {
 
     //HeaderRecordFactory that creates SAMProgramRecord instances.
     private static final HeaderRecordFactory<SAMProgramRecord> PROGRAM_RECORD_FACTORY = new HeaderRecordFactory<SAMProgramRecord>() {
+        @Override
         public SAMProgramRecord createRecord(final String id, final SAMProgramRecord srcProgramRecord) {
             return new SAMProgramRecord(id, srcProgramRecord);
         }
@@ -112,6 +114,7 @@ public class SamFileHeaderMerger {
 
     //comparator used to sort lists of program group and read group records
     private static final Comparator<AbstractSAMHeaderRecord> RECORD_ID_COMPARATOR = new Comparator<AbstractSAMHeaderRecord>() {
+        @Override
         public int compare(final AbstractSAMHeaderRecord o1, final AbstractSAMHeaderRecord o2) {
             return o1.getId().compareTo(o2.getId());
         }
diff --git a/src/main/java/htsjdk/samtools/SamFileValidator.java b/src/main/java/htsjdk/samtools/SamFileValidator.java
index e40bfe9..3e316a2 100644
--- a/src/main/java/htsjdk/samtools/SamFileValidator.java
+++ b/src/main/java/htsjdk/samtools/SamFileValidator.java
@@ -88,6 +88,7 @@ public class SamFileValidator {
     private Histogram<Type> errorsByType;
     private PairEndInfoMap pairEndInfoByName;
     private ReferenceSequenceFileWalker refFileWalker;
+    private SAMSequenceDictionary samSequenceDictionary;
     private boolean verbose;
     private int maxVerboseOutput;
     private SAMSortOrderChecker orderChecker;
@@ -96,6 +97,8 @@ public class SamFileValidator {
     private boolean bisulfiteSequenced;
     private IndexValidationStringency indexValidationStringency;
     private boolean sequenceDictionaryEmptyAndNoWarningEmitted;
+    private int numWarnings;
+    private int numErrors;
 
     private final int maxTempFiles;
 
@@ -111,6 +114,8 @@ public class SamFileValidator {
         this.ignoreWarnings = false;
         this.bisulfiteSequenced = false;
         this.sequenceDictionaryEmptyAndNoWarningEmitted = false;
+        this.numWarnings = 0;
+        this.numErrors = 0;
     }
 
     Histogram<Type> getErrorsByType() {
@@ -150,7 +155,7 @@ public class SamFileValidator {
             for (final Histogram.Bin<Type> bin : errorsByType.values()) {
                 errorsAndWarningsByType.increment(bin.getId().getHistogramString(), bin.getValue());
             }
-            final MetricsFile<ValidationMetrics, String> metricsFile = new MetricsFile<ValidationMetrics, String>();
+            final MetricsFile<ValidationMetrics, String> metricsFile = new MetricsFile<>();
             errorsByType.setBinLabel("Error Type");
             errorsByType.setValueLabel("Count");
             metricsFile.setHistogram(errorsAndWarningsByType);
@@ -176,7 +181,7 @@ public class SamFileValidator {
         } catch (MaxOutputExceededException e) {
             out.println("Maximum output of [" + maxVerboseOutput + "] errors reached.");
         }
-        boolean result = errorsByType.isEmpty();
+        final boolean result = errorsByType.isEmpty();
         cleanup();
         return result;
     }
@@ -245,13 +250,13 @@ public class SamFileValidator {
             // For the coordinate-sorted map, need to detect mate pairs in which the mateReferenceIndex on one end
             // does not match the readReference index on the other end, so the pairs weren't united and validated.
             inMemoryPairMap = new InMemoryPairEndInfoMap();
-            CloseableIterator<Map.Entry<String, PairEndInfo>> it = ((CoordinateSortedPairEndInfoMap) pairEndInfoByName).iterator();
+            final CloseableIterator<Map.Entry<String, PairEndInfo>> it = pairEndInfoByName.iterator();
             while (it.hasNext()) {
-                Map.Entry<String, PairEndInfo> entry = it.next();
-                PairEndInfo pei = inMemoryPairMap.remove(entry.getValue().readReferenceIndex, entry.getKey());
+                final Map.Entry<String, PairEndInfo> entry = it.next();
+                final PairEndInfo pei = inMemoryPairMap.remove(entry.getValue().readReferenceIndex, entry.getKey());
                 if (pei != null) {
                     // Found a mismatch btw read.mateReferenceIndex and mate.readReferenceIndex
-                    List<SAMValidationError> errors = pei.validateMates(entry.getValue(), entry.getKey());
+                    final List<SAMValidationError> errors = pei.validateMates(entry.getValue(), entry.getKey());
                     for (final SAMValidationError error : errors) {
                         addError(error);
                     }
@@ -301,8 +306,7 @@ public class SamFileValidator {
                 if (cigarIsValid) {
                     try {
                         validateNmTag(record, recordNumber);
-                    }
-                    catch (SAMException e) {
+                    } catch (SAMException e) {
                         if (hasValidSortOrder) {
                             // If a CRAM file has an invalid sort order, the ReferenceFileWalker will throw a
                             // SAMException due to an out of order request when retrieving reference bases during NM
@@ -402,10 +406,7 @@ public class SamFileValidator {
     }
 
     private boolean validateCigar(final SAMRecord record, final long recordNumber) {
-        if (record.getReadUnmappedFlag()) {
-            return true;
-        }
-        return validateCigar(record, recordNumber, true);
+        return record.getReadUnmappedFlag() || validateCigar(record, recordNumber, true);
     }
 
     private boolean validateMateCigar(final SAMRecord record, final long recordNumber) {
@@ -455,6 +456,7 @@ public class SamFileValidator {
         }
         if (reference != null) {
             this.refFileWalker = new ReferenceSequenceFileWalker(reference);
+            this.samSequenceDictionary = reference.getSequenceDictionary();
         }
     }
 
@@ -522,6 +524,12 @@ public class SamFileValidator {
         }
         if (fileHeader.getSequenceDictionary().isEmpty()) {
             sequenceDictionaryEmptyAndNoWarningEmitted = true;
+        } else {
+            if (samSequenceDictionary != null) {
+                if (!fileHeader.getSequenceDictionary().isSameDictionary(samSequenceDictionary)) {
+                    addError(new SAMValidationError(Type.MISMATCH_FILE_SEQ_DICT, "Mismatch between file and sequence dictionary", null));
+                }
+            }
         }
         if (fileHeader.getReadGroups().isEmpty()) {
             addError(new SAMValidationError(Type.MISSING_READ_GROUP, "Read groups is empty", null));
@@ -537,7 +545,7 @@ public class SamFileValidator {
         }
 
         final List<SAMReadGroupRecord> rgs = fileHeader.getReadGroups();
-        final Set<String> readGroupIDs = new HashSet<String>();
+        final Set<String> readGroupIDs = new HashSet<>();
 
         for (final SAMReadGroupRecord record : rgs) {
             final String readGroupID = record.getReadGroupId();
@@ -554,12 +562,12 @@ public class SamFileValidator {
                         "A platform (PL) attribute was not found for read group ",
                         readGroupID));
             }
-            else { 
+            else {
                 // NB: cannot be null, so not catching a NPE
                 try {
                     SAMReadGroupRecord.PlatformValue.valueOf(platformValue.toUpperCase());
                 } catch (IllegalArgumentException e) {
-                    addError(new SAMValidationError(Type.INVALID_PLATFORM_VALUE, 
+                    addError(new SAMValidationError(Type.INVALID_PLATFORM_VALUE,
                             "The platform (PL) attribute (" + platformValue + ") + was not one of the valid values for read group ",
                             readGroupID));
                 }
@@ -567,11 +575,41 @@ public class SamFileValidator {
         }
     }
 
+    /**
+     * Number of warnings during SAM file validation
+     *
+     * @return number of warnings
+     */
+    public int getNumWarnings() {
+        return this.numWarnings;
+    }
+
+    /**
+     * Number of errors during SAM file validation
+     *
+     * @return number of errors
+     */
+    public int getNumErrors() {
+        return this.numErrors;
+    }
+
     private void addError(final SAMValidationError error) {
         // Just ignore an error if it's of a type we're not interested in
         if (this.errorsToIgnore.contains(error.getType())) return;
 
-        if (this.ignoreWarnings && error.getType().severity == SAMValidationError.Severity.WARNING) return;
+        switch (error.getType().severity) {
+            case WARNING:
+                if ( this.ignoreWarnings ) {
+                    return;
+                }
+                this.numWarnings++;
+                break;
+            case ERROR:
+                this.numErrors++;
+                break;
+            default:
+                throw new SAMException("Unknown SAM validation error severity: " + error.getType().severity);
+        }
 
         this.errorsByType.increment(error.getType());
         if (verbose) {
@@ -659,11 +697,10 @@ public class SamFileValidator {
             this.firstOfPairFlag = record.getFirstOfPairFlag();
         }
 
-        private PairEndInfo(int readAlignmentStart, int readReferenceIndex, boolean readNegStrandFlag, boolean readUnmappedFlag,
-                            String readCigarString,
-                            int mateAlignmentStart, int mateReferenceIndex, boolean mateNegStrandFlag, boolean mateUnmappedFlag,
-                            String mateCigarString,
-                            boolean firstOfPairFlag, long recordNumber) {
+        private PairEndInfo(final int readAlignmentStart, final int readReferenceIndex, final boolean readNegStrandFlag, final boolean readUnmappedFlag,
+                            final String readCigarString,
+                            final int mateAlignmentStart, final int mateReferenceIndex, final boolean mateNegStrandFlag, final boolean mateUnmappedFlag,
+                            final String mateCigarString, final boolean firstOfPairFlag, final long recordNumber) {
             this.readAlignmentStart = readAlignmentStart;
             this.readReferenceIndex = readReferenceIndex;
             this.readNegStrandFlag = readNegStrandFlag;
@@ -679,7 +716,7 @@ public class SamFileValidator {
         }
 
         public List<SAMValidationError> validateMates(final PairEndInfo mate, final String readName) {
-            final List<SAMValidationError> errors = new ArrayList<SAMValidationError>();
+            final List<SAMValidationError> errors = new ArrayList<>();
             validateMateFields(this, mate, readName, errors);
             validateMateFields(mate, this, readName, errors);
             // Validations that should not be repeated on both ends
@@ -750,21 +787,25 @@ public class SamFileValidator {
 
         PairEndInfo remove(int mateReferenceIndex, String key);
 
+        @Override
         CloseableIterator<Map.Entry<String, PairEndInfo>> iterator();
     }
 
     private class CoordinateSortedPairEndInfoMap implements PairEndInfoMap {
         private final CoordinateSortedPairInfoMap<String, PairEndInfo> onDiskMap =
-                new CoordinateSortedPairInfoMap<String, PairEndInfo>(maxTempFiles, new Codec());
+                new CoordinateSortedPairInfoMap<>(maxTempFiles, new Codec());
 
+        @Override
         public void put(int mateReferenceIndex, String key, PairEndInfo value) {
             onDiskMap.put(mateReferenceIndex, key, value);
         }
 
+        @Override
         public PairEndInfo remove(int mateReferenceIndex, String key) {
             return onDiskMap.remove(mateReferenceIndex, key);
         }
 
+        @Override
         public CloseableIterator<Map.Entry<String, PairEndInfo>> iterator() {
             return onDiskMap.iterator();
         }
@@ -773,14 +814,17 @@ public class SamFileValidator {
             private DataInputStream in;
             private DataOutputStream out;
 
+            @Override
             public void setOutputStream(final OutputStream os) {
                 this.out = new DataOutputStream(os);
             }
 
+            @Override
             public void setInputStream(final InputStream is) {
                 this.in = new DataInputStream(is);
             }
 
+            @Override
             public void encode(final String key, final PairEndInfo record) {
                 try {
                     out.writeUTF(key);
@@ -802,6 +846,7 @@ public class SamFileValidator {
                 }
             }
 
+            @Override
             public Map.Entry<String, PairEndInfo> decode() {
                 try {
                     final String key = in.readUTF();
@@ -836,33 +881,40 @@ public class SamFileValidator {
     }
 
     private static class InMemoryPairEndInfoMap implements PairEndInfoMap {
-        private final Map<String, PairEndInfo> map = new HashMap<String, PairEndInfo>();
+        private final Map<String, PairEndInfo> map = new HashMap<>();
 
+        @Override
         public void put(int mateReferenceIndex, String key, PairEndInfo value) {
             if (mateReferenceIndex != value.mateReferenceIndex)
                 throw new IllegalArgumentException("mateReferenceIndex does not agree with PairEndInfo");
             map.put(key, value);
         }
 
+        @Override
         public PairEndInfo remove(int mateReferenceIndex, String key) {
             return map.remove(key);
         }
 
+        @Override
         public CloseableIterator<Map.Entry<String, PairEndInfo>> iterator() {
             final Iterator<Map.Entry<String, PairEndInfo>> it = map.entrySet().iterator();
             return new CloseableIterator<Map.Entry<String, PairEndInfo>>() {
+                @Override
                 public void close() {
                     // do nothing
                 }
 
+                @Override
                 public boolean hasNext() {
                     return it.hasNext();
                 }
 
+                @Override
                 public Map.Entry<String, PairEndInfo> next() {
                     return it.next();
                 }
 
+                @Override
                 public void remove() {
                     it.remove();
                 }
diff --git a/src/main/java/htsjdk/samtools/SamInputResource.java b/src/main/java/htsjdk/samtools/SamInputResource.java
index f25d97b..a039e5a 100644
--- a/src/main/java/htsjdk/samtools/SamInputResource.java
+++ b/src/main/java/htsjdk/samtools/SamInputResource.java
@@ -29,6 +29,7 @@ import htsjdk.samtools.seekablestream.SeekablePathStream;
 import htsjdk.samtools.seekablestream.SeekableStream;
 import htsjdk.samtools.seekablestream.SeekableStreamFactory;
 import htsjdk.samtools.sra.SRAAccession;
+import htsjdk.samtools.util.IOUtil;
 import htsjdk.samtools.util.Lazy;
 import htsjdk.samtools.util.RuntimeIOException;
 
@@ -39,9 +40,12 @@ import java.io.InputStream;
 import java.net.MalformedURLException;
 import java.net.URISyntaxException;
 import java.net.URL;
+import java.nio.channels.SeekableByteChannel;
 import java.nio.file.FileSystemNotFoundException;
 import java.nio.file.Path;
 import java.nio.file.Paths;
+import java.util.function.Function;
+import java.util.function.Supplier;
 
 /**
  * Describes a SAM-like resource, including its data (where the records are), and optionally an index.
@@ -89,7 +93,15 @@ public class SamInputResource {
     public static SamInputResource of(final File file) { return new SamInputResource(new FileInputResource(file)); }
 
     /** Creates a {@link SamInputResource} reading from the provided resource, with no index. */
-    public static SamInputResource of(final Path path) { return new SamInputResource(new PathInputResource(path)); }
+    public static SamInputResource of(final Path path) {
+        return new SamInputResource(new PathInputResource(path));
+    }
+
+    /** Creates a {@link SamInputResource} reading from the provided resource, with no index,
+     *  and with a wrapper to apply to the SeekableByteChannel for custom prefetching/buffering. */
+    public static SamInputResource of(final Path path, Function<SeekableByteChannel, SeekableByteChannel> wrapper) {
+        return new SamInputResource(new PathInputResource(path, wrapper));
+    }
 
     /** Creates a {@link SamInputResource} reading from the provided resource, with no index. */
     public static SamInputResource of(final InputStream inputStream) { return new SamInputResource(new InputStreamInputResource(inputStream)); }
@@ -125,6 +137,12 @@ public class SamInputResource {
         return this;
     }
 
+    /** Updates the index to point at the provided resource, with the provided wrapper, then returns itself. */
+    public SamInputResource index(final Path path, Function<SeekableByteChannel, SeekableByteChannel> wrapper) {
+        this.index = new PathInputResource(path, wrapper);
+        return this;
+    }
+
     /** Updates the index to point at the provided resource, then returns itself. */
     public SamInputResource index(final InputStream inputStream) {
         this.index = new InputStreamInputResource(inputStream);
@@ -213,9 +231,9 @@ abstract class InputResource {
 class FileInputResource extends InputResource {
 
     final File fileResource;
-    final Lazy<SeekableStream> lazySeekableStream = new Lazy<SeekableStream>(new Lazy.LazyInitializer<SeekableStream>() {
+    final Lazy<SeekableStream> lazySeekableStream = new Lazy<>(new Supplier<SeekableStream>() {
         @Override
-        public SeekableStream make() {
+        public SeekableStream get() {
             try {
                 return new SeekableFileStream(fileResource);
             } catch (final FileNotFoundException e) {
@@ -268,11 +286,12 @@ class FileInputResource extends InputResource {
 class PathInputResource extends InputResource {
 
     final Path pathResource;
-    final Lazy<SeekableStream> lazySeekableStream = new Lazy<SeekableStream>(new Lazy.LazyInitializer<SeekableStream>() {
+    final Function<SeekableByteChannel, SeekableByteChannel> wrapper;
+    final Lazy<SeekableStream> lazySeekableStream = new Lazy<>(new Supplier<SeekableStream>() {
         @Override
-        public SeekableStream make() {
+        public SeekableStream get() {
             try {
-                return new SeekablePathStream(pathResource);
+                return new SeekablePathStream(pathResource, wrapper);
             } catch (final IOException e) {
                 throw new RuntimeIOException(e);
             }
@@ -281,8 +300,14 @@ class PathInputResource extends InputResource {
 
 
     PathInputResource(final Path pathResource) {
+        this(pathResource, Function.identity());
+    }
+
+    //  wrapper applies to the SeekableByteChannel for custom prefetching/buffering.
+    PathInputResource(final Path pathResource, Function<SeekableByteChannel, SeekableByteChannel> wrapper) {
         super(Type.PATH);
         this.pathResource = pathResource;
+        this.wrapper = wrapper;
     }
 
     @Override
@@ -327,9 +352,9 @@ class PathInputResource extends InputResource {
 class UrlInputResource extends InputResource {
 
     final URL urlResource;
-    final Lazy<SeekableStream> lazySeekableStream = new Lazy<SeekableStream>(new Lazy.LazyInitializer<SeekableStream>() {
+    final Lazy<SeekableStream> lazySeekableStream = new Lazy<>(new Supplier<SeekableStream>() {
         @Override
-        public SeekableStream make() {
+        public SeekableStream get() {
             try { return SeekableStreamFactory.getInstance().getStreamFor(urlResource); }
             catch (final IOException ioe) { throw new RuntimeIOException(ioe); }
         }
@@ -348,8 +373,8 @@ class UrlInputResource extends InputResource {
     @Override
     public Path asPath() {
         try {
-            return Paths.get(urlResource.toURI());
-        } catch (URISyntaxException | IllegalArgumentException |
+            return IOUtil.getPath(urlResource.toExternalForm());
+        } catch (IOException | IllegalArgumentException |
             FileSystemNotFoundException | SecurityException e) {
             return null;
         }
diff --git a/src/main/java/htsjdk/samtools/SamPairUtil.java b/src/main/java/htsjdk/samtools/SamPairUtil.java
index ee1707b..4849850 100644
--- a/src/main/java/htsjdk/samtools/SamPairUtil.java
+++ b/src/main/java/htsjdk/samtools/SamPairUtil.java
@@ -424,6 +424,7 @@ public class SamPairUtil {
          */
         public long getNumMateCigarsAdded() { return this.numMateCigarsAdded; }
 
+        @Override
         public boolean hasNext() {
             return (!records.isEmpty() || super.hasNext());
         }
@@ -495,12 +496,14 @@ public class SamPairUtil {
             }
         }
 
+        @Override
         public SAMRecord next() {
             advance();
             if (records.isEmpty()) throw new IllegalStateException("Unexpectedly found an empty record list");
             return this.records.poll();
         }
 
+        @Override
         public SAMRecord peek() {
             advance();
             if (records.isEmpty()) throw new IllegalStateException("Unexpectedly found an empty record list");
diff --git a/src/main/java/htsjdk/samtools/SamReader.java b/src/main/java/htsjdk/samtools/SamReader.java
index 2f1b2f9..08f93ec 100644
--- a/src/main/java/htsjdk/samtools/SamReader.java
+++ b/src/main/java/htsjdk/samtools/SamReader.java
@@ -164,6 +164,7 @@ public interface SamReader extends Iterable<SAMRecord>, Closeable {
      * Only a single open iterator on a SAM or BAM file may be extant at any one time.  If you want to start
      * a second iteration, the first one must be closed first.
      */
+    @Override
     public SAMRecordIterator iterator();
 
     /**
@@ -381,7 +382,11 @@ public interface SamReader extends Iterable<SAMRecord>, Closeable {
             this.resource = resource;
         }
 
-        PrimitiveSamReader underlyingReader() {
+        /**
+         * Access the underlying {@link PrimitiveSamReader} used by this adapter.
+         * @return the {@link PrimitiveSamReader} used by this adapter.
+         */
+        public PrimitiveSamReader underlyingReader() {
             return p;
         }
 
@@ -554,6 +559,7 @@ public interface SamReader extends Iterable<SAMRecord>, Closeable {
             wrappedIterator = iterator;
         }
 
+        @Override
         public SAMRecordIterator assertSorted(final SAMFileHeader.SortOrder sortOrder) {
 
             if (sortOrder == null || sortOrder == SAMFileHeader.SortOrder.unsorted) {
@@ -565,6 +571,7 @@ public interface SamReader extends Iterable<SAMRecord>, Closeable {
             return this;
         }
 
+        @Override
         public SAMRecord next() {
             final SAMRecord result = wrappedIterator.next();
             if (comparator != null) {
@@ -587,10 +594,13 @@ public interface SamReader extends Iterable<SAMRecord>, Closeable {
             return result;
         }
 
+        @Override
         public void close() { wrappedIterator.close(); }
 
+        @Override
         public boolean hasNext() { return wrappedIterator.hasNext(); }
 
+        @Override
         public void remove() { wrappedIterator.remove(); }
     }
 
diff --git a/src/main/java/htsjdk/samtools/SamReaderFactory.java b/src/main/java/htsjdk/samtools/SamReaderFactory.java
index 8769f48..3d6a80f 100644
--- a/src/main/java/htsjdk/samtools/SamReaderFactory.java
+++ b/src/main/java/htsjdk/samtools/SamReaderFactory.java
@@ -29,13 +29,16 @@ import htsjdk.samtools.cram.ref.ReferenceSource;
 import htsjdk.samtools.seekablestream.SeekableStream;
 import htsjdk.samtools.sra.SRAAccession;
 import htsjdk.samtools.util.*;
+import htsjdk.samtools.util.zip.InflaterFactory;
 
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
+import java.nio.channels.SeekableByteChannel;
 import java.nio.file.Path;
 import java.util.Collections;
 import java.util.EnumSet;
+import java.util.function.Function;
 import java.util.zip.GZIPInputStream;
 
 /**
@@ -74,13 +77,31 @@ import java.util.zip.GZIPInputStream;
 public abstract class SamReaderFactory {
 
     private static ValidationStringency defaultValidationStringency = ValidationStringency.DEFAULT_STRINGENCY;
-    
+
     abstract public SamReader open(final File file);
 
+    /**
+     * Open the specified path (without using any wrappers).
+     *
+     * @param path the SAM or BAM file to open.
+     */
     public SamReader open(final Path path) {
-        final SamInputResource r = SamInputResource.of(path);
+        return open(path, null, null);
+    }
+
+    /**
+     * Open the specified path, using the specified wrappers for prefetching/caching.
+     *
+     * @param path the SAM or BAM file to open
+     * @param dataWrapper the wrapper for the data (or null for none)
+     * @param indexWrapper the wrapper for the index (or null for none)
+     */
+    public SamReader open(final Path path,
+            Function<SeekableByteChannel, SeekableByteChannel> dataWrapper,
+            Function<SeekableByteChannel, SeekableByteChannel> indexWrapper) {
+        final SamInputResource r = SamInputResource.of(path, dataWrapper);
         final Path indexMaybe = SamFiles.findIndex(path);
-        if (indexMaybe != null) r.index(indexMaybe);
+        if (indexMaybe != null) r.index(indexMaybe, indexWrapper);
         return open(r);
     }
 
@@ -93,6 +114,13 @@ public abstract class SamReaderFactory {
     /** Set this factory's {@link htsjdk.samtools.SAMRecordFactory} to the provided one, then returns itself. */
     abstract public SamReaderFactory samRecordFactory(final SAMRecordFactory samRecordFactory);
 
+    /**
+     * Set this factory's {@link htsjdk.samtools.util.zip.InflaterFactory} to the provided one, then returns itself.
+     * Note: The inflaterFactory provided here is only used for BAM decompression implemented with {@link BAMFileReader},
+     * it is not used for CRAM or other formats like a gzipped SAM file.
+     */
+    abstract public SamReaderFactory inflaterFactory(final InflaterFactory inflaterFactory);
+
     /** Enables the provided {@link Option}s, then returns itself. */
     abstract public SamReaderFactory enable(final Option... options);
 
@@ -118,18 +146,20 @@ public abstract class SamReaderFactory {
     abstract public SamReaderFactory validationStringency(final ValidationStringency validationStringency);
 
     /** Set whether readers created by this factory will use asynchronous IO.
-     * If this methods is not called, this flag will default to the value of {@link Defaults#USE_ASYNC_IO_FOR_SAMTOOLS}.
+     * If this methods is not called, this flag will default to the value of {@link Defaults#USE_ASYNC_IO_READ_FOR_SAMTOOLS}.
      * Note that this option may not be applicable to all readers returned from this factory.
      * Returns the factory itself. */
     abstract public SamReaderFactory setUseAsyncIo(final boolean asynchronousIO);
 
     private static SamReaderFactoryImpl DEFAULT =
-            new SamReaderFactoryImpl(Option.DEFAULTS, defaultValidationStringency, DefaultSAMRecordFactory.getInstance());
+            new SamReaderFactoryImpl(Option.DEFAULTS, defaultValidationStringency,
+                    DefaultSAMRecordFactory.getInstance(), BlockGunzipper.getDefaultInflaterFactory());
 
     public static void setDefaultValidationStringency(final ValidationStringency defaultValidationStringency) {
         SamReaderFactory.defaultValidationStringency = defaultValidationStringency;
         // The default may have changed, so reset the default SamReader
-        DEFAULT = new SamReaderFactoryImpl(Option.DEFAULTS, defaultValidationStringency, DefaultSAMRecordFactory.getInstance());
+        DEFAULT = new SamReaderFactoryImpl(Option.DEFAULTS, defaultValidationStringency,
+                DefaultSAMRecordFactory.getInstance(), BlockGunzipper.getDefaultInflaterFactory());
     }
 
     /** Creates a copy of the default {@link SamReaderFactory}. */
@@ -138,11 +168,12 @@ public abstract class SamReaderFactory {
     }
 
     /**
-     * Creates an "empty" factory with no enabled {@link Option}s, {@link ValidationStringency#DEFAULT_STRINGENCY}, and
-     * {@link htsjdk.samtools.DefaultSAMRecordFactory}.
+     * Creates an "empty" factory with no enabled {@link Option}s, {@link ValidationStringency#DEFAULT_STRINGENCY},
+     * no path wrapper, and {@link htsjdk.samtools.DefaultSAMRecordFactory}.
      */
     public static SamReaderFactory make() {
-        return new SamReaderFactoryImpl(EnumSet.noneOf(Option.class), ValidationStringency.DEFAULT_STRINGENCY, DefaultSAMRecordFactory.getInstance());
+        return new SamReaderFactoryImpl(EnumSet.noneOf(Option.class), ValidationStringency.DEFAULT_STRINGENCY,
+                DefaultSAMRecordFactory.getInstance(), BlockGunzipper.getDefaultInflaterFactory());
     }
 
     private static class SamReaderFactoryImpl extends SamReaderFactory {
@@ -153,12 +184,14 @@ public abstract class SamReaderFactory {
         private SAMRecordFactory samRecordFactory;
         private CustomReaderFactory customReaderFactory;
         private CRAMReferenceSource referenceSource;
+        private InflaterFactory inflaterFactory;
 
-        private SamReaderFactoryImpl(final EnumSet<Option> enabledOptions, final ValidationStringency validationStringency, final SAMRecordFactory samRecordFactory) {
+        private SamReaderFactoryImpl(final EnumSet<Option> enabledOptions, final ValidationStringency validationStringency, final SAMRecordFactory samRecordFactory, final InflaterFactory inflaterFactory) {
             this.enabledOptions = EnumSet.copyOf(enabledOptions);
             this.samRecordFactory = samRecordFactory;
             this.validationStringency = validationStringency;
             this.customReaderFactory = CustomReaderFactory.getInstance();
+            this.inflaterFactory = inflaterFactory;
         }
    
         @Override
@@ -187,6 +220,12 @@ public abstract class SamReaderFactory {
         }
 
         @Override
+        public SamReaderFactory inflaterFactory(final InflaterFactory inflaterFactory) {
+            this.inflaterFactory = inflaterFactory;
+            return this;
+        }
+
+        @Override
         public SamReaderFactory enable(final Option... options) {
             Collections.addAll(this.enabledOptions, options);
             return this;
@@ -273,13 +312,15 @@ public abstract class SamReaderFactory {
                             // TODO: Throw an exception here?  An index _may_ have been provided, but we're ignoring it
                             bufferedIndexStream = null;
                         }
+
                         primitiveSamReader = new BAMFileReader(
                                 IOUtil.maybeBufferedSeekableStream(data.asUnbufferedSeekableStream()),
                                 bufferedIndexStream,
                                 false,
                                 asynchronousIO,
                                 validationStringency,
-                                this.samRecordFactory
+                                this.samRecordFactory,
+                                this.inflaterFactory
                         );
                     } else if (SamStreams.sourceLikeCram(data.asUnbufferedSeekableStream())) {
                         if (referenceSource == null) {
@@ -319,18 +360,22 @@ public abstract class SamReaderFactory {
                             if (null == sourceSeekable || null == indexSeekable) {
                                 // not seekable.
                                 // it's OK that we consumed a bit of the stream already, this ctor expects it.
-                                primitiveSamReader = new BAMFileReader(bufferedStream, indexFile, false, asynchronousIO, validationStringency, this.samRecordFactory);
+                                primitiveSamReader = new BAMFileReader(bufferedStream, indexFile, false, asynchronousIO,
+                                        validationStringency, this.samRecordFactory, this.inflaterFactory);
                             } else {
                                 // seekable.
                                 // need to return to the beginning because it's the same stream we used earlier
                                 // and read a bit from, and that form of the ctor expects the stream to start at 0.
                                 sourceSeekable.seek(0);
                                 primitiveSamReader = new BAMFileReader(
-                                        sourceSeekable, indexSeekable, false, asynchronousIO, validationStringency, this.samRecordFactory);
+                                        sourceSeekable, indexSeekable, false, asynchronousIO, validationStringency,
+                                        this.samRecordFactory, this.inflaterFactory);
                             }
                         } else {
                             bufferedStream.close();
-                            primitiveSamReader = new BAMFileReader(sourceFile, indexFile, false, asynchronousIO, validationStringency, this.samRecordFactory);
+                            primitiveSamReader = new BAMFileReader(
+                                sourceFile, indexFile, false, asynchronousIO,
+                                validationStringency, this.samRecordFactory, this.inflaterFactory);
                         }
                     } else if (BlockCompressedInputStream.isValidFile(bufferedStream)) {
                         primitiveSamReader = new SAMTextReader(new BlockCompressedInputStream(bufferedStream), validationStringency, this.samRecordFactory);
@@ -388,7 +433,7 @@ public abstract class SamReaderFactory {
         }
 
         public static SamReaderFactory copyOf(final SamReaderFactoryImpl target) {
-            return new SamReaderFactoryImpl(target.enabledOptions, target.validationStringency, target.samRecordFactory);
+            return new SamReaderFactoryImpl(target.enabledOptions, target.validationStringency, target.samRecordFactory, target.inflaterFactory);
         }
     }
 
diff --git a/src/main/java/htsjdk/samtools/StreamInflatingIndexingOutputStream.java b/src/main/java/htsjdk/samtools/StreamInflatingIndexingOutputStream.java
index 6b653fe..b579ff9 100644
--- a/src/main/java/htsjdk/samtools/StreamInflatingIndexingOutputStream.java
+++ b/src/main/java/htsjdk/samtools/StreamInflatingIndexingOutputStream.java
@@ -10,6 +10,7 @@ import java.io.InputStream;
 import java.io.OutputStream;
 import java.io.PipedInputStream;
 import java.io.PipedOutputStream;
+import java.nio.file.Path;
 
 /**
  * OutputStream implementation that writes output to an underlying output stream while also copying the
@@ -22,11 +23,15 @@ class StreamInflatingIndexingOutputStream extends OutputStream {
     private final Thread thread;
 
     public StreamInflatingIndexingOutputStream(final OutputStream s1, final File indexFile) {
+        this(s1, indexFile.toPath());
+    }
+
+    public StreamInflatingIndexingOutputStream(final OutputStream s1, final Path indexPath) {
         try {
             this.s1 = s1;
             this.s2 = new PipedOutputStream();
             final PipedInputStream pin = new PipedInputStream(this.s2, Defaults.NON_ZERO_BUFFER_SIZE);
-            this.thread = new Thread(new Indexer(indexFile, pin), "BamIndexingThread");
+            this.thread = new Thread(new Indexer(indexPath, pin), "BamIndexingThread");
             this.thread.start();
         } catch (final IOException ioe) {
             throw new RuntimeIOException(ioe);
@@ -72,15 +77,15 @@ class StreamInflatingIndexingOutputStream extends OutputStream {
 
 /**
  * A little class that takes an InputStream from which it reads a BAM file, generates
- * a BAMIndex and then writes the index to the File provided.  All operations are designed
+ * a BAMIndex and then writes the index to the Path provided.  All operations are designed
  * to be carried out in a separate thread.
  */
 class Indexer implements Runnable {
-    private final File index;
+    private final Path index;
     private final InputStream stream;
 
-    /** Constructs an indexer that reads from the stream provided and writes an index to the File provided. */
-    Indexer(final File index, final InputStream stream) {
+    /** Constructs an indexer that reads from the stream provided and writes an index to the Path provided. */
+    Indexer(final Path index, final InputStream stream) {
         this.index = index;
         this.stream = stream;
     }
diff --git a/src/main/java/htsjdk/samtools/TextTagCodec.java b/src/main/java/htsjdk/samtools/TextTagCodec.java
index 60363e1..40dc8ac 100644
--- a/src/main/java/htsjdk/samtools/TextTagCodec.java
+++ b/src/main/java/htsjdk/samtools/TextTagCodec.java
@@ -158,14 +158,17 @@ public class TextTagCodec {
         final String stringVal = numFields == TextTagCodec.NUM_TAG_FIELDS ? fields[2] : "";
         final Object val = convertStringToObject(type, stringVal);
         return new Map.Entry<String, Object>() {
+            @Override
             public String getKey() {
                 return key;
             }
 
+            @Override
             public Object getValue() {
                 return val;
             }
 
+            @Override
             public Object setValue(final Object o) {
                 throw new UnsupportedOperationException();
             }
diff --git a/src/main/java/htsjdk/samtools/TextualBAMIndexWriter.java b/src/main/java/htsjdk/samtools/TextualBAMIndexWriter.java
index d790270..da418fd 100644
--- a/src/main/java/htsjdk/samtools/TextualBAMIndexWriter.java
+++ b/src/main/java/htsjdk/samtools/TextualBAMIndexWriter.java
@@ -68,6 +68,7 @@ class TextualBAMIndexWriter implements BAMIndexWriter {
     /**
      * Write this content as human-readable text
      */
+    @Override
     public void writeReference(final BAMIndexContent content) {
 
         final int reference = content.getReferenceSequence();
@@ -172,6 +173,7 @@ class TextualBAMIndexWriter implements BAMIndexWriter {
      *
      * @param noCoordinateCount the count of records seen with no coordinate positions in the start coordinate
      */
+    @Override
     public void writeNoCoordinateRecordCount(final Long noCoordinateCount) {
         pw.println("No Coordinate Count=" + noCoordinateCount);
     }
@@ -179,6 +181,7 @@ class TextualBAMIndexWriter implements BAMIndexWriter {
     /**
      * Any necessary processing at the end of the file
      */
+    @Override
     public void close() {
         pw.close();
     }
diff --git a/src/main/java/htsjdk/samtools/cram/build/CramNormalizer.java b/src/main/java/htsjdk/samtools/cram/build/CramNormalizer.java
index 1be1aa5..b2dd67c 100644
--- a/src/main/java/htsjdk/samtools/cram/build/CramNormalizer.java
+++ b/src/main/java/htsjdk/samtools/cram/build/CramNormalizer.java
@@ -32,6 +32,7 @@ import htsjdk.samtools.cram.ref.CRAMReferenceSource;
 import htsjdk.samtools.cram.structure.CramCompressionRecord;
 import htsjdk.samtools.cram.structure.SubstitutionMatrix;
 import htsjdk.samtools.util.Log;
+import htsjdk.samtools.util.SequenceUtil;
 
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -242,7 +243,8 @@ public class CramNormalizer {
             } else
                 System.arraycopy(ref, alignmentStart - refOffsetZeroBased,
                         bases, 0, bases.length);
-            return bases;
+
+            return SequenceUtil.toBamReadBasesInPlace(bases);
         }
         final List<ReadFeature> variations = record.readFeatures;
         for (final ReadFeature variation : variations) {
@@ -256,6 +258,7 @@ public class CramNormalizer {
                     final Substitution substitution = (Substitution) variation;
                     byte refBase = getByteOrDefault(ref, alignmentStart + posInSeq
                             - refOffsetZeroBased, (byte) 'N');
+                    // substitution requires ACGTN only:
                     refBase = Utils.normalizeBase(refBase);
                     final byte base = substitutionMatrix.base(refBase, substitution.getCode());
                     substitution.setBase(base);
@@ -304,11 +307,7 @@ public class CramNormalizer {
             }
         }
 
-        for (int i = 0; i < bases.length; i++) {
-            bases[i] = Utils.normalizeBase(bases[i]);
-        }
-
-        return bases;
+        return SequenceUtil.toBamReadBasesInPlace(bases);
     }
 
     private static byte getByteOrDefault(final byte[] array, final int pos,
diff --git a/src/main/java/htsjdk/samtools/cram/build/Sam2CramRecordFactory.java b/src/main/java/htsjdk/samtools/cram/build/Sam2CramRecordFactory.java
index b7ffcb1..6c59e13 100644
--- a/src/main/java/htsjdk/samtools/cram/build/Sam2CramRecordFactory.java
+++ b/src/main/java/htsjdk/samtools/cram/build/Sam2CramRecordFactory.java
@@ -17,50 +17,21 @@
  */
 package htsjdk.samtools.cram.build;
 
-import htsjdk.samtools.CigarElement;
-import htsjdk.samtools.CigarOperator;
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMReadGroupRecord;
-import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.*;
 import htsjdk.samtools.SAMRecord.SAMTagAndValue;
-import htsjdk.samtools.SAMTag;
 import htsjdk.samtools.cram.common.CramVersions;
 import htsjdk.samtools.cram.common.Version;
-import htsjdk.samtools.cram.encoding.readfeatures.BaseQualityScore;
-import htsjdk.samtools.cram.encoding.readfeatures.Deletion;
-import htsjdk.samtools.cram.encoding.readfeatures.HardClip;
-import htsjdk.samtools.cram.encoding.readfeatures.InsertBase;
-import htsjdk.samtools.cram.encoding.readfeatures.Padding;
-import htsjdk.samtools.cram.encoding.readfeatures.ReadFeature;
-import htsjdk.samtools.cram.encoding.readfeatures.RefSkip;
-import htsjdk.samtools.cram.encoding.readfeatures.SoftClip;
-import htsjdk.samtools.cram.encoding.readfeatures.Substitution;
+import htsjdk.samtools.cram.encoding.readfeatures.*;
 import htsjdk.samtools.cram.structure.CramCompressionRecord;
 import htsjdk.samtools.cram.structure.ReadTag;
 import htsjdk.samtools.util.Log;
+import htsjdk.samtools.util.SequenceUtil;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeSet;
+import java.util.*;
 
 public class Sam2CramRecordFactory {
-
-    public static final String UNKNOWN_READ_GROUP_ID = "UNKNOWN";
-    public static final String UNKNOWN_READ_GROUP_SAMPLE = "UNKNOWN";
-
-    private final static byte QS_asciiOffset = 33;
-    public final static byte unsetQualityScore = 32;
-    public final static byte ignorePositionsWithQualityScore = -1;
-
     private byte[] refBases;
     private final Version version;
-    private byte[] refSNPs;
 
     final private SAMFileHeader header;
 
@@ -68,9 +39,6 @@ public class Sam2CramRecordFactory {
 
     private final Map<String, Integer> readGroupMap = new HashMap<String, Integer>();
 
-    private long landedRefMaskScores = 0;
-    private long landedTotalScores = 0;
-
     public boolean captureAllTags = false;
     public boolean preserveReadNames = false;
     public final Set<String> captureTags = new TreeSet<String>();
@@ -151,8 +119,14 @@ public class Sam2CramRecordFactory {
         } else cramRecord.readFeatures = Collections.emptyList();
 
         cramRecord.readBases = record.getReadBases();
+
+        /**
+         * CRAM read bases are limited to ACGTN, see https://github.com/samtools/hts-specs/blob/master/CRAMv3.pdf passage 10.2 on read bases.
+         * However, BAM format allows upper case IUPAC codes without a dot, so we follow the same approach to reproduce the behaviour of samtools.
+         */
+        // copy read bases to avoid changing the original record:
+        cramRecord.readBases = SequenceUtil.toBamReadBasesInPlace(Arrays.copyOf(record.getReadBases(), record.getReadLength()));
         cramRecord.qualityScores = record.getBaseQualities();
-        landedTotalScores += cramRecord.readLength;
         if (version.compatibleWith(CramVersions.CRAM_v3))
             cramRecord.setUnknownBases(record.getReadBases() == SAMRecord.NULL_SEQUENCE);
 
@@ -187,7 +161,7 @@ public class Sam2CramRecordFactory {
      * A wrapper method to provide better diagnostics for ArrayIndexOutOfBoundsException.
      *
      * @param cramRecord CRAM record
-     * @param samRecord SAM record
+     * @param samRecord  SAM record
      * @return a list of read features created for the given {@link htsjdk.samtools.SAMRecord}
      */
     private List<ReadFeature> checkedCreateVariations(final CramCompressionRecord cramRecord, final SAMRecord samRecord) {
@@ -247,7 +221,7 @@ public class Sam2CramRecordFactory {
                 case M:
                 case X:
                 case EQ:
-                    addSubstitutionsAndMaskedBases(cramRecord, features, zeroBasedPositionInRead, alignmentStartOffset,
+                    addMismatchReadFeatures(cramRecord.alignmentStart, features, zeroBasedPositionInRead, alignmentStartOffset,
                             cigarElementLength, bases, qualityScore);
                     break;
                 default:
@@ -291,57 +265,47 @@ public class Sam2CramRecordFactory {
         }
     }
 
-    private void addSubstitutionsAndMaskedBases(final CramCompressionRecord cramRecord, final List<ReadFeature> features, final int fromPosInRead, final int
+    /**
+     * Processes a stretch of read bases marked as match or mismatch and emits appropriate read features.
+     * Briefly the algorithm is:
+     * <ul><li>emit nothing for a read base matching corresponding reference base.</li>
+     * <li>emit a {@link Substitution} read feature for each ACTGN-ACTGN mismatch.</li>
+     * <li>emit {@link ReadBase} for a non-ACTGN mismatch. The side effect is the quality score stored twice.</li>
+     * <p>
+     * IMPORTANT: reference and read bases are always compared for match/mismatch in upper case due to BAM limitations.
+     *
+     * @param alignmentStart       CRAM record alignment start
+     * @param features             a list of read features to add to
+     * @param fromPosInRead        a zero based position in the read to start with
+     * @param alignmentStartOffset offset into the reference array
+     * @param nofReadBases         how many read bases to process
+     * @param bases                the read bases array
+     * @param qualityScore         the quality score array
+     */
+    void addMismatchReadFeatures(final int alignmentStart, final List<ReadFeature> features, final int fromPosInRead, final int
             alignmentStartOffset, final int nofReadBases, final byte[] bases, final byte[] qualityScore) {
-        int oneBasedPositionInRead;
-        final boolean noQS = (qualityScore.length == 0);
+        int oneBasedPositionInRead = fromPosInRead + 1;
+        int refIndex = alignmentStart + alignmentStartOffset - 1;
 
-        int i;
-        boolean qualityAdded;
         byte refBase;
-        for (i = 0; i < nofReadBases; i++) {
-            oneBasedPositionInRead = i + fromPosInRead + 1;
-            final int referenceCoordinates = cramRecord.alignmentStart + i + alignmentStartOffset - 1;
-            qualityAdded = false;
-            if (referenceCoordinates >= refBases.length) refBase = 'N';
-            else refBase = refBases[referenceCoordinates];
-            refBase = Utils.normalizeBase(refBase);
-
-            if (bases[i + fromPosInRead] != refBase) {
-                final Substitution substitution = new Substitution();
-                substitution.setPosition(oneBasedPositionInRead);
-                substitution.setBase(bases[i + fromPosInRead]);
-                substitution.setReferenceBase(refBase);
-
-                features.add(substitution);
-
-                if (noQS) continue;
-            }
-
-            if (noQS) continue;
-
-            if (refSNPs != null) {
-                final byte snpOrNot = refSNPs[referenceCoordinates];
-                if (snpOrNot != 0) {
-                    final byte score = (byte) (QS_asciiOffset + qualityScore[i + fromPosInRead]);
-                    features.add(new BaseQualityScore(oneBasedPositionInRead, score));
-                    qualityAdded = true;
-                    landedRefMaskScores++;
+        for (int i = 0; i < nofReadBases; i++, oneBasedPositionInRead++, refIndex++) {
+            if (refIndex >= refBases.length) refBase = 'N';
+            else refBase = refBases[refIndex];
+
+            final byte readBase = bases[i + fromPosInRead];
+
+            if (readBase != refBase) {
+                final boolean isSubstitution = SequenceUtil.isUpperACGTN(readBase) && SequenceUtil.isUpperACGTN(refBase);
+                if (isSubstitution) {
+                    features.add(new Substitution(oneBasedPositionInRead, readBase, refBase));
+                } else {
+                    final byte score = qualityScore[i + fromPosInRead];
+                    features.add(new ReadBase(oneBasedPositionInRead, readBase, score));
                 }
             }
-
-            if (qualityAdded) landedTotalScores++;
         }
     }
 
-    public long getLandedRefMaskScores() {
-        return landedRefMaskScores;
-    }
-
-    public long getLandedTotalScores() {
-        return landedTotalScores;
-    }
-
     public byte[] getRefBases() {
         return refBases;
     }
@@ -350,14 +314,6 @@ public class Sam2CramRecordFactory {
         this.refBases = refBases;
     }
 
-    public byte[] getRefSNPs() {
-        return refSNPs;
-    }
-
-    public void setRefSNPs(final byte[] refSNPs) {
-        this.refSNPs = refSNPs;
-    }
-
     public Map<String, Integer> getReadGroupMap() {
         return readGroupMap;
     }
diff --git a/src/main/java/htsjdk/samtools/cram/encoding/ByteArrayLenEncoding.java b/src/main/java/htsjdk/samtools/cram/encoding/ByteArrayLenEncoding.java
index 0c76a5b..0c45577 100644
--- a/src/main/java/htsjdk/samtools/cram/encoding/ByteArrayLenEncoding.java
+++ b/src/main/java/htsjdk/samtools/cram/encoding/ByteArrayLenEncoding.java
@@ -60,6 +60,7 @@ public class ByteArrayLenEncoding implements Encoding<byte[]> {
         return new EncodingParams(ID, byteArrayOutputStream.toByteArray());
     }
 
+    @Override
     public byte[] toByteArray() {
         final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
         try {
@@ -78,6 +79,7 @@ public class ByteArrayLenEncoding implements Encoding<byte[]> {
         return byteArrayOutputStream.toByteArray();
     }
 
+    @Override
     public void fromByteArray(final byte[] data) {
         final ByteBuffer buffer = ByteBuffer.wrap(data);
 
diff --git a/src/main/java/htsjdk/samtools/cram/encoding/ByteArrayStopEncoding.java b/src/main/java/htsjdk/samtools/cram/encoding/ByteArrayStopEncoding.java
index c46d967..c62334d 100644
--- a/src/main/java/htsjdk/samtools/cram/encoding/ByteArrayStopEncoding.java
+++ b/src/main/java/htsjdk/samtools/cram/encoding/ByteArrayStopEncoding.java
@@ -56,6 +56,7 @@ public class ByteArrayStopEncoding implements Encoding<byte[]> {
         return new EncodingParams(ID, e.toByteArray());
     }
 
+    @Override
     public byte[] toByteArray() {
         final ByteBuffer buf = ByteBuffer.allocate(1024);
         buf.order(ByteOrder.LITTLE_ENDIAN);
@@ -69,6 +70,7 @@ public class ByteArrayStopEncoding implements Encoding<byte[]> {
         return array;
     }
 
+    @Override
     public void fromByteArray(final byte[] data) {
         final ByteBuffer buf = ByteBuffer.wrap(data);
         buf.order(ByteOrder.LITTLE_ENDIAN);
diff --git a/src/main/java/htsjdk/samtools/cram/encoding/ExternalByteArrayEncoding.java b/src/main/java/htsjdk/samtools/cram/encoding/ExternalByteArrayEncoding.java
index 2fc707c..107a484 100644
--- a/src/main/java/htsjdk/samtools/cram/encoding/ExternalByteArrayEncoding.java
+++ b/src/main/java/htsjdk/samtools/cram/encoding/ExternalByteArrayEncoding.java
@@ -38,10 +38,12 @@ public class ExternalByteArrayEncoding implements Encoding<byte[]> {
         return new EncodingParams(encodingId, e.toByteArray());
     }
 
+    @Override
     public byte[] toByteArray() {
         return ITF8.writeUnsignedITF8(contentId);
     }
 
+    @Override
     public void fromByteArray(final byte[] data) {
         contentId = ITF8.readUnsignedITF8(data);
     }
diff --git a/src/main/java/htsjdk/samtools/cram/encoding/ExternalByteEncoding.java b/src/main/java/htsjdk/samtools/cram/encoding/ExternalByteEncoding.java
index 0fed720..75a63cc 100644
--- a/src/main/java/htsjdk/samtools/cram/encoding/ExternalByteEncoding.java
+++ b/src/main/java/htsjdk/samtools/cram/encoding/ExternalByteEncoding.java
@@ -38,10 +38,12 @@ public class ExternalByteEncoding implements Encoding<Byte> {
         return new EncodingParams(encodingId, externalByteEncoding.toByteArray());
     }
 
+    @Override
     public byte[] toByteArray() {
         return ITF8.writeUnsignedITF8(contentId);
     }
 
+    @Override
     public void fromByteArray(final byte[] data) {
         contentId = ITF8.readUnsignedITF8(data);
     }
diff --git a/src/main/java/htsjdk/samtools/cram/encoding/ExternalIntegerEncoding.java b/src/main/java/htsjdk/samtools/cram/encoding/ExternalIntegerEncoding.java
index a7c5736..1f0ecba 100644
--- a/src/main/java/htsjdk/samtools/cram/encoding/ExternalIntegerEncoding.java
+++ b/src/main/java/htsjdk/samtools/cram/encoding/ExternalIntegerEncoding.java
@@ -38,10 +38,12 @@ public class ExternalIntegerEncoding implements Encoding<Integer> {
         return new EncodingParams(encodingId, externalIntegerEncoding.toByteArray());
     }
 
+    @Override
     public byte[] toByteArray() {
         return ITF8.writeUnsignedITF8(contentId);
     }
 
+    @Override
     public void fromByteArray(final byte[] data) {
         contentId = ITF8.readUnsignedITF8(data);
     }
diff --git a/src/main/java/htsjdk/samtools/cram/encoding/ExternalLongEncoding.java b/src/main/java/htsjdk/samtools/cram/encoding/ExternalLongEncoding.java
index 402cea8..b3ba54e 100644
--- a/src/main/java/htsjdk/samtools/cram/encoding/ExternalLongEncoding.java
+++ b/src/main/java/htsjdk/samtools/cram/encoding/ExternalLongEncoding.java
@@ -38,10 +38,12 @@ public class ExternalLongEncoding implements Encoding<Long> {
         return new EncodingParams(encodingId, externalLongEncoding.toByteArray());
     }
 
+    @Override
     public byte[] toByteArray() {
         return ITF8.writeUnsignedITF8(contentId);
     }
 
+    @Override
     public void fromByteArray(final byte[] data) {
         contentId = ITF8.readUnsignedITF8(data);
     }
diff --git a/src/main/java/htsjdk/samtools/cram/encoding/GolombRiceIntegerCodec.java b/src/main/java/htsjdk/samtools/cram/encoding/GolombRiceIntegerCodec.java
index e5962a1..579f28b 100644
--- a/src/main/java/htsjdk/samtools/cram/encoding/GolombRiceIntegerCodec.java
+++ b/src/main/java/htsjdk/samtools/cram/encoding/GolombRiceIntegerCodec.java
@@ -38,6 +38,7 @@ class GolombRiceIntegerCodec extends AbstractBitCodec<Integer> {
         mask = ~(~0 << log2m);
     }
 
+    @Override
     public final Integer read(final BitInputStream bitInputStream) throws IOException {
 
         int unary = 0;
diff --git a/src/main/java/htsjdk/samtools/cram/encoding/huffman/HuffmanTree.java b/src/main/java/htsjdk/samtools/cram/encoding/huffman/HuffmanTree.java
index 43500c4..bd4316d 100644
--- a/src/main/java/htsjdk/samtools/cram/encoding/huffman/HuffmanTree.java
+++ b/src/main/java/htsjdk/samtools/cram/encoding/huffman/HuffmanTree.java
@@ -24,6 +24,7 @@ public abstract class HuffmanTree<T> implements Comparable<HuffmanTree<T>> {
         frequency = freq;
     }
 
+    @Override
     public int compareTo(@SuppressWarnings("NullableProblems") final HuffmanTree<T> tree) {
         return frequency - tree.frequency;
     }
diff --git a/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/BaseQualityScore.java b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/BaseQualityScore.java
index 41a69d2..07ee305 100644
--- a/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/BaseQualityScore.java
+++ b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/BaseQualityScore.java
@@ -44,6 +44,7 @@ public class BaseQualityScore implements Serializable, ReadFeature {
         return position;
     }
 
+    @Override
     public void setPosition(final int position) {
         this.position = position;
     }
diff --git a/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/HardClip.java b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/HardClip.java
index 3c3c7ad..0e5678b 100644
--- a/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/HardClip.java
+++ b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/HardClip.java
@@ -41,10 +41,12 @@ public class HardClip implements Serializable, ReadFeature {
         return operator;
     }
 
+    @Override
     public int getPosition() {
         return position;
     }
 
+    @Override
     public void setPosition(final int position) {
         this.position = position;
     }
diff --git a/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/InsertBase.java b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/InsertBase.java
index 5970413..d4a611e 100644
--- a/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/InsertBase.java
+++ b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/InsertBase.java
@@ -42,10 +42,12 @@ public class InsertBase implements Serializable, ReadFeature {
         return operator;
     }
 
+    @Override
     public int getPosition() {
         return position;
     }
 
+    @Override
     public void setPosition(final int position) {
         this.position = position;
     }
diff --git a/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/Insertion.java b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/Insertion.java
index e0182c3..2055ba0 100644
--- a/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/Insertion.java
+++ b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/Insertion.java
@@ -42,10 +42,12 @@ public class Insertion implements Serializable, ReadFeature {
         return operator;
     }
 
+    @Override
     public int getPosition() {
         return position;
     }
 
+    @Override
     public void setPosition(final int position) {
         this.position = position;
     }
diff --git a/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/Padding.java b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/Padding.java
index 85e90fd..f9a201f 100644
--- a/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/Padding.java
+++ b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/Padding.java
@@ -42,10 +42,12 @@ public class Padding implements Serializable, ReadFeature {
         return operator;
     }
 
+    @Override
     public int getPosition() {
         return position;
     }
 
+    @Override
     public void setPosition(final int position) {
         this.position = position;
     }
diff --git a/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/ReadBase.java b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/ReadBase.java
index 73ae208..f56d677 100644
--- a/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/ReadBase.java
+++ b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/ReadBase.java
@@ -46,6 +46,7 @@ public class ReadBase implements Serializable, ReadFeature {
         return position;
     }
 
+    @Override
     public void setPosition(final int position) {
         this.position = position;
     }
diff --git a/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/RefSkip.java b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/RefSkip.java
index 1b99f09..e9e5ae3 100644
--- a/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/RefSkip.java
+++ b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/RefSkip.java
@@ -42,10 +42,12 @@ public class RefSkip implements Serializable, ReadFeature {
         return operator;
     }
 
+    @Override
     public int getPosition() {
         return position;
     }
 
+    @Override
     public void setPosition(final int position) {
         this.position = position;
     }
diff --git a/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/SoftClip.java b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/SoftClip.java
index b142595..7eaac67 100644
--- a/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/SoftClip.java
+++ b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/SoftClip.java
@@ -51,10 +51,12 @@ public class SoftClip implements Serializable, ReadFeature {
         return operator;
     }
 
+    @Override
     public int getPosition() {
         return position;
     }
 
+    @Override
     public void setPosition(final int position) {
         this.position = position;
     }
diff --git a/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/Substitution.java b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/Substitution.java
index b2ed5de..1747c44 100644
--- a/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/Substitution.java
+++ b/src/main/java/htsjdk/samtools/cram/encoding/readfeatures/Substitution.java
@@ -22,6 +22,8 @@ import java.io.Serializable;
 /**
  * A substitution event captured in read coordinates. It is characterized by position in read, read base and reference base.
  * The class is also responsible for converting combinations of read base and reference base into a byte value (code).
+ *
+ * Both reference and read bases must be ACGTN only.
  */
 public class Substitution implements Serializable, ReadFeature {
     public static final int NO_CODE = -1;
@@ -31,11 +33,11 @@ public class Substitution implements Serializable, ReadFeature {
      */
     private int position;
     /**
-     * The read base (ACGTN)
+     * The read base, allowed values are ACGTN.
      */
     private byte base = -1;
     /**
-     * The reference sequence base matching the position of this substitution.
+     * The reference sequence base matching the position of this substitution, allowed values are ACGTN.
      */
     private byte referenceBase = -1;
     /**
@@ -43,6 +45,15 @@ public class Substitution implements Serializable, ReadFeature {
      */
     private byte code = NO_CODE;
 
+    public Substitution() {
+    }
+
+    public Substitution(int position, byte base, byte referenceBase) {
+        this.position = position;
+        this.base = base;
+        this.referenceBase = referenceBase;
+    }
+
     public byte getCode() {
         return code;
     }
@@ -58,10 +69,12 @@ public class Substitution implements Serializable, ReadFeature {
         return operator;
     }
 
+    @Override
     public int getPosition() {
         return position;
     }
 
+    @Override
     public void setPosition(final int position) {
         this.position = position;
     }
diff --git a/src/main/java/htsjdk/samtools/cram/io/CountingInputStream.java b/src/main/java/htsjdk/samtools/cram/io/CountingInputStream.java
index b5e5642..41cb22a 100644
--- a/src/main/java/htsjdk/samtools/cram/io/CountingInputStream.java
+++ b/src/main/java/htsjdk/samtools/cram/io/CountingInputStream.java
@@ -37,42 +37,50 @@ public class CountingInputStream extends InputStream {
         return delegate.read();
     }
 
+    @Override
     public int read(@SuppressWarnings("NullableProblems") final byte[] b) throws IOException {
         final int read = delegate.read(b);
         count += read;
         return read;
     }
 
+    @Override
     public int read(@SuppressWarnings("NullableProblems") final byte[] b, final int off, final int length) throws IOException {
         final int read = delegate.read(b, off, length);
         count += read;
         return read;
     }
 
+    @Override
     public long skip(final long n) throws IOException {
         final long skipped = delegate.skip(n);
         count += skipped;
         return skipped;
     }
 
+    @Override
     public int available() throws IOException {
         return delegate.available();
     }
 
+    @Override
     public void close() throws IOException {
         if (delegate != null)
             delegate.close();
     }
 
+    @Override
     public void mark(final int readLimit) {
         delegate.mark(readLimit);
     }
 
+    @Override
     public void reset() throws IOException {
         delegate.reset();
         count = 0;
     }
 
+    @Override
     public boolean markSupported() {
         return delegate.markSupported();
     }
diff --git a/src/main/java/htsjdk/samtools/cram/io/DefaultBitInputStream.java b/src/main/java/htsjdk/samtools/cram/io/DefaultBitInputStream.java
index 519cf9d..fef9e2b 100644
--- a/src/main/java/htsjdk/samtools/cram/io/DefaultBitInputStream.java
+++ b/src/main/java/htsjdk/samtools/cram/io/DefaultBitInputStream.java
@@ -41,6 +41,7 @@ public class DefaultBitInputStream extends DataInputStream implements BitInputSt
         this.throwEOF = true;
     }
 
+    @Override
     public final boolean readBit() throws IOException {
         if (--nofBufferedBits >= 0)
             return ((byteBuffer >>> nofBufferedBits) & 1) == 1;
@@ -55,6 +56,7 @@ public class DefaultBitInputStream extends DataInputStream implements BitInputSt
         return ((byteBuffer >>> 7) & 1) == 1;
     }
 
+    @Override
     public final int readBits(int n) throws IOException {
         if (n == 0)
             return 0;
@@ -77,6 +79,7 @@ public class DefaultBitInputStream extends DataInputStream implements BitInputSt
         return x & ((1 << n) - 1);
     }
 
+    @Override
     public final long readLongBits(int n) throws IOException {
         if (n > 64)
             throw new RuntimeException("More then 64 bits are requested in one read from bit stream.");
@@ -108,6 +111,7 @@ public class DefaultBitInputStream extends DataInputStream implements BitInputSt
         return x | (byteBuffer >>> nofBufferedBits);
     }
 
+    @Override
     public void reset() {
         nofBufferedBits = 0;
         byteBuffer = 0;
diff --git a/src/main/java/htsjdk/samtools/cram/io/DefaultBitOutputStream.java b/src/main/java/htsjdk/samtools/cram/io/DefaultBitOutputStream.java
index 2d702ee..95d6789 100644
--- a/src/main/java/htsjdk/samtools/cram/io/DefaultBitOutputStream.java
+++ b/src/main/java/htsjdk/samtools/cram/io/DefaultBitOutputStream.java
@@ -53,6 +53,7 @@ public class DefaultBitOutputStream extends OutputStream implements BitOutputStr
                 + Integer.toBinaryString(bufferByte).substring(0, bufferedNumberOfBits);
     }
 
+    @Override
     public void write(final long bitContainer, final int nofBits) throws IOException {
         if (nofBits == 0)
             return;
@@ -95,6 +96,7 @@ public class DefaultBitOutputStream extends OutputStream implements BitOutputStr
         }
     }
 
+    @Override
     public void write(final int bitContainer, final int nofBits) throws IOException {
         write_int_LSB_0(bitContainer, nofBits);
     }
@@ -109,6 +111,7 @@ public class DefaultBitOutputStream extends OutputStream implements BitOutputStr
         }
     }
 
+    @Override
     public void write(byte bitContainer, final int nofBits) throws IOException {
         if (nofBits < 0 || nofBits > 8)
             throw new IOException("Expecting 0 to 8 bits.");
@@ -145,6 +148,7 @@ public class DefaultBitOutputStream extends OutputStream implements BitOutputStr
         write(bit ? (byte) 1 : (byte) 0, 1);
     }
 
+    @Override
     public void write(final boolean bit, final long repeat) throws IOException {
         for (long i = 0; i < repeat; i++)
             write(bit);
diff --git a/src/main/java/htsjdk/samtools/cram/ref/ReferenceSource.java b/src/main/java/htsjdk/samtools/cram/ref/ReferenceSource.java
index e73fb41..61b3a41 100644
--- a/src/main/java/htsjdk/samtools/cram/ref/ReferenceSource.java
+++ b/src/main/java/htsjdk/samtools/cram/ref/ReferenceSource.java
@@ -20,6 +20,7 @@ package htsjdk.samtools.cram.ref;
 import htsjdk.samtools.Defaults;
 import htsjdk.samtools.SAMException;
 import htsjdk.samtools.SAMSequenceRecord;
+import htsjdk.samtools.SAMUtils;
 import htsjdk.samtools.cram.build.Utils;
 import htsjdk.samtools.cram.io.InputStreamUtils;
 import htsjdk.samtools.reference.ReferenceSequence;
@@ -27,6 +28,7 @@ import htsjdk.samtools.reference.ReferenceSequenceFile;
 import htsjdk.samtools.reference.ReferenceSequenceFileFactory;
 import htsjdk.samtools.util.Log;
 import htsjdk.samtools.util.SequenceUtil;
+import htsjdk.samtools.util.StringUtil;
 
 import java.io.File;
 import java.io.IOException;
@@ -52,21 +54,17 @@ import java.util.regex.Pattern;
  */
 public class ReferenceSource implements CRAMReferenceSource {
     private static final Log log = Log.getInstance(ReferenceSource.class);
-    private ReferenceSequenceFile rsFile;
+    private final ReferenceSequenceFile rsFile;
     private int downloadTriesBeforeFailing = 2;
 
-    private final Map<String, WeakReference<byte[]>> cacheW = new HashMap<String, WeakReference<byte[]>>();
-
-    private ReferenceSource() {
-    }
+    private final Map<String, WeakReference<byte[]>> cacheW = new HashMap<>();
 
     public ReferenceSource(final File file) {
         this(file == null ? null : file.toPath());
     }
 
     public ReferenceSource(final Path path) {
-        if (path != null)
-            rsFile = ReferenceSequenceFileFactory.getReferenceSequenceFile(path);
+        this( path == null ? null : ReferenceSequenceFileFactory.getReferenceSequenceFile(path));
     }
 
     public ReferenceSource(final ReferenceSequenceFile rsFile) {
@@ -102,7 +100,7 @@ public class ReferenceSource implements CRAMReferenceSource {
             }
         }
         else if (Defaults.USE_CRAM_REF_DOWNLOAD) {
-            return new ReferenceSource();
+            return new ReferenceSource((ReferenceSequenceFile)null);
         }
         else {
             throw new IllegalStateException(
@@ -124,15 +122,19 @@ public class ReferenceSource implements CRAMReferenceSource {
         return null;
     }
 
-    // Upper case and normalize (-> ACGTN) in-place, and add to the cache
+    // Upper case (in-place), and add to the cache
     private byte[] addToCache(final String sequenceName, final byte[] bases) {
+        // Normalize to upper case only. We can't use the cram normalization utility Utils.normalizeBases, since
+        // we don't want to normalize ambiguity codes, we can't use SamUtils.normalizeBases, since we don't want
+        // to normalize no-call ('.') bases.
         for (int i = 0; i < bases.length; i++) {
-            bases[i] = Utils.normalizeBase(bases[i]);
+            bases[i] = StringUtil.toUpperCase(bases[i]);
         }
-        cacheW.put(sequenceName, new WeakReference<byte[]>(bases));
+        cacheW.put(sequenceName, new WeakReference<>(bases));
         return bases;
     }
 
+    @Override
     public synchronized byte[] getReferenceBases(final SAMSequenceRecord record,
                                                  final boolean tryNameVariants) {
         { // check cache by sequence name:
@@ -249,7 +251,7 @@ public class ReferenceSource implements CRAMReferenceSource {
             Pattern.CASE_INSENSITIVE);
 
     List<String> getVariants(final String name) {
-        final List<String> variants = new ArrayList<String>();
+        final List<String> variants = new ArrayList<>();
 
         if (name.equals("M"))
             variants.add("MT");
diff --git a/src/main/java/htsjdk/samtools/fastq/BasicFastqWriter.java b/src/main/java/htsjdk/samtools/fastq/BasicFastqWriter.java
index 8a5afd3..0c9596a 100644
--- a/src/main/java/htsjdk/samtools/fastq/BasicFastqWriter.java
+++ b/src/main/java/htsjdk/samtools/fastq/BasicFastqWriter.java
@@ -58,12 +58,10 @@ public class BasicFastqWriter implements FastqWriter,Flushable {
 
     @Override
     public void write(final FastqRecord rec) {
-        writer.print(FastqConstants.SEQUENCE_HEADER);
-        writer.println(rec.getReadHeader());
-        writer.println(rec.getReadString());
-        writer.print(FastqConstants.QUALITY_HEADER);
-        writer.println(rec.getBaseQualityHeader() == null ? "" : rec.getBaseQualityHeader());
-        writer.println(rec.getBaseQualityString());
+        // encode without creating a String
+        FastqEncoder.write(writer, rec);
+        // and print a new line
+        writer.println();
         if (writer.checkError()) {
             throw new SAMException("Error in writing fastq file " + path);
         }
diff --git a/src/main/java/htsjdk/samtools/fastq/FastqConstants.java b/src/main/java/htsjdk/samtools/fastq/FastqConstants.java
index f5d4150..4e9b95e 100644
--- a/src/main/java/htsjdk/samtools/fastq/FastqConstants.java
+++ b/src/main/java/htsjdk/samtools/fastq/FastqConstants.java
@@ -29,7 +29,9 @@ package htsjdk.samtools.fastq;
 public class FastqConstants {
     public static final String SEQUENCE_HEADER = "@" ;
     public static final String QUALITY_HEADER = "+" ;
-    
+    public static final String FIRST_OF_PAIR = "/1";
+    public static final String SECOND_OF_PAIR = "/2";
+
     public enum FastqExtensions {
         FASTQ(".fastq"),
         FASTQ_GZ(".fastq.gz"),
diff --git a/src/main/java/htsjdk/samtools/fastq/FastqEncoder.java b/src/main/java/htsjdk/samtools/fastq/FastqEncoder.java
new file mode 100644
index 0000000..fdbd02d
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/fastq/FastqEncoder.java
@@ -0,0 +1,113 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2016 Daniel Gomez-Sanchez
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools.fastq;
+
+import htsjdk.samtools.SAMException;
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.util.SequenceUtil;
+
+import java.io.IOException;
+
+/**
+ * Codec for encoding records into FASTQ format.
+ *
+ * @author Daniel Gomez-Sanchez (magicDGS)
+ */
+public final class FastqEncoder {
+
+    // cannot be instantiated because it is an utility class
+    private FastqEncoder() {}
+
+    /**
+     * Encodes a FastqRecord in the String FASTQ format.
+     */
+    public static String encode(final FastqRecord record) {
+        // reserve some memory based on the read length
+        int capacity = record.getReadLength() * 2 +  5;
+        // reserve some memory based on the read name
+        if (record.getReadName() != null) {
+            capacity += record.getReadName().length();
+        }
+        return write(new StringBuilder(capacity), record).toString();
+    }
+
+    /**
+     * Writes a FastqRecord into the Appendable output.
+     * @throws SAMException if any I/O error occurs.
+     */
+    public static Appendable write(final Appendable out,final FastqRecord record) {
+        final String readName = record.getReadName();
+        final String readString = record.getReadString();
+        final String qualHeader = record.getBaseQualityHeader();
+        final String qualityString = record.getBaseQualityString();
+        try {
+            return out.append(FastqConstants.SEQUENCE_HEADER)
+                    .append(readName == null ? "" : readName).append('\n')
+                    .append(readString == null ? "" : readString).append('\n')
+                    .append(FastqConstants.QUALITY_HEADER)
+                    .append(qualHeader == null ? "" : qualHeader).append('\n')
+                    .append(qualityString == null ? "" : qualityString);
+        } catch (IOException e) {
+            throw new SAMException(e);
+        }
+    }
+
+    /**
+     * Encodes a SAMRecord in the String FASTQ format.
+     * @see #encode(FastqRecord)
+     * @see #asSAMRecord(FastqRecord, SAMFileHeader)
+     */
+    public static String encode(final SAMRecord record) {
+        return encode(asFastqRecord(record));
+    }
+
+    /**
+     * Converts a {@link SAMRecord} into a {@link FastqRecord}.
+     */
+    public static FastqRecord asFastqRecord(final SAMRecord record) {
+        String readName = record.getReadName();
+        if(record.getReadPairedFlag() && (record.getFirstOfPairFlag() || record.getSecondOfPairFlag())) {
+            readName += (record.getFirstOfPairFlag()) ? FastqConstants.FIRST_OF_PAIR : FastqConstants.SECOND_OF_PAIR;
+        }
+        return new FastqRecord(readName, record.getReadString(), null, record.getBaseQualityString());
+    }
+
+    /**
+     * Converts a {@link FastqRecord} into a simple unmapped {@link SAMRecord}.
+     */
+    public static SAMRecord asSAMRecord(final FastqRecord record, final SAMFileHeader header) {
+        // construct the SAMRecord and set the unmapped flag
+        final SAMRecord samRecord = new SAMRecord(header);
+        samRecord.setReadUnmappedFlag(true);
+        // get the read name from the FastqRecord correctly formatted
+        final String readName = SequenceUtil.getSamReadNameFromFastqHeader(record.getReadName());
+        // set the basic information from the FastqRecord
+        samRecord.setReadName(readName);
+        samRecord.setReadBases(record.getReadBases());
+        samRecord.setBaseQualities(record.getBaseQualities());
+        return samRecord;
+    }
+
+}
diff --git a/src/main/java/htsjdk/samtools/fastq/FastqReader.java b/src/main/java/htsjdk/samtools/fastq/FastqReader.java
index 8086dfa..c5d52f8 100755
--- a/src/main/java/htsjdk/samtools/fastq/FastqReader.java
+++ b/src/main/java/htsjdk/samtools/fastq/FastqReader.java
@@ -41,6 +41,22 @@ import java.util.NoSuchElementException;
  * directly.  It is provided so that this class can be used in Java for-each loop.
  */
 public class FastqReader implements Iterator<FastqRecord>, Iterable<FastqRecord>, Closeable {
+    /** Enum of the types of lines we see in Fastq. */
+    protected enum LineType {
+        SequenceHeader("Sequence Header"),
+        SequenceLine("Sequence Line"),
+        QualityHeader("Quality Header"),
+        QualityLine("Quality Line");
+
+        private String printable;
+
+        LineType(String printable) {
+            this.printable = printable;
+        }
+
+        @Override public String toString() { return this.printable; }
+    }
+
     final private File fastqFile;
     final private BufferedReader reader;
     private FastqRecord nextRecord;
@@ -58,10 +74,7 @@ public class FastqReader implements Iterator<FastqRecord>, Iterable<FastqRecord>
      * @param skipBlankLines should we skip blank lines ?
      */
     public FastqReader(final File file, final boolean skipBlankLines) {
-        this.skipBlankLines=skipBlankLines;
-        fastqFile = file;
-        reader = IOUtil.openFileForBufferedReading(fastqFile);
-        nextRecord = readNextRecord();
+        this(file, IOUtil.openFileForBufferedReading(file), skipBlankLines);
     }
 
     public FastqReader(final BufferedReader reader) {
@@ -77,8 +90,8 @@ public class FastqReader implements Iterator<FastqRecord>, Iterable<FastqRecord>
     public FastqReader(final File file, final BufferedReader reader,boolean skipBlankLines) {
         this.fastqFile = file;
         this.reader = reader;
-        this.nextRecord = readNextRecord();
         this.skipBlankLines = skipBlankLines;
+        this.nextRecord = readNextRecord();
     }
 
     public FastqReader(final File file, final BufferedReader reader) {
@@ -87,7 +100,6 @@ public class FastqReader implements Iterator<FastqRecord>, Iterable<FastqRecord>
 
     private FastqRecord readNextRecord() {
         try {
-
             // Read sequence header
             final String seqHeader = readLineConditionallySkippingBlanks();
             if (seqHeader == null) return null ;
@@ -95,23 +107,23 @@ public class FastqReader implements Iterator<FastqRecord>, Iterable<FastqRecord>
                 throw new SAMException(error("Missing sequence header"));
             }
             if (!seqHeader.startsWith(FastqConstants.SEQUENCE_HEADER)) {
-                throw new SAMException(error("Sequence header must start with "+ FastqConstants.SEQUENCE_HEADER+": "+seqHeader));
+                throw new SAMException(error("Sequence header must start with " + FastqConstants.SEQUENCE_HEADER + ": " + seqHeader));
             }
 
             // Read sequence line
             final String seqLine = readLineConditionallySkippingBlanks();
-            checkLine(seqLine,"sequence line");
+            checkLine(seqLine, LineType.SequenceLine);
 
             // Read quality header
             final String qualHeader = readLineConditionallySkippingBlanks();
-            checkLine(qualHeader,"quality header");
+            checkLine(qualHeader, LineType.QualityHeader);
             if (!qualHeader.startsWith(FastqConstants.QUALITY_HEADER)) {
-                throw new SAMException(error("Quality header must start with "+ FastqConstants.QUALITY_HEADER+": "+qualHeader));
+                throw new SAMException(error("Quality header must start with " + FastqConstants.QUALITY_HEADER + ": "+ qualHeader));
             }
 
             // Read quality line
             final String qualLine = readLineConditionallySkippingBlanks();
-            checkLine(qualLine,"quality line");
+            checkLine(qualLine, LineType.QualityLine);
 
             // Check sequence and quality lines are same length
             if (seqLine.length() != qualLine.length()) {
@@ -124,12 +136,14 @@ public class FastqReader implements Iterator<FastqRecord>, Iterable<FastqRecord>
             return frec ;
 
         } catch (IOException e) {
-            throw new SAMException(String.format("Error reading fastq '%s'", getAbsolutePath()), e);
+            throw new SAMException(error(e.getMessage()), e);
         }
     }
 
+    @Override
     public boolean hasNext() { return nextRecord != null; }
 
+    @Override
     public FastqRecord next() {
         if (!hasNext()) {
             throw new NoSuchElementException("next() called when !hasNext()");
@@ -139,6 +153,7 @@ public class FastqReader implements Iterator<FastqRecord>, Iterable<FastqRecord>
         return rec;
     }
 
+    @Override
     public void remove() { throw new UnsupportedOperationException("Unsupported operation"); }
 
     /**
@@ -146,6 +161,7 @@ public class FastqReader implements Iterator<FastqRecord>, Iterable<FastqRecord>
      * start iteration from the beginning of the file.  Developers should probably not call iterator()
      * directly.  It is provided so that this class can be used in Java for-each loop.
      */
+    @Override
     public Iterator<FastqRecord> iterator() { return this; }
 
     public int getLineNumber() { return line ; }
@@ -161,21 +177,23 @@ public class FastqReader implements Iterator<FastqRecord>, Iterable<FastqRecord>
         try {
             reader.close();
         } catch (IOException e) {
-            throw new SAMException("IO problem in fastq file "+getAbsolutePath(), e);
+            throw new SAMException(error(e.getMessage()), e);
         }
     }
 
-    private void checkLine(final String line, final String kind) {
+    /** Checks that the line is neither null (representing EOF) or empty (blank line in file). */
+    protected void checkLine(final String line, final LineType kind) {
         if (line == null) {
-            throw new SAMException(error("File is too short - missing "+kind+" line"));
+            throw new SAMException(error("File is too short - missing " + kind));
         }
         if (StringUtil.isBlank(line)) {
-            throw new SAMException(error("Missing "+kind));
+            throw new SAMException(error("Missing " + kind));
         }
     }
 
-    private String error(final String msg) {
-        return msg + " at line "+line+" in fastq "+getAbsolutePath();
+    /** Generates an error message with line number information. */
+    protected String error(final String msg) {
+        return msg + " at line " + line + " in fastq " + getAbsolutePath();
     }
 
     private String getAbsolutePath() {
@@ -194,6 +212,6 @@ public class FastqReader implements Iterator<FastqRecord>, Iterable<FastqRecord>
 
     @Override
     public String toString() {
-        return "FastqReader["+(this.fastqFile == null?"":this.fastqFile)+ " Line:"+getLineNumber()+"]";
+        return "FastqReader[" + (this.fastqFile == null ? "" : this.fastqFile) + " Line:" + getLineNumber() + "]";
     }
 }
diff --git a/src/main/java/htsjdk/samtools/fastq/FastqRecord.java b/src/main/java/htsjdk/samtools/fastq/FastqRecord.java
index b1d3f75..9fbcd39 100755
--- a/src/main/java/htsjdk/samtools/fastq/FastqRecord.java
+++ b/src/main/java/htsjdk/samtools/fastq/FastqRecord.java
@@ -23,62 +23,169 @@
  */
 package htsjdk.samtools.fastq;
 
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.SAMUtils;
+import htsjdk.samtools.util.StringUtil;
+
 import java.io.Serializable;
 
 /**
- * Represents a fastq record, fairly literally, i.e. without any conversion.
+ * Simple representation of a FASTQ record, without any conversion
  */
 public class FastqRecord implements Serializable {
     private static final long serialVersionUID = 1L;
-    private final String seqHeaderPrefix;
-    private final String seqLine;
-    private final String qualHeaderPrefix;
-    private final String qualLine;
-
-    public FastqRecord(final String seqHeaderPrefix, final String seqLine, final String qualHeaderPrefix, final String qualLine) {
-        if (seqHeaderPrefix != null && !seqHeaderPrefix.isEmpty()) this.seqHeaderPrefix = seqHeaderPrefix;
-        else this.seqHeaderPrefix = null;
-        if (qualHeaderPrefix != null && !qualHeaderPrefix.isEmpty()) this.qualHeaderPrefix = qualHeaderPrefix;
-        else this.qualHeaderPrefix = null;
-        this.seqLine = seqLine ;
-        this.qualLine = qualLine ;
-    }
-    
-    /** copy constructor */
+    private final String readName;
+    private final String readString;
+    private final String qualityHeader;
+    private final String baseQualityString;
+
+    /**
+     * Default constructor
+     *
+     * @param readName      the read name (without {@link FastqConstants#SEQUENCE_HEADER})
+     * @param readBases     the read sequence bases
+     * @param qualityHeader the quality header (without {@link FastqConstants#SEQUENCE_HEADER})
+     * @param baseQualities the base quality scores
+     */
+    public FastqRecord(final String readName, final String readBases, final String qualityHeader, final String baseQualities) {
+        if (readName != null && !readName.isEmpty()) {
+            this.readName = readName;
+        } else {
+            this.readName = null;
+        }
+        if (qualityHeader != null && !qualityHeader.isEmpty()) {
+            this.qualityHeader = qualityHeader;
+        } else {
+            this.qualityHeader = null;
+        }
+        this.readString = readBases;
+        this.baseQualityString = baseQualities;
+    }
+
+    /**
+     * Constructor for byte[] arrays
+     *
+     * @param readName      the read name (without {@link FastqConstants#SEQUENCE_HEADER})
+     * @param readBases     the read sequence bases as ASCII bytes ACGTN=.
+     * @param qualityHeader the quality header (without {@link FastqConstants#SEQUENCE_HEADER})
+     * @param baseQualities the base qualities as binary PHRED scores (not ASCII)
+     */
+    public FastqRecord(final String readName, final byte[] readBases, final String qualityHeader, final byte[] baseQualities) {
+        this(readName, StringUtil.bytesToString(readBases), qualityHeader, SAMUtils.phredToFastq(baseQualities));
+    }
+
+    /**
+     * Copy constructor
+     *
+     * @param other record to copy
+     */
     public FastqRecord(final FastqRecord other) {
-        if( other == null ) throw new IllegalArgumentException("new FastqRecord(null)");
-        this.seqHeaderPrefix = other.seqHeaderPrefix;
-        this.seqLine = other.seqLine;
-        this.qualHeaderPrefix = other.qualHeaderPrefix;
-        this.qualLine = other.qualLine;
-    }
-
-    /** @return the read name */
-    public String getReadHeader() { return seqHeaderPrefix; }
-    /** @return the read DNA sequence */
-    public String getReadString() { return seqLine; }
-    /** @return the quality header */
-    public String getBaseQualityHeader() { return qualHeaderPrefix; }
-    /** @return the quality string */
-    public String getBaseQualityString() { return qualLine; }
-    /** shortcut to getReadString().length() */
-    public int length() { return this.seqLine==null?0:this.seqLine.length();}
-    
+        if (other == null) {
+            throw new IllegalArgumentException("new FastqRecord(null)");
+        }
+        this.readName = other.readName;
+        this.readString = other.readString;
+        this.qualityHeader = other.qualityHeader;
+        this.baseQualityString = other.baseQualityString;
+    }
+
+    /**
+     * @return the read name
+     * @deprecated since 02/2017. Use {@link #getReadName()} instead
+     */
+    @Deprecated
+    public String getReadHeader() {
+        return getReadName();
+    }
+
+    /**
+     * Get the read name
+     *
+     * @return the read name
+     */
+    public String getReadName() {
+        return readName;
+    }
+
+    /**
+     * Get the DNA sequence
+     *
+     * @return read sequence as a string of ACGTN=.
+     */
+    public String getReadString() {
+        return readString;
+    }
+
+    /**
+     * Get the DNA sequence.
+     *
+     * @return read sequence as ASCII bytes ACGTN=; {@link SAMRecord#NULL_SEQUENCE} if no bases are present.
+     */
+    public byte[] getReadBases() {
+        return (readString == null) ? SAMRecord.NULL_SEQUENCE : StringUtil.stringToBytes(readString);
+    }
+
+    /**
+     * Get the base qualities encoded as a FASTQ string
+     *
+     * @return the quality string
+     */
+    public String getBaseQualityString() {
+        return baseQualityString;
+    }
+
+    /**
+     * Get the base qualities as binary PHRED scores (not ASCII)
+     *
+     * @return the base quality; {@link SAMRecord#NULL_QUALS} if no bases are present.
+     */
+    public byte[] getBaseQualities() {
+        return (baseQualityString == null) ? SAMRecord.NULL_QUALS : SAMUtils.fastqToPhred(baseQualityString);
+    }
+
+    /**
+     * Get the read length
+     *
+     * @return number of bases in the read
+     */
+    public int getReadLength() {
+        return (readString == null) ? 0 : readString.length();
+    }
+
+    /**
+     * Get the base quality header
+     *
+     * @return the base quality header
+     */
+    public String getBaseQualityHeader() {
+        return qualityHeader;
+    }
+
+    /**
+     * shortcut to getReadString().length()
+     *
+     * @deprecated since 02/2017. Use {@link #getReadLength()} instead
+     */
+    @Deprecated
+    public int length() {
+        return getReadLength();
+    }
+
     @Override
     public int hashCode() {
         final int prime = 31;
         int result = 1;
         result = prime
                 * result
-                + ((qualHeaderPrefix == null) ? 0 : qualHeaderPrefix.hashCode());
+                + ((qualityHeader == null) ? 0 : qualityHeader.hashCode());
         result = prime * result
-                + ((qualLine == null) ? 0 : qualLine.hashCode());
+                + ((baseQualityString == null) ? 0 : baseQualityString.hashCode());
         result = prime * result
-                + ((seqHeaderPrefix == null) ? 0 : seqHeaderPrefix.hashCode());
-        result = prime * result + ((seqLine == null) ? 0 : seqLine.hashCode());
+                + ((readName == null) ? 0 : readName.hashCode());
+        result = prime * result + ((readString == null) ? 0 : readString.hashCode());
         return result;
     }
-    
+
     @Override
     public boolean equals(Object obj) {
         if (this == obj)
@@ -88,37 +195,45 @@ public class FastqRecord implements Serializable {
         if (getClass() != obj.getClass())
             return false;
         FastqRecord other = (FastqRecord) obj;
-        if (seqLine == null) {
-            if (other.seqLine != null)
+        if (readString == null) {
+            if (other.readString != null)
                 return false;
-        } else if (!seqLine.equals(other.seqLine))
+        } else if (!readString.equals(other.readString))
             return false;
-        if (qualHeaderPrefix == null) {
-            if (other.qualHeaderPrefix != null)
+        if (qualityHeader == null) {
+            if (other.qualityHeader != null)
                 return false;
-        } else if (!qualHeaderPrefix.equals(other.qualHeaderPrefix))
+        } else if (!qualityHeader.equals(other.qualityHeader))
             return false;
-        if (qualLine == null) {
-            if (other.qualLine != null)
+        if (baseQualityString == null) {
+            if (other.baseQualityString != null)
                 return false;
-        } else if (!qualLine.equals(other.qualLine))
+        } else if (!baseQualityString.equals(other.baseQualityString))
             return false;
-        if (seqHeaderPrefix == null) {
-            if (other.seqHeaderPrefix != null)
+        if (readName == null) {
+            if (other.readName != null)
                 return false;
-        } else if (!seqHeaderPrefix.equals(other.seqHeaderPrefix))
+        } else if (!readName.equals(other.readName))
             return false;
-        
+
         return true;
     }
-    
+
+    /**
+     * Returns the record as the String FASTQ format.
+     * @see FastqEncoder#encode(FastqRecord)
+     */
+    public String toFastQString() {
+        return FastqEncoder.encode(this);
+    }
+
+    /**
+     * Returns {@link #toFastQString()}
+     */
     @Override
     public String toString() {
-        return new StringBuilder().
-                append(FastqConstants.SEQUENCE_HEADER).append(this.seqHeaderPrefix==null?"":this.seqHeaderPrefix).append('\n').
-                append(this.seqLine==null?"":this.seqLine).append('\n').
-                append(FastqConstants.QUALITY_HEADER).append(this.qualHeaderPrefix==null?"":this.qualHeaderPrefix).append('\n').
-                append(this.qualLine==null?"":this.qualLine).
-                toString();
-        }
+        // TODO: this should be change in the future for a simpler and more informative form such as
+        // TODO: return String.format("%s: %s bp", readName, getReadLength());
+        return toFastQString();
+    }
 }
diff --git a/src/main/java/htsjdk/samtools/fastq/FastqWriter.java b/src/main/java/htsjdk/samtools/fastq/FastqWriter.java
index e37aec5..3b2a1b6 100644
--- a/src/main/java/htsjdk/samtools/fastq/FastqWriter.java
+++ b/src/main/java/htsjdk/samtools/fastq/FastqWriter.java
@@ -9,5 +9,6 @@ import java.io.Closeable;
  */
 public interface FastqWriter extends Closeable {
     void write(final FastqRecord rec);
+    @Override
     void close();
 }
diff --git a/src/main/java/htsjdk/samtools/filter/AggregateFilter.java b/src/main/java/htsjdk/samtools/filter/AggregateFilter.java
index f396c59..62b804b 100644
--- a/src/main/java/htsjdk/samtools/filter/AggregateFilter.java
+++ b/src/main/java/htsjdk/samtools/filter/AggregateFilter.java
@@ -51,6 +51,7 @@ public class AggregateFilter implements SamRecordFilter {
      * @param record    the SAMRecord to evaluate
      * @return  true if the SAMRecord matches at least one filter, otherwise false
      */
+    @Override
     public boolean filterOut(final SAMRecord record) {
         for (final SamRecordFilter filter : filters) {
             if (filter.filterOut(record)) {
@@ -68,6 +69,7 @@ public class AggregateFilter implements SamRecordFilter {
      *
      * @return true if the SAMRecords matches the filter, otherwise false
      */
+    @Override
     public boolean filterOut(final SAMRecord first, final SAMRecord second) {
          for (final SamRecordFilter filter : filters) {
             if (filter.filterOut(first, second)) {
diff --git a/src/main/java/htsjdk/samtools/filter/AlignedFilter.java b/src/main/java/htsjdk/samtools/filter/AlignedFilter.java
index c70453d..cebdc0b 100644
--- a/src/main/java/htsjdk/samtools/filter/AlignedFilter.java
+++ b/src/main/java/htsjdk/samtools/filter/AlignedFilter.java
@@ -45,6 +45,7 @@ public class AlignedFilter implements SamRecordFilter {
      *
      * @return true if the SAMRecord matches the filter, otherwise false
      */
+    @Override
     public boolean filterOut(final SAMRecord record) {
         if (includeAligned) {
             if (!record.getReadUnmappedFlag()) {
@@ -68,6 +69,7 @@ public class AlignedFilter implements SamRecordFilter {
      *
      * @return true if the SAMRecords matches the filter, otherwise false
      */
+    @Override
     public boolean filterOut(final SAMRecord first, final SAMRecord second) {
 
         if (includeAligned) {
diff --git a/src/main/java/htsjdk/samtools/filter/DuplicateReadFilter.java b/src/main/java/htsjdk/samtools/filter/DuplicateReadFilter.java
index c79b3cc..2fe773f 100644
--- a/src/main/java/htsjdk/samtools/filter/DuplicateReadFilter.java
+++ b/src/main/java/htsjdk/samtools/filter/DuplicateReadFilter.java
@@ -34,6 +34,7 @@ public class DuplicateReadFilter implements SamRecordFilter {
      * @param record the SAMRecord to evaluate
      * @return true if the SAMRecord matches the filter, otherwise false
      */
+    @Override
     public boolean filterOut(final SAMRecord record) {
         return record.getDuplicateReadFlag();
     }
@@ -46,6 +47,7 @@ public class DuplicateReadFilter implements SamRecordFilter {
      *
      * @return true if the SAMRecords matches the filter, otherwise false
      */
+    @Override
     public boolean filterOut(final SAMRecord first, final SAMRecord second) {
         throw new UnsupportedOperationException("Paired DuplicateReadFilter filter not implemented!");
     }
diff --git a/src/main/java/htsjdk/samtools/filter/FailsVendorReadQualityFilter.java b/src/main/java/htsjdk/samtools/filter/FailsVendorReadQualityFilter.java
index 7c6825c..661286d 100644
--- a/src/main/java/htsjdk/samtools/filter/FailsVendorReadQualityFilter.java
+++ b/src/main/java/htsjdk/samtools/filter/FailsVendorReadQualityFilter.java
@@ -38,6 +38,7 @@ public class FailsVendorReadQualityFilter implements SamRecordFilter {
      * @param record    the SAMRecord to evaluate
      * @return  true if the SAMRecord matches the filter, otherwise false
      */
+    @Override
     public boolean filterOut(final SAMRecord record) {
         return record.getReadFailsVendorQualityCheckFlag();
     }
@@ -50,6 +51,7 @@ public class FailsVendorReadQualityFilter implements SamRecordFilter {
      *
      * @return true if the SAMRecords matches the filter, otherwise false
      */
+    @Override
     public boolean filterOut(final SAMRecord first, final SAMRecord second) {
         // if either fails, exclude them both
         return (first.getReadFailsVendorQualityCheckFlag() || second.getReadFailsVendorQualityCheckFlag());
diff --git a/src/main/java/htsjdk/samtools/filter/FilteringIterator.java b/src/main/java/htsjdk/samtools/filter/FilteringIterator.java
index 3ce9f96..4cdaebe 100644
--- a/src/main/java/htsjdk/samtools/filter/FilteringIterator.java
+++ b/src/main/java/htsjdk/samtools/filter/FilteringIterator.java
@@ -36,7 +36,7 @@ import java.util.Iterator;
  *
  * @author Kathleen Tibbetts
  *
- * use {@link FilteringSamIterator} instead
+ * @deprecated use {@link FilteringSamIterator} instead
  */
 
 @Deprecated /** use {@link FilteringSamIterator} instead **/
diff --git a/src/main/java/htsjdk/samtools/filter/FilteringSamIterator.java b/src/main/java/htsjdk/samtools/filter/FilteringSamIterator.java
index 7ac1c0a..a70156a 100644
--- a/src/main/java/htsjdk/samtools/filter/FilteringSamIterator.java
+++ b/src/main/java/htsjdk/samtools/filter/FilteringSamIterator.java
@@ -87,6 +87,7 @@ public class FilteringSamIterator implements CloseableIterator<SAMRecord> {
      *
      * @return true if the iteration has more elements.  Otherwise returns false.
      */
+    @Override
     public boolean hasNext() {
         return next != null;
     }
@@ -98,6 +99,7 @@ public class FilteringSamIterator implements CloseableIterator<SAMRecord> {
      * @throws java.util.NoSuchElementException
      *
      */
+    @Override
     public SAMRecord next() {
         if (next == null) {
             throw new NoSuchElementException("Iterator has no more elements.");
@@ -112,10 +114,12 @@ public class FilteringSamIterator implements CloseableIterator<SAMRecord> {
      *
      * @throws UnsupportedOperationException
      */
+    @Override
     public void remove() {
         throw new UnsupportedOperationException("Remove() not supported by FilteringSamIterator");
     }
 
+    @Override
     public void close() {
         CloserUtil.close(iterator);
     }
diff --git a/src/main/java/htsjdk/samtools/filter/IntervalFilter.java b/src/main/java/htsjdk/samtools/filter/IntervalFilter.java
index ff3620a..ef5c98a 100644
--- a/src/main/java/htsjdk/samtools/filter/IntervalFilter.java
+++ b/src/main/java/htsjdk/samtools/filter/IntervalFilter.java
@@ -65,6 +65,7 @@ public class IntervalFilter implements SamRecordFilter {
      * @param record the SAMRecord to evaluate
      * @return true if the SAMRecord matches the filter, otherwise false
      */
+    @Override
     public boolean filterOut(final SAMRecord record) {
         while (currentInterval != null &&
                 (currentSequenceIndex < record.getReferenceIndex() ||
@@ -93,6 +94,7 @@ public class IntervalFilter implements SamRecordFilter {
      *
      * @return true if the SAMRecords matches the filter, otherwise false
      */
+    @Override
     public boolean filterOut(final SAMRecord first, final SAMRecord second) {
         // This can never be implemented because if the bam is coordinate sorted,
         // which it has to be for this filter, it will never get both the first and second reads together
diff --git a/src/main/java/htsjdk/samtools/filter/IntervalKeepPairFilter.java b/src/main/java/htsjdk/samtools/filter/IntervalKeepPairFilter.java
index 5a7961b..c4e01aa 100644
--- a/src/main/java/htsjdk/samtools/filter/IntervalKeepPairFilter.java
+++ b/src/main/java/htsjdk/samtools/filter/IntervalKeepPairFilter.java
@@ -65,6 +65,7 @@ public class IntervalKeepPairFilter implements SamRecordFilter {
      * @param record the SAMRecord to evaluate
      * @return true if the SAMRecord matches the filter, otherwise false
      */
+    @Override
     public boolean filterOut(final SAMRecord record) {
         if (record.isSecondaryOrSupplementary()) {
            return true;
@@ -102,6 +103,7 @@ public class IntervalKeepPairFilter implements SamRecordFilter {
      *
      * @return true if both SAMRecords do not overlap the interval list
      */
+    @Override
     public boolean filterOut(final SAMRecord first, final SAMRecord second) {
         return filterOut(first) && filterOut(second);
     }
diff --git a/src/main/java/htsjdk/samtools/filter/NotPrimaryAlignmentFilter.java b/src/main/java/htsjdk/samtools/filter/NotPrimaryAlignmentFilter.java
index 0f2364c..cda45e0 100644
--- a/src/main/java/htsjdk/samtools/filter/NotPrimaryAlignmentFilter.java
+++ b/src/main/java/htsjdk/samtools/filter/NotPrimaryAlignmentFilter.java
@@ -35,6 +35,7 @@ public class NotPrimaryAlignmentFilter implements SamRecordFilter {
      * @param record the SAMRecord to evaluate
      * @return true if the SAMRecord matches the filter, otherwise false
      */
+    @Override
     public boolean filterOut(final SAMRecord record) {
         return record.getNotPrimaryAlignmentFlag();
     }
@@ -47,6 +48,7 @@ public class NotPrimaryAlignmentFilter implements SamRecordFilter {
      *
      * @return true if the SAMRecords matches the filter, otherwise false
      */
+    @Override
     public boolean filterOut(final SAMRecord first, final SAMRecord second) {
         // if either fails, exclude them both
         return (first.getNotPrimaryAlignmentFlag() || second.getNotPrimaryAlignmentFlag());
diff --git a/src/main/java/htsjdk/samtools/filter/ReadNameFilter.java b/src/main/java/htsjdk/samtools/filter/ReadNameFilter.java
index e4b2a20..f6d1e8c 100644
--- a/src/main/java/htsjdk/samtools/filter/ReadNameFilter.java
+++ b/src/main/java/htsjdk/samtools/filter/ReadNameFilter.java
@@ -41,7 +41,7 @@ import java.util.Set;
 public class ReadNameFilter implements SamRecordFilter {
 
     private boolean includeReads = false;
-    private Set<String> readNameFilterSet = new HashSet<String>();
+    private Set<String> readNameFilterSet = new HashSet<>();
 
     public ReadNameFilter(final File readNameFilterFile, final boolean includeReads) {
 
@@ -79,6 +79,7 @@ public class ReadNameFilter implements SamRecordFilter {
      *
      * @return true if the SAMRecord matches the filter, otherwise false
      */
+    @Override
     public boolean filterOut(final SAMRecord record) {
         if (includeReads) {
             if (readNameFilterSet.contains(record.getReadName())) {
@@ -101,6 +102,7 @@ public class ReadNameFilter implements SamRecordFilter {
      *
      * @return true if the pair of records matches filter, otherwise false
      */
+    @Override
     public boolean filterOut(final SAMRecord first, final SAMRecord second) {
         if (includeReads) {
             if (readNameFilterSet.contains(first.getReadName()) &&
diff --git a/src/main/java/htsjdk/samtools/filter/SecondaryAlignmentFilter.java b/src/main/java/htsjdk/samtools/filter/SecondaryAlignmentFilter.java
index d91212d..22741ae 100644
--- a/src/main/java/htsjdk/samtools/filter/SecondaryAlignmentFilter.java
+++ b/src/main/java/htsjdk/samtools/filter/SecondaryAlignmentFilter.java
@@ -9,11 +9,13 @@ public class SecondaryAlignmentFilter implements SamRecordFilter {
     /**
      * Returns true if the read is marked as secondary.
      */
+    @Override
     public boolean filterOut(final SAMRecord record) { return record.getNotPrimaryAlignmentFlag(); }
 
     /**
      * Returns true if either read is marked as secondary.
      */
+    @Override
     public boolean filterOut(final SAMRecord first, final SAMRecord second) {
         return first.getNotPrimaryAlignmentFlag() || second.getNotPrimaryAlignmentFlag();
     }
diff --git a/src/main/java/htsjdk/samtools/filter/SecondaryOrSupplementaryFilter.java b/src/main/java/htsjdk/samtools/filter/SecondaryOrSupplementaryFilter.java
index ae57fd9..b7d21d1 100644
--- a/src/main/java/htsjdk/samtools/filter/SecondaryOrSupplementaryFilter.java
+++ b/src/main/java/htsjdk/samtools/filter/SecondaryOrSupplementaryFilter.java
@@ -13,6 +13,7 @@ public class SecondaryOrSupplementaryFilter  implements SamRecordFilter {
      * @param record the SAMRecord to evaluate
      * @return true if the SAMRecord matches the filter, otherwise false
      */
+    @Override
     public boolean filterOut(final SAMRecord record) {
         return record.isSecondaryOrSupplementary();
     }
@@ -25,6 +26,7 @@ public class SecondaryOrSupplementaryFilter  implements SamRecordFilter {
      *
      * @return true if the SAMRecords matches the filter, otherwise false
      */
+    @Override
     public boolean filterOut(final SAMRecord first, final SAMRecord second) {
         // if either fails, exclude them both
         return first.isSecondaryOrSupplementary() || second.isSecondaryOrSupplementary();
diff --git a/src/main/java/htsjdk/samtools/filter/SolexaNoiseFilter.java b/src/main/java/htsjdk/samtools/filter/SolexaNoiseFilter.java
index bfb31d6..ce169ef 100644
--- a/src/main/java/htsjdk/samtools/filter/SolexaNoiseFilter.java
+++ b/src/main/java/htsjdk/samtools/filter/SolexaNoiseFilter.java
@@ -40,6 +40,7 @@ public class SolexaNoiseFilter implements SamRecordFilter {
      * @param record    the SAMRecord to evaluate
      * @return  true if the SAMRecord matches the filter, otherwise false
      */
+    @Override
     public boolean filterOut(final SAMRecord record) {
         final byte[] sequence = record.getReadBases();
         for (final byte base : sequence) {
@@ -59,6 +60,7 @@ public class SolexaNoiseFilter implements SamRecordFilter {
      *
      * @return true if the SAMRecords matches the filter, otherwise false
      */
+    @Override
     public boolean filterOut(final SAMRecord first, final SAMRecord second) {
         // only filter out the pair if both first and second reads have all As
         return (filterOut(first) && filterOut(second));
diff --git a/src/main/java/htsjdk/samtools/filter/TagFilter.java b/src/main/java/htsjdk/samtools/filter/TagFilter.java
index 5182e83..00ca8a4 100644
--- a/src/main/java/htsjdk/samtools/filter/TagFilter.java
+++ b/src/main/java/htsjdk/samtools/filter/TagFilter.java
@@ -66,6 +66,7 @@ public class TagFilter implements SamRecordFilter {
      * @param record    the SAMRecord to evaluate
      * @return  true if the SAMRecord matches the filter, otherwise false
      */
+    @Override
     public boolean filterOut(SAMRecord record) {
         return values.contains(record.getAttribute(tag));
     }
@@ -78,6 +79,7 @@ public class TagFilter implements SamRecordFilter {
      *
      * @return true if the SAMRecords matches the filter, otherwise false
      */
+    @Override
     public boolean filterOut(final SAMRecord first, final SAMRecord second) {
         // both first and second must have the tag in order for it to be filtered out
          return values.contains(first.getAttribute(tag)) && values.contains(second.getAttribute(tag));
diff --git a/src/main/java/htsjdk/samtools/filter/WholeReadClippedFilter.java b/src/main/java/htsjdk/samtools/filter/WholeReadClippedFilter.java
index 2a1566c..6df3c44 100644
--- a/src/main/java/htsjdk/samtools/filter/WholeReadClippedFilter.java
+++ b/src/main/java/htsjdk/samtools/filter/WholeReadClippedFilter.java
@@ -57,6 +57,7 @@ public class WholeReadClippedFilter implements SamRecordFilter {
      *
      * @return true if the SAMRecords matches the filter, otherwise false
      */
+    @Override
     public boolean filterOut(final SAMRecord first, final SAMRecord second) {
         // if either fails, exclude them both
         return (filterOut(first) || filterOut(second));
diff --git a/src/main/java/htsjdk/samtools/metrics/StringHeader.java b/src/main/java/htsjdk/samtools/metrics/StringHeader.java
index ced1595..949dd41 100644
--- a/src/main/java/htsjdk/samtools/metrics/StringHeader.java
+++ b/src/main/java/htsjdk/samtools/metrics/StringHeader.java
@@ -43,6 +43,7 @@ public class StringHeader implements Header {
         setValue(value);
     }
 
+    @Override
     public void parse(String in) { value = in.trim(); }
     public String toString() { return value; }
 
diff --git a/src/main/java/htsjdk/samtools/metrics/VersionHeader.java b/src/main/java/htsjdk/samtools/metrics/VersionHeader.java
index ae08455..82093aa 100644
--- a/src/main/java/htsjdk/samtools/metrics/VersionHeader.java
+++ b/src/main/java/htsjdk/samtools/metrics/VersionHeader.java
@@ -37,6 +37,7 @@ public class VersionHeader implements Header {
     private String versionedItem;
     private String versionString;
 
+    @Override
     public void parse(String in) {
         String[] fields = in.split("\t");
         this.versionedItem = fields[0];
diff --git a/src/main/java/htsjdk/samtools/reference/AbstractFastaSequenceFile.java b/src/main/java/htsjdk/samtools/reference/AbstractFastaSequenceFile.java
index 86f11fe..736107b 100644
--- a/src/main/java/htsjdk/samtools/reference/AbstractFastaSequenceFile.java
+++ b/src/main/java/htsjdk/samtools/reference/AbstractFastaSequenceFile.java
@@ -93,29 +93,16 @@ abstract class AbstractFastaSequenceFile implements ReferenceSequenceFile {
         if (path == null) {
             return null;
         }
-        // Try and locate the dictionary
-        Path dictionary = path.toAbsolutePath();
-        Path dictionaryExt = path.toAbsolutePath();
-        boolean fileTypeSupported = false;
-        for (final String extension : ReferenceSequenceFileFactory.FASTA_EXTENSIONS) {
-            String filename = dictionary.getFileName().toString();
-            if (filename.endsWith(extension)) {
-                dictionaryExt = dictionary.resolveSibling(filename + IOUtil
-                    .DICT_FILE_EXTENSION);
-                String filenameNoExt = filename.substring(0, filename.lastIndexOf(extension));
-                dictionary = dictionary.resolveSibling(filenameNoExt+ IOUtil.DICT_FILE_EXTENSION);
-                fileTypeSupported = true;
-                break;
-            }
-        }
-        if (!fileTypeSupported)
-            throw new IllegalArgumentException("File is not a supported reference file type: " + path.toAbsolutePath());
-
-        if (Files.exists(dictionary))
+        // Try and locate the dictionary with the default method
+        final Path dictionary = ReferenceSequenceFileFactory.getDefaultDictionaryForReferenceSequence(path); path.toAbsolutePath();
+        if (Files.exists(dictionary)) {
             return dictionary;
+        }
         // try without removing the file extension
-        if (Files.exists(dictionaryExt))
+        final Path dictionaryExt = path.resolveSibling(path.getFileName().toString() + IOUtil.DICT_FILE_EXTENSION);
+        if (Files.exists(dictionaryExt)) {
             return dictionaryExt;
+        }
         else return null;
     }
 
@@ -128,6 +115,7 @@ abstract class AbstractFastaSequenceFile implements ReferenceSequenceFile {
      * Returns the list of sequence records associated with the reference sequence if found
      * otherwise null.
      */
+    @Override
     public SAMSequenceDictionary getSequenceDictionary() {
         return this.sequenceDictionary;
     }
@@ -143,14 +131,17 @@ abstract class AbstractFastaSequenceFile implements ReferenceSequenceFile {
     }
 
     /** default implementation -- override if index is supported */
+    @Override
     public boolean isIndexed() {return false;}
 
     /** default implementation -- override if index is supported */
+    @Override
     public ReferenceSequence getSequence( String contig ) {
         throw new UnsupportedOperationException();
     }
 
     /** default implementation -- override if index is supported */
+    @Override
     public ReferenceSequence getSubsequenceAt( String contig, long start, long stop ) {
         throw new UnsupportedOperationException("Index does not appear to exist for " + getAbsolutePath() + ".  samtools faidx can be used to create an index");
     }
diff --git a/src/main/java/htsjdk/samtools/reference/FastaSequenceFile.java b/src/main/java/htsjdk/samtools/reference/FastaSequenceFile.java
index 72c0583..744d797 100644
--- a/src/main/java/htsjdk/samtools/reference/FastaSequenceFile.java
+++ b/src/main/java/htsjdk/samtools/reference/FastaSequenceFile.java
@@ -62,10 +62,12 @@ public class FastaSequenceFile extends AbstractFastaSequenceFile {
     /**
      * It's good to call this to free up memory.
      */
+    @Override
     public void close() {
         in.close();
     }
 
+    @Override
     public ReferenceSequence nextSequence() {
         this.sequenceIndex += 1;
 
@@ -83,6 +85,7 @@ public class FastaSequenceFile extends AbstractFastaSequenceFile {
         return new ReferenceSequence(name, this.sequenceIndex, bases);
     }
 
+    @Override
     public void reset() {
         this.sequenceIndex = -1;
         this.in.close();
diff --git a/src/main/java/htsjdk/samtools/reference/FastaSequenceIndex.java b/src/main/java/htsjdk/samtools/reference/FastaSequenceIndex.java
index e314fcc..3668fe6 100644
--- a/src/main/java/htsjdk/samtools/reference/FastaSequenceIndex.java
+++ b/src/main/java/htsjdk/samtools/reference/FastaSequenceIndex.java
@@ -31,6 +31,9 @@ import htsjdk.samtools.util.IOUtil;
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
@@ -39,7 +42,7 @@ import java.util.Scanner;
 import java.util.regex.MatchResult;
 
 /**
- * Reads a fasta index file (.fai), as generated by `samtools faidx`.
+ * Reads/writes a fasta index file (.fai), as generated by `samtools faidx`.
  */
 public class FastaSequenceIndex implements Iterable<FastaSequenceIndexEntry> {
     /**
@@ -159,6 +162,27 @@ public class FastaSequenceIndex implements Iterable<FastaSequenceIndexEntry> {
     }
 
     /**
+     * Writes this index to the specified path.
+     *
+     * @param indexFile index file to output the index in the .fai format
+     *
+     * @throws IOException if an IO error occurs.
+     */
+    public void write(final Path indexFile) throws IOException {
+        try (final PrintStream writer = new PrintStream(Files.newOutputStream(indexFile))) {
+            sequenceEntries.values().forEach(se ->
+                    writer.println(String.join("\t",
+                            se.getContig(),
+                            String.valueOf(se.getSize()),
+                            String.valueOf(se.getLocation()),
+                            String.valueOf(se.getBasesPerLine()),
+                            String.valueOf(se.getBytesPerLine()))
+                    )
+            );
+        }
+    }
+
+    /**
      * Does the given contig name have a corresponding entry?
      * @param contigName The contig name for which to search.
      * @return True if contig name is present; false otherwise.
@@ -184,6 +208,7 @@ public class FastaSequenceIndex implements Iterable<FastaSequenceIndexEntry> {
      * Creates an iterator which can iterate through all entries in a fasta index.
      * @return iterator over all fasta index entries.
      */
+    @Override
     public Iterator<FastaSequenceIndexEntry> iterator() {
         return sequenceEntries.values().iterator();
     }
diff --git a/src/main/java/htsjdk/samtools/reference/FastaSequenceIndexCreator.java b/src/main/java/htsjdk/samtools/reference/FastaSequenceIndexCreator.java
new file mode 100644
index 0000000..76e66f4
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/reference/FastaSequenceIndexCreator.java
@@ -0,0 +1,180 @@
+/*
+ * The MIT License (MIT)
+ *
+ * Copyright (c) 2017 Daniel Gomez-Sanchez
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package htsjdk.samtools.reference;
+
+import htsjdk.samtools.SAMException;
+import htsjdk.samtools.SAMSequenceRecord;
+import htsjdk.samtools.util.IOUtil;
+import htsjdk.tribble.readers.AsciiLineReader;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+
+/**
+ * Static methods to create an {@link FastaSequenceIndex}.
+ *
+ * @author Daniel Gomez-Sanchez (magicDGS)
+ */
+public final class FastaSequenceIndexCreator {
+
+    // cannot be instantiated because it is an utility class
+    private FastaSequenceIndexCreator() {}
+
+    /**
+     * Creates a FASTA .fai index for the provided FASTA.
+     *
+     * @param fastaFile the file to build the index from.
+     * @param overwrite if the .fai index already exists override it if {@code true}; otherwise, throws a {@link SAMException}.
+     *
+     * @throws SAMException if the fai file already exists or the file is malformed.
+     * @throws IOException  if an IO error occurs.
+     */
+    public static void create(final Path fastaFile, final boolean overwrite) throws IOException {
+        // get the index to write the file in
+        final Path indexFile = ReferenceSequenceFileFactory.getFastaIndexFileName(fastaFile);
+        if (!overwrite && Files.exists(indexFile)) {
+            // throw an exception if the file already exists
+            throw new SAMException("Index file " + indexFile + " already exists for " + fastaFile);
+        }
+        // build the index
+        final FastaSequenceIndex index = buildFromFasta(fastaFile);
+        index.write(indexFile);
+    }
+
+    /**
+     * Builds a FastaSequenceIndex on the fly from a FASTA file.
+     *
+     * Note: this also alows to create an index for a compressed file, but does not generate the
+     * .gzi index required for use it with samtools.
+     *
+     * @param fastaFile the FASTA file.
+     *
+     * @return a fai index.
+     *
+     * @throws SAMException for formatting errors.
+     * @throws IOException  if an IO error occurs.
+     */
+    public static FastaSequenceIndex buildFromFasta(final Path fastaFile) throws IOException {
+        try(final AsciiLineReader in = AsciiLineReader.from(IOUtil.openFileForReading(fastaFile))) {
+
+            // sanity check reference format:
+            // 1. Non-empty file
+            // 2. Header name starts with >
+            String previous = in.readLine();
+            if (previous == null) {
+                throw new SAMException("Cannot index empty file: " + fastaFile);
+            } else if (previous.charAt(0) != '>') {
+                throw new SAMException("Wrong sequence header: " + previous);
+            }
+
+            // initialize the sequence index
+            int sequenceIndex = -1;
+            // the location should be kept before iterating over the rest of the lines
+            long location = in.getPosition();
+
+            // initialize an empty index and the entry builder to null
+            final FastaSequenceIndex index = new FastaSequenceIndex();
+            FaiEntryBuilder entry = null;
+
+            // read the lines two by two
+            for (String line = in.readLine(); previous != null; line = in.readLine()) {
+                // in this case, the previous line contains a header and the current line the first sequence
+                if (previous.charAt(0) == '>') {
+                    // first entry should be skipped; otherwise it should be added to the index
+                    if (entry != null) index.add(entry.build());
+                    // creates a new entry (and update sequence index)
+                    entry = new FaiEntryBuilder(sequenceIndex++, previous, line, in.getLineTerminatorLength(), location);
+                } else if (line != null && line.charAt(0) == '>') {
+                    // update the location, next iteration the sequence will be handled
+                    location = in.getPosition();
+                } else if (line != null && !line.isEmpty()) {
+                    // update in case it is not a blank-line
+                    entry.updateWithSequence(line, in.getLineTerminatorLength());
+                }
+                // set the previous to the current line
+                previous = line;
+            }
+            // add the last entry
+            index.add(entry.build());
+
+            // and return the index
+            return index;
+        }
+    }
+
+    // utility class for building the FastaSequenceIndexEntry
+    private static class FaiEntryBuilder {
+        private final int index;
+        private final String contig;
+        private final long location;
+        // the bytes per line is the bases per line plus the length of the end of the line
+        private final int basesPerLine;
+        private final int endOfLineLength;
+
+        // the size is updated for each line in the input using updateWithSequence
+        private long size;
+        // flag to check if the supposedly last line was already reached
+        private boolean lessBasesFound;
+
+        private FaiEntryBuilder(final int index, final String header, final String firstSequenceLine, final int endOfLineLength, final long location) {
+            if (header == null || header.charAt(0) != '>') {
+                throw new SAMException("Wrong sequence header: " + header);
+            } else if (firstSequenceLine == null) {
+                throw new SAMException("Empty sequences could not be indexed");
+            }
+            this.index = index;
+            // parse the contig name (without the starting '>' and truncating white-spaces)
+            this.contig =  SAMSequenceRecord.truncateSequenceName(header.substring(1).trim());
+            this.location = location;
+            this.basesPerLine = firstSequenceLine.length();
+            this.endOfLineLength = endOfLineLength;
+            this.size = firstSequenceLine.length();
+            this.lessBasesFound = false;
+        }
+
+        private void updateWithSequence(final String sequence, final int endOfLineLength) {
+            if (this.endOfLineLength != endOfLineLength) {
+                throw new SAMException(String.format("Different end of line for the same sequence was found."));
+            }
+            if (sequence.length() > basesPerLine) {
+                throw new SAMException(String.format("Sequence line for {} was longer than the expected length ({}): {}",
+                        contig, basesPerLine, sequence));
+            } else if (sequence.length() < basesPerLine) {
+                if (lessBasesFound) {
+                    throw new SAMException(String.format("Only last line could have less than {} bases for '{}' sequence, but at least two are different. Last sequence line: {}",
+                            basesPerLine, contig, sequence));
+                }
+                lessBasesFound = true;
+            }
+            // update size
+            this.size += sequence.length();
+        }
+
+        private FastaSequenceIndexEntry build() {
+            return new FastaSequenceIndexEntry(contig, location, size, basesPerLine, basesPerLine + endOfLineLength, index);
+        }
+    }
+}
diff --git a/src/main/java/htsjdk/samtools/reference/IndexedFastaSequenceFile.java b/src/main/java/htsjdk/samtools/reference/IndexedFastaSequenceFile.java
index 60cc3b1..5c31878 100644
--- a/src/main/java/htsjdk/samtools/reference/IndexedFastaSequenceFile.java
+++ b/src/main/java/htsjdk/samtools/reference/IndexedFastaSequenceFile.java
@@ -111,6 +111,7 @@ public class IndexedFastaSequenceFile extends AbstractFastaSequenceFile implemen
         this(path, new FastaSequenceIndex((findRequiredFastaIndexFile(path))));
     }
 
+    @Override
     public boolean isIndexed() {return true;}
 
     private static File findFastaIndex(File fastaFile) {
@@ -135,18 +136,14 @@ public class IndexedFastaSequenceFile extends AbstractFastaSequenceFile implemen
     }
 
     private static Path findFastaIndex(Path fastaFile) {
-        Path indexFile = getFastaIndexFileName(fastaFile);
+        Path indexFile = ReferenceSequenceFileFactory.getFastaIndexFileName(fastaFile);
         if (!Files.exists(indexFile)) return null;
         return indexFile;
     }
 
-    private static Path getFastaIndexFileName(Path fastaFile) {
-        return fastaFile.resolveSibling(fastaFile.getFileName() + ".fai");
-    }
-
     private static Path findRequiredFastaIndexFile(Path fastaFile) throws FileNotFoundException {
         Path ret = findFastaIndex(fastaFile);
-        if (ret == null) throw new FileNotFoundException(getFastaIndexFileName(fastaFile) + " not found.");
+        if (ret == null) throw new FileNotFoundException(ReferenceSequenceFileFactory.getFastaIndexFileName(fastaFile) + " not found.");
         return ret;
     }
 
@@ -190,6 +187,7 @@ public class IndexedFastaSequenceFile extends AbstractFastaSequenceFile implemen
      * Retrieves the sequence dictionary for the fasta file.
      * @return sequence dictionary of the fasta.
      */
+    @Override
     public SAMSequenceDictionary getSequenceDictionary() {
         return sequenceDictionary;
     }
@@ -199,6 +197,7 @@ public class IndexedFastaSequenceFile extends AbstractFastaSequenceFile implemen
      * @param contig contig whose data should be returned.
      * @return The full sequence associated with this contig.
      */
+    @Override
     public ReferenceSequence getSequence( String contig ) {
         return getSubsequenceAt( contig, 1, (int)index.getIndexEntry(contig).getSize() );
     }
@@ -210,6 +209,7 @@ public class IndexedFastaSequenceFile extends AbstractFastaSequenceFile implemen
      * @param stop inclusive, 1-based stop of region.
      * @return The partial reference sequence associated with this range.
      */
+    @Override
     public ReferenceSequence getSubsequenceAt( String contig, long start, long stop ) {
         if(start > stop + 1)
             throw new SAMException(String.format("Malformed query; start point %d lies after end point %d",start,stop));
@@ -300,6 +300,7 @@ public class IndexedFastaSequenceFile extends AbstractFastaSequenceFile implemen
      * Gets the next sequence if available, or null if not present.
      * @return next sequence if available, or null if not present.
      */
+    @Override
     public ReferenceSequence nextSequence() {
         if( !indexIterator.hasNext() )
             return null;
@@ -309,6 +310,7 @@ public class IndexedFastaSequenceFile extends AbstractFastaSequenceFile implemen
     /**
      * Reset the iterator over the index.
      */
+    @Override
     public void reset() {
         indexIterator = index.iterator();
     }
diff --git a/src/main/java/htsjdk/samtools/reference/ReferenceSequenceFile.java b/src/main/java/htsjdk/samtools/reference/ReferenceSequenceFile.java
index e7d3c28..49f526c 100644
--- a/src/main/java/htsjdk/samtools/reference/ReferenceSequenceFile.java
+++ b/src/main/java/htsjdk/samtools/reference/ReferenceSequenceFile.java
@@ -86,5 +86,6 @@ public interface ReferenceSequenceFile extends Closeable {
      */
     public String toString();
 
+    @Override
     public void close() throws IOException;
 }
diff --git a/src/main/java/htsjdk/samtools/reference/ReferenceSequenceFileFactory.java b/src/main/java/htsjdk/samtools/reference/ReferenceSequenceFileFactory.java
index 5978072..6547068 100644
--- a/src/main/java/htsjdk/samtools/reference/ReferenceSequenceFileFactory.java
+++ b/src/main/java/htsjdk/samtools/reference/ReferenceSequenceFileFactory.java
@@ -24,6 +24,8 @@
 
 package htsjdk.samtools.reference;
 
+import htsjdk.samtools.util.IOUtil;
+
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.nio.file.Path;
@@ -113,24 +115,61 @@ public class ReferenceSequenceFileFactory {
      * @param preferIndexed if true attempt to return an indexed reader that supports non-linear traversal, else return the non-indexed reader
      */
     public static ReferenceSequenceFile getReferenceSequenceFile(final Path path, final boolean truncateNamesAtWhitespace, final boolean preferIndexed) {
-        final String name = path.getFileName().toString();
-        for (final String ext : FASTA_EXTENSIONS) {
-            if (name.endsWith(ext)) {
-                // Using faidx requires truncateNamesAtWhitespace
-                if (truncateNamesAtWhitespace && preferIndexed && IndexedFastaSequenceFile.canCreateIndexedFastaReader(path)) {
-                    try {
-                        return new IndexedFastaSequenceFile(path);
-                    }
-                    catch (final FileNotFoundException e) {
-                        throw new IllegalStateException("Should never happen, because existence of files has been checked.", e);
-                    }
-                }
-                else {
-                    return new FastaSequenceFile(path, truncateNamesAtWhitespace);
-                }
+        // this should thrown an exception if the fasta file is not supported
+        getFastaExtension(path);
+        // Using faidx requires truncateNamesAtWhitespace
+        if (truncateNamesAtWhitespace && preferIndexed && IndexedFastaSequenceFile.canCreateIndexedFastaReader(path)) {
+            try {
+                return new IndexedFastaSequenceFile(path);
             }
+            catch (final FileNotFoundException e) {
+                throw new IllegalStateException("Should never happen, because existence of files has been checked.", e);
+            }
+        } else {
+            return new FastaSequenceFile(path, truncateNamesAtWhitespace);
         }
+    }
+
+    /**
+     * Returns the default dictionary name for a FASTA file.
+     *
+     * @param file the reference sequence file on disk.
+     */
+    public static File getDefaultDictionaryForReferenceSequence(final File file) {
+        return getDefaultDictionaryForReferenceSequence(file.toPath()).toFile();
+    }
+
+    /**
+     * Returns the default dictionary name for a FASTA file.
+     *
+     * @param path the reference sequence file path.
+     */
+    public static Path getDefaultDictionaryForReferenceSequence(final Path path) {
+        final String name = path.getFileName().toString();
+        final int extensionIndex = name.length() - getFastaExtension(path).length();
+        return path.resolveSibling(name.substring(0, extensionIndex) + IOUtil.DICT_FILE_EXTENSION);
+    }
 
-        throw new IllegalArgumentException("File is not a supported reference file type: " + path.toAbsolutePath());
+    /**
+     * Returns the FASTA extension for the path.
+     *
+     * @param path the reference sequence file path.
+     *
+     * @throws IllegalArgumentException if the file is not a supported reference file.
+     */
+    public static String getFastaExtension(final Path path) {
+        final String name = path.getFileName().toString();
+        return FASTA_EXTENSIONS.stream().filter(name::endsWith).findFirst()
+                .orElseGet(() -> {throw new IllegalArgumentException("File is not a supported reference file type: " + path.toAbsolutePath());});
     }
+
+    /**
+     * Returns the index name for a FASTA file.
+     *
+     * @param fastaFile the reference sequence file path.
+     */
+    public static Path getFastaIndexFileName(Path fastaFile) {
+        return fastaFile.resolveSibling(fastaFile.getFileName() + ".fai");
+    }
+
 }
diff --git a/src/main/java/htsjdk/samtools/reference/ReferenceSequenceFileWalker.java b/src/main/java/htsjdk/samtools/reference/ReferenceSequenceFileWalker.java
index d66f0f8..936f14c 100644
--- a/src/main/java/htsjdk/samtools/reference/ReferenceSequenceFileWalker.java
+++ b/src/main/java/htsjdk/samtools/reference/ReferenceSequenceFileWalker.java
@@ -1,7 +1,7 @@
 /*
  * The MIT License
  *
- * Copyright (c) 2009 The Broad Institute
+ * Copyright (c) 2009-2016 The Broad Institute
  *
  * Permission is hereby granted, free of charge, to any person obtaining a copy
  * of this software and associated documentation files (the "Software"), to deal
@@ -83,7 +83,7 @@ public class ReferenceSequenceFileWalker implements Closeable {
         }
         referenceSequence = null;
 
-        if(referenceSequenceFile.isIndexed()) {
+        if(referenceSequenceFile.isIndexed() && referenceSequenceFile.getSequenceDictionary() != null) {
             final SAMSequenceRecord samSequenceRecord = referenceSequenceFile.getSequenceDictionary().getSequence(sequenceIndex);
             if(samSequenceRecord != null) {
                 referenceSequence = referenceSequenceFile.getSequence(samSequenceRecord.getSequenceName()) ;
@@ -105,6 +105,7 @@ public class ReferenceSequenceFileWalker implements Closeable {
         return referenceSequenceFile.getSequenceDictionary();
     }
 
+    @Override
     public void close() throws IOException {
         referenceSequenceFile.close();
     }
diff --git a/src/main/java/htsjdk/samtools/seekablestream/ByteArraySeekableStream.java b/src/main/java/htsjdk/samtools/seekablestream/ByteArraySeekableStream.java
index 4f8c322..bb3b95a 100644
--- a/src/main/java/htsjdk/samtools/seekablestream/ByteArraySeekableStream.java
+++ b/src/main/java/htsjdk/samtools/seekablestream/ByteArraySeekableStream.java
@@ -1,7 +1,7 @@
 /*
  * The MIT License
  *
- * Copyright (c) 2016 The Broad Institute
+ * Copyright (c) 2015 The Broad Institute
  *
  * Permission is hereby granted, free of charge, to any person obtaining a copy
  * of this software and associated documentation files (the "Software"), to deal
@@ -24,13 +24,11 @@
 
 package htsjdk.samtools.seekablestream;
 
-import htsjdk.samtools.seekablestream.SeekableStream;
-
 import java.io.IOException;
 
 /**
-* Created by vadim on 23/03/2015.
-*/
+ * Created by vadim on 23/03/2015.
+ */
 public class ByteArraySeekableStream extends SeekableStream {
     private byte[] bytes;
     private long position = 0;
@@ -51,21 +49,27 @@ public class ByteArraySeekableStream extends SeekableStream {
 
     @Override
     public void seek(long position) throws IOException {
-        this.position = position;
+        if (position < 0) {
+            throw new IllegalArgumentException("Cannot seek to a negative position, position=" + position + ".");
+        } else {
+            this.position = position;
+        }
     }
 
     @Override
     public int read() throws IOException {
-        if (position < bytes.length)
+        if (position < bytes.length) {
             return 0xFF & bytes[((int) position++)];
-        else return -1;
+        } else {
+            return -1;
+        }
     }
 
     @Override
     public int read(byte[] b, int off, int len) throws IOException {
         if (b == null) {
             throw new NullPointerException();
-        } else if (off < 0 || len < 0 || len > b.length - off) {
+        } else if (off < 0 || len < 0 || len + off > b.length) {
             throw new IndexOutOfBoundsException();
         }
         if (position >= bytes.length) {
@@ -85,6 +89,7 @@ public class ByteArraySeekableStream extends SeekableStream {
     @Override
     public void close() throws IOException {
         bytes = null;
+        position = -1;
     }
 
     @Override
diff --git a/src/main/java/htsjdk/samtools/seekablestream/ISeekableStreamFactory.java b/src/main/java/htsjdk/samtools/seekablestream/ISeekableStreamFactory.java
index dff28b0..71807cf 100644
--- a/src/main/java/htsjdk/samtools/seekablestream/ISeekableStreamFactory.java
+++ b/src/main/java/htsjdk/samtools/seekablestream/ISeekableStreamFactory.java
@@ -2,6 +2,8 @@ package htsjdk.samtools.seekablestream;
 
 import java.io.IOException;
 import java.net.URL;
+import java.nio.channels.SeekableByteChannel;
+import java.util.function.Function;
 
 /**
  * Factory for creating {@link SeekableStream}s based on URLs/paths.
@@ -30,4 +32,24 @@ public interface ISeekableStreamFactory {
      * @return
      */
     public SeekableStream getBufferedStream(SeekableStream stream, int bufferSize);
+
+    /**
+     * Open a stream from the input path, applying the wrapper to the stream.
+     *
+     * The wrapper allows applying operations directly to the byte stream so that things like caching, prefetching, or decryption
+     * can be done at the raw byte level.
+     *
+     * The default implementation throws if wrapper != null, but implementations may support this wrapping operation
+     *
+     * @param path    a uri like String representing a resource to open
+     * @param wrapper a wrapper to apply to the stream
+     * @return a stream opened path
+     */
+    default SeekableStream getStreamFor(String path, Function<SeekableByteChannel, SeekableByteChannel> wrapper) throws IOException {
+        if(wrapper != null) {
+            throw new UnsupportedOperationException("This factory doesn't support adding wrappers");
+        } else {
+            return this.getStreamFor(path);
+        }
+    }
 }
diff --git a/src/main/java/htsjdk/samtools/seekablestream/SeekableBufferedStream.java b/src/main/java/htsjdk/samtools/seekablestream/SeekableBufferedStream.java
index 56b4d0c..0c89b01 100644
--- a/src/main/java/htsjdk/samtools/seekablestream/SeekableBufferedStream.java
+++ b/src/main/java/htsjdk/samtools/seekablestream/SeekableBufferedStream.java
@@ -67,6 +67,7 @@ public class SeekableBufferedStream extends SeekableStream {
         this(stream, DEFAULT_BUFFER_SIZE);
     }
 
+    @Override
     public long length() {
         return wrappedStream.length();
     }
@@ -84,18 +85,21 @@ public class SeekableBufferedStream extends SeekableStream {
         }
     }
 
+    @Override
     public void seek(final long position) throws IOException {
         this.position = position;
         wrappedStream.seek(position);
         bufferedStream = new ExtBufferedInputStream(wrappedStream, bufferSize);
     }
 
+    @Override
     public int read() throws IOException {
         int b = bufferedStream.read();
         position++;
         return b;
     }
 
+    @Override
     public int read(final byte[] buffer, final int offset, final int length) throws IOException {
         int nBytesRead = bufferedStream.read(buffer, offset, length);
         if (nBytesRead > 0) {
@@ -112,10 +116,12 @@ public class SeekableBufferedStream extends SeekableStream {
         return nBytesRead;
     }
 
+    @Override
     public void close() throws IOException {
         wrappedStream.close();
     }
 
+    @Override
     public boolean eof() throws IOException {
         return position >= wrappedStream.length();
     }
diff --git a/src/main/java/htsjdk/samtools/seekablestream/SeekableFTPStream.java b/src/main/java/htsjdk/samtools/seekablestream/SeekableFTPStream.java
index 0a64a7c..1723747 100644
--- a/src/main/java/htsjdk/samtools/seekablestream/SeekableFTPStream.java
+++ b/src/main/java/htsjdk/samtools/seekablestream/SeekableFTPStream.java
@@ -39,10 +39,12 @@ public class SeekableFTPStream extends SeekableStream {
         helper = new SeekableFTPStreamHelper(url, userPasswordInput);
     }
 
+    @Override
     public void seek(long position) {
         helper.seek(position);
     }
 
+    @Override
     public long position() {
         return helper.position();
     }
@@ -75,10 +77,12 @@ public class SeekableFTPStream extends SeekableStream {
     }
 
 
+    @Override
     public void close() throws IOException {
         helper.close();
     }
 
+    @Override
     public int read() throws IOException {
         return helper.read();
     }
diff --git a/src/main/java/htsjdk/samtools/seekablestream/SeekableFileStream.java b/src/main/java/htsjdk/samtools/seekablestream/SeekableFileStream.java
index 38191d7..b790732 100644
--- a/src/main/java/htsjdk/samtools/seekablestream/SeekableFileStream.java
+++ b/src/main/java/htsjdk/samtools/seekablestream/SeekableFileStream.java
@@ -48,18 +48,22 @@ public class SeekableFileStream extends SeekableStream {
         allInstances.add(this);
     }
 
+    @Override
     public long length() {
         return file.length();
     }
 
+    @Override
     public boolean eof() throws IOException {
         return fis.length() == fis.getFilePointer();
     }
 
+    @Override
     public void seek(final long position) throws IOException {
         fis.seek(position);
     }
 
+    @Override
     public long position() throws IOException {
         return fis.getChannel().position();
     }
@@ -71,6 +75,7 @@ public class SeekableFileStream extends SeekableStream {
         return position() - initPos;
     }
     
+    @Override
     public int read(final byte[] buffer, final int offset, final int length) throws IOException {
         if (length < 0) {
             throw new IndexOutOfBoundsException();
@@ -91,6 +96,7 @@ public class SeekableFileStream extends SeekableStream {
 
     }
 
+    @Override
     public int read() throws IOException {
         return fis.read();  
     }
@@ -106,6 +112,7 @@ public class SeekableFileStream extends SeekableStream {
     }
 
 
+    @Override
     public void close() throws IOException {
         allInstances.remove(this);
         fis.close();
diff --git a/src/main/java/htsjdk/samtools/seekablestream/SeekableHTTPStream.java b/src/main/java/htsjdk/samtools/seekablestream/SeekableHTTPStream.java
index 4a864b7..640a14d 100644
--- a/src/main/java/htsjdk/samtools/seekablestream/SeekableHTTPStream.java
+++ b/src/main/java/htsjdk/samtools/seekablestream/SeekableHTTPStream.java
@@ -67,10 +67,12 @@ public class SeekableHTTPStream extends SeekableStream {
 
     }
 
+    @Override
     public long position() {
         return position;
     }
 
+    @Override
     public long length() {
         return contentLength;
     }
@@ -82,14 +84,17 @@ public class SeekableHTTPStream extends SeekableStream {
         return bytesToSkip;
     }
 
+    @Override
     public boolean eof() throws IOException {
         return contentLength > 0 && position >= contentLength;
     }
 
+    @Override
     public void seek(final long position) {
         this.position = position;
     }
 
+    @Override
     public int read(byte[] buffer, int offset, int len) throws IOException {
 
         if (offset < 0 || len < 0 || (offset + len) > buffer.length) {
@@ -168,11 +173,13 @@ public class SeekableHTTPStream extends SeekableStream {
     }
 
 
+    @Override
     public void close() throws IOException {
         // Nothing to do
     }
 
 
+    @Override
     public int read() throws IOException {
     	byte []tmp=new byte[1];
     	read(tmp,0,1);
diff --git a/src/main/java/htsjdk/samtools/seekablestream/SeekablePathStream.java b/src/main/java/htsjdk/samtools/seekablestream/SeekablePathStream.java
index 18a41e7..cb88512 100644
--- a/src/main/java/htsjdk/samtools/seekablestream/SeekablePathStream.java
+++ b/src/main/java/htsjdk/samtools/seekablestream/SeekablePathStream.java
@@ -9,6 +9,7 @@ import java.nio.file.Path;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashSet;
+import java.util.function.Function;
 
 /**
  * An implementation of {@link SeekableStream} for {@link Path}.
@@ -29,8 +30,16 @@ public class SeekablePathStream extends SeekableStream {
     private final ByteBuffer oneByteBuf = ByteBuffer.allocate(1);
 
     public SeekablePathStream(final Path path) throws IOException {
+        this(path, null);
+    }
+
+    public SeekablePathStream(final Path path, Function<SeekableByteChannel, SeekableByteChannel> wrapper) throws IOException {
         this.path = path;
-        this.sbc = Files.newByteChannel(path);
+        if (null==wrapper) {
+            this.sbc = Files.newByteChannel(path);
+        } else {
+            this.sbc = wrapper.apply(Files.newByteChannel(path));
+        }
         ALL_INSTANCES.add(this);
     }
 
diff --git a/src/main/java/htsjdk/samtools/seekablestream/SeekableStream.java b/src/main/java/htsjdk/samtools/seekablestream/SeekableStream.java
index 673f08c..45f6990 100644
--- a/src/main/java/htsjdk/samtools/seekablestream/SeekableStream.java
+++ b/src/main/java/htsjdk/samtools/seekablestream/SeekableStream.java
@@ -35,8 +35,10 @@ public abstract class SeekableStream extends InputStream {
 
     public abstract void seek(long position) throws IOException;
 
+    @Override
     public abstract int read(byte[] buffer, int offset, int length) throws IOException;
 
+    @Override
     public abstract void close() throws IOException;
 
     public abstract boolean eof() throws IOException;
diff --git a/src/main/java/htsjdk/samtools/seekablestream/SeekableStreamFactory.java b/src/main/java/htsjdk/samtools/seekablestream/SeekableStreamFactory.java
index fe8f42a..19d40e5 100644
--- a/src/main/java/htsjdk/samtools/seekablestream/SeekableStreamFactory.java
+++ b/src/main/java/htsjdk/samtools/seekablestream/SeekableStreamFactory.java
@@ -23,9 +23,12 @@
  */
 package htsjdk.samtools.seekablestream;
 
+import htsjdk.samtools.util.IOUtil;
 import java.io.File;
 import java.io.IOException;
 import java.net.URL;
+import java.nio.channels.SeekableByteChannel;
+import java.util.function.Function;
 
 /**
  * Singleton class for getting {@link SeekableStream}s from URL/paths
@@ -64,11 +67,27 @@ public class SeekableStreamFactory{
 
     private static class DefaultSeekableStreamFactory implements ISeekableStreamFactory {
 
+        @Override
         public SeekableStream getStreamFor(final URL url) throws IOException {
             return getStreamFor(url.toExternalForm());
         }
 
+        @Override
         public SeekableStream getStreamFor(final String path) throws IOException {
+            return getStreamFor(path, null);
+        }
+
+        /**
+         * The wrapper will only be applied to the stream if the stream is treated as a {@link java.nio.file.Path}
+         *
+         * This currently means any uri with a scheme that is not http, https, ftp, or file will have the wrapper applied to it
+         *
+         * @param path    a uri like String representing a resource to open
+         * @param wrapper a wrapper to apply to the stream allowing direct transformations on the byte stream to be applied
+         */
+        @Override
+        public SeekableStream getStreamFor(final String path,
+                                           Function<SeekableByteChannel, SeekableByteChannel> wrapper) throws IOException {
             // todo -- add support for SeekableBlockInputStream
 
             if (path.startsWith("http:") || path.startsWith("https:")) {
@@ -78,15 +97,19 @@ public class SeekableStreamFactory{
                 return new SeekableFTPStream(new URL(path));
             } else if (path.startsWith("file:")) {
                 return new SeekableFileStream(new File(new URL(path).getPath()));
+            } else if (IOUtil.hasScheme(path)) {
+                return new SeekablePathStream(IOUtil.getPath(path), wrapper);
             } else {
                 return new SeekableFileStream(new File(path));
             }
         }
 
+        @Override
         public SeekableStream getBufferedStream(SeekableStream stream){
             return getBufferedStream(stream, SeekableBufferedStream.DEFAULT_BUFFER_SIZE);
         }
 
+        @Override
         public SeekableStream getBufferedStream(SeekableStream stream, int bufferSize){
             if (bufferSize == 0) return stream;
             else return new SeekableBufferedStream(stream, bufferSize);
diff --git a/src/main/java/htsjdk/samtools/sra/SRALazyRecord.java b/src/main/java/htsjdk/samtools/sra/SRALazyRecord.java
index 4391857..c506711 100644
--- a/src/main/java/htsjdk/samtools/sra/SRALazyRecord.java
+++ b/src/main/java/htsjdk/samtools/sra/SRALazyRecord.java
@@ -675,6 +675,7 @@ public class SRALazyRecord extends SAMRecord {
         return super.getBinaryAttributes();
     }
 
+    @Override
     public boolean isUnsignedArrayAttribute(final String tag) {
         Short binaryTag = SAMTagUtil.getSingleton().makeBinaryTag(tag);
         LazyAttribute attr = lazyAttributeTags.get(binaryTag);
diff --git a/src/main/java/htsjdk/samtools/util/AbstractAsyncWriter.java b/src/main/java/htsjdk/samtools/util/AbstractAsyncWriter.java
index ef1803b..b7c6c54 100644
--- a/src/main/java/htsjdk/samtools/util/AbstractAsyncWriter.java
+++ b/src/main/java/htsjdk/samtools/util/AbstractAsyncWriter.java
@@ -66,12 +66,12 @@ public abstract class AbstractAsyncWriter<T> implements Closeable {
      * Attempts to finish draining the queue and then calls synchronouslyClose() to allow implementation
      * to do any one time clean up.
      */
+    @Override
     public void close() {
         checkAndRethrow();
 
         if (!this.isClosed.getAndSet(true)) {
             try {
-                if (this.queue.isEmpty()) this.writer.interrupt(); // signal to writer clean up
             	this.writer.join();
             } catch (final InterruptedException ie) {
             	throw new RuntimeException("Interrupted waiting on writer thread.", ie);
@@ -110,6 +110,7 @@ public abstract class AbstractAsyncWriter<T> implements Closeable {
      * synchronous writer.
      */
     private class WriterRunnable implements Runnable {
+        @Override
         public void run() {
             try {
                 //The order of the two conditions is important, see https://github.com/samtools/htsjdk/issues/564
@@ -118,7 +119,7 @@ public abstract class AbstractAsyncWriter<T> implements Closeable {
                 //the two operations are effectively atomic if isClosed returns true
                 while (!isClosed.get() || !queue.isEmpty()) {
                     try {
-                        final T item = queue.poll(2, TimeUnit.SECONDS);
+                        final T item = queue.poll(50, TimeUnit.MILLISECONDS);
                         if (item != null) synchronouslyWrite(item);
                     }
                     catch (final InterruptedException ie) {
diff --git a/src/main/java/htsjdk/samtools/util/AbstractLocusInfo.java b/src/main/java/htsjdk/samtools/util/AbstractLocusInfo.java
index 4e02007..d699dce 100644
--- a/src/main/java/htsjdk/samtools/util/AbstractLocusInfo.java
+++ b/src/main/java/htsjdk/samtools/util/AbstractLocusInfo.java
@@ -83,6 +83,7 @@ public class AbstractLocusInfo<E extends AbstractRecordAndOffset> implements Loc
     /**
      * @return the index of reference sequence
      */
+    @Override
     public int getSequenceIndex() {
         return referenceSequence.getSequenceIndex();
     }
@@ -90,6 +91,7 @@ public class AbstractLocusInfo<E extends AbstractRecordAndOffset> implements Loc
     /**
      * @return 1-based reference position
      */
+    @Override
     public int getPosition() {
         return position;
     }
diff --git a/src/main/java/htsjdk/samtools/util/AbstractLocusIterator.java b/src/main/java/htsjdk/samtools/util/AbstractLocusIterator.java
index 6ff8e83..e350874 100644
--- a/src/main/java/htsjdk/samtools/util/AbstractLocusIterator.java
+++ b/src/main/java/htsjdk/samtools/util/AbstractLocusIterator.java
@@ -182,6 +182,7 @@ public abstract class AbstractLocusIterator<T extends AbstractRecordAndOffset, K
      * @return iterator over all/all covered locus position in reference according to <code>emitUncoveredLoci</code>
      * value.
      */
+    @Override
     public Iterator<K> iterator() {
         if (samIterator != null) {
             throw new IllegalStateException("Cannot call iterator() more than once on " + this.getClass().getSimpleName());
@@ -202,6 +203,7 @@ public abstract class AbstractLocusIterator<T extends AbstractRecordAndOffset, K
     /**
      * Closes inner <code>SamIterator</>.
      */
+    @Override
     public void close() {
         this.samIterator.close();
     }
@@ -216,6 +218,7 @@ public abstract class AbstractLocusIterator<T extends AbstractRecordAndOffset, K
      * 2) there are AbstractLocusInfo<T>s in some stage of accumulation
      * 3) there are loci in the target mask that have yet to be accumulated (even if there are no reads covering them)
      */
+    @Override
     public boolean hasNext() {
         if (this.samIterator == null) {
             iterator();
@@ -253,6 +256,7 @@ public abstract class AbstractLocusIterator<T extends AbstractRecordAndOffset, K
      *
      * @return information about next locus position in reference sequence
      */
+    @Override
     public K next() {
         // if we don't have any completed entries to return, try and make some!
         while (complete.isEmpty() && samHasMore()) {
@@ -475,6 +479,7 @@ public abstract class AbstractLocusIterator<T extends AbstractRecordAndOffset, K
         return samReader.getFileHeader().getSequence(referenceSequenceIndex);
     }
 
+    @Override
     public void remove() {
         throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!");
     }
diff --git a/src/main/java/htsjdk/samtools/util/AbstractRecordAndOffset.java b/src/main/java/htsjdk/samtools/util/AbstractRecordAndOffset.java
index 28b9d34..e76b666 100644
--- a/src/main/java/htsjdk/samtools/util/AbstractRecordAndOffset.java
+++ b/src/main/java/htsjdk/samtools/util/AbstractRecordAndOffset.java
@@ -49,16 +49,6 @@ public class AbstractRecordAndOffset {
     /**
      * @param record inner SAMRecord
      * @param offset from the start of the read
-     * @param length of alignment block
-     * @param refPos corresponding to read offset reference position
-     */
-    public AbstractRecordAndOffset(final SAMRecord record, final int offset, int length, int refPos) {
-        this(record, offset);
-    }
-
-    /**
-     * @param record inner SAMRecord
-     * @param offset from the start of the read
      */
     public AbstractRecordAndOffset(final SAMRecord record, final int offset) {
         this.offset = offset;
@@ -94,13 +84,6 @@ public class AbstractRecordAndOffset {
     }
 
     /**
-     * @return the position in reference sequence, to which the start of alignment block is aligned.
-     */
-    public int getRefPos() {
-        return -1;
-    }
-
-    /**
      * @return read name of inner SAMRecord.
      */
     public String getReadName() {
diff --git a/src/main/java/htsjdk/samtools/util/AsciiWriter.java b/src/main/java/htsjdk/samtools/util/AsciiWriter.java
index 00c6f7f..50b08d8 100644
--- a/src/main/java/htsjdk/samtools/util/AsciiWriter.java
+++ b/src/main/java/htsjdk/samtools/util/AsciiWriter.java
@@ -50,6 +50,7 @@ public class AsciiWriter extends Writer {
     /**
      * flushes and closes underlying OutputStream.
      */
+    @Override
     public void close() throws IOException {
         flush();
         os.close();
@@ -58,6 +59,7 @@ public class AsciiWriter extends Writer {
     /**
      * flushes underlying OutputStream
      */
+    @Override
     public void flush() throws IOException {
         os.write(buffer, 0, numBytes);
         numBytes = 0;
@@ -67,6 +69,7 @@ public class AsciiWriter extends Writer {
     /**
      * All other Writer methods vector through this, so this is the only one that must be overridden.
      */
+    @Override
     public void write(final char[] chars, int offset, int length) throws IOException {
         while (length > 0) {
             final int charsToConvert = Math.min(length, buffer.length - numBytes);
diff --git a/src/main/java/htsjdk/samtools/util/AsyncBlockCompressedInputStream.java b/src/main/java/htsjdk/samtools/util/AsyncBlockCompressedInputStream.java
new file mode 100644
index 0000000..4f71ef5
--- /dev/null
+++ b/src/main/java/htsjdk/samtools/util/AsyncBlockCompressedInputStream.java
@@ -0,0 +1,234 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2016 Daniel Cameron
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools.util;
+
+
+import htsjdk.samtools.Defaults;
+import htsjdk.samtools.seekablestream.SeekableStream;
+import htsjdk.samtools.util.zip.InflaterFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URL;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.Executor;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.ThreadFactory;
+
+/**
+ * Asynchronous read-ahead implementation of {@link htsjdk.samtools.util.BlockCompressedInputStream}.   
+ * 
+ * Note that this implementation is not synchronized. If multiple threads access an instance concurrently, it must be synchronized externally. 
+ */
+public class AsyncBlockCompressedInputStream extends BlockCompressedInputStream {
+    private static final int READ_AHEAD_BUFFERS = (int)Math.ceil(Defaults.NON_ZERO_BUFFER_SIZE / BlockCompressedStreamConstants.MAX_COMPRESSED_BLOCK_SIZE);
+    private static final Executor threadpool = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors(),new ThreadFactory() {
+            @Override
+            public Thread newThread(Runnable r) {
+                Thread t = Executors.defaultThreadFactory().newThread(r);
+                t.setDaemon(true);
+                return t;
+            }
+        });
+    /**
+     * Next blocks (in stream order) that have already been decompressed. 
+     */
+    private final BlockingQueue<DecompressedBlock> mResult = new ArrayBlockingQueue<>(READ_AHEAD_BUFFERS);
+    /**
+     * Buffers used to decompress previous blocks that are no longer in use.
+     * These buffers are reused if possible.
+     * Note that no blocking occurs on this buffer and a blocking queue is used purely
+     * because it is a base library synchronized queue implementation
+     * (and Collections.synchronizedQueue() does not exist).
+     */
+    private final BlockingQueue<byte[]> freeBuffers = new ArrayBlockingQueue<>(READ_AHEAD_BUFFERS);
+    /**
+     * Indicates whether a read-ahead task has been scheduled to run. Only one read-ahead task
+     * per stream can be scheduled at any one time.
+     */
+    private final Semaphore running = new Semaphore(1);
+    /**
+     * Indicates whether any scheduled task should abort processing and terminate
+     * as soon as possible since the result will be discarded anyway.
+     */
+    private volatile boolean mAbort = false;
+
+    public AsyncBlockCompressedInputStream(final InputStream stream) {
+        super(stream, true);
+    }
+
+    public AsyncBlockCompressedInputStream(final InputStream stream, InflaterFactory inflaterFactory) {
+        super(stream, true, inflaterFactory);
+    }
+
+    public AsyncBlockCompressedInputStream(final File file)
+        throws IOException {
+        super(file);
+    }
+
+    public AsyncBlockCompressedInputStream(final File file, InflaterFactory inflaterFactory)
+            throws IOException {
+        super(file, inflaterFactory);
+    }
+
+    public AsyncBlockCompressedInputStream(final URL url) {
+        super(url);
+    }
+
+    public AsyncBlockCompressedInputStream(final URL url, InflaterFactory inflaterFactory) {
+        super(url, inflaterFactory);
+    }
+
+    public AsyncBlockCompressedInputStream(final SeekableStream strm) {
+        super(strm);
+    }
+
+    public AsyncBlockCompressedInputStream(final SeekableStream strm, InflaterFactory inflaterFactory) {
+        super(strm, inflaterFactory);
+    }
+
+    @Override
+    protected DecompressedBlock nextBlock(byte[] bufferAvailableForReuse) {
+        if (bufferAvailableForReuse != null) {
+            freeBuffers.offer(bufferAvailableForReuse);
+        }
+        return nextBlockSync();
+    }
+    
+    @Override
+    protected void prepareForSeek() {
+        flushReadAhead();
+        super.prepareForSeek();
+    }
+
+    @Override
+    public void close() throws IOException {
+        // Suppress interrupts while we close.
+        final boolean isInterrupted = Thread.interrupted();
+        mAbort = true;
+        try {
+            flushReadAhead();
+            super.close();
+        } finally {
+            if (isInterrupted) Thread.currentThread().interrupt();
+        }
+    }
+    /**
+     * Foreground thread blocking operation that aborts all read-ahead tasks
+     * and flushes all read-ahead results.
+     */
+    private void flushReadAhead() {
+        final boolean abortStatus = mAbort;
+        mAbort = true;
+        try {
+            // block until the thread pool operation has completed
+            running.acquire();
+        } catch (InterruptedException e) {
+            throw new RuntimeException("Interrupted waiting for decompression thread", e);
+        }
+        // flush any read-ahead results
+        mResult.clear();
+        mAbort = abortStatus;
+        running.release();
+    }
+    /**
+     * Ensures that a read-ahead task for this stream exists in the thread pool.
+     */
+    private void ensureReadAhead() {
+        if (running.tryAcquire()) {
+            tryQueueTask();
+        }
+    }
+    /**
+     * Try to queue another read-ahead buffer
+     * This method should only be invoked by the owner of the running semaphore
+     */
+    private void tryQueueTask() {
+        if (mAbort) {
+            // Potential deadlock between getNextBlock() and flushReadAhead() here
+            // This requires seek()/close() and another method to be called
+            // at the same time. Since the parent class is not thread-safe
+            // this is an acceptable behavior.
+            running.release();
+            return;
+        }
+        if (mResult.remainingCapacity() == 0) {
+            // read-ahead has already filled the results buffer
+            running.release();
+            if (mResult.remainingCapacity() > 0) {
+                // race condition this second check fixes:
+                // - worker thread context switch after checking remaining capacity is zero
+                // - foreground thread calls getNextBlock() repeatedly until blocking
+                // - worker thread switches back in and releases mutex
+                // = foreground blocking on mResult.take(), mutex free, no worker
+                // -> try to take back mutex and start worker
+                // if that fails, the someone else took the lock and would
+                // have started the background worker. (except if flushReadAhead()
+                // took the lock with getNextBlock() still blocking: not thread-safe
+                // so we don't care)
+                ensureReadAhead();
+                return;
+            } else {
+                return;
+            }
+        }
+        // we are able to perform a read-ahead operation
+        // ownership of the running mutex is now with the threadpool task
+        threadpool.execute(new AsyncBlockCompressedInputStreamRunnable());
+    }
+    /**
+     * Foreground thread blocking operation that retrieves the next read-ahead buffer.
+     * Lazy initiation of read-ahead is performed if required.
+     * @return next decompressed block in input stream 
+     */
+    private DecompressedBlock nextBlockSync() {
+        ensureReadAhead();
+        DecompressedBlock nextBlock;
+        try {
+            nextBlock = mResult.take();
+        } catch (InterruptedException e) {
+            return new DecompressedBlock(0, 0, e);
+        }
+        ensureReadAhead();
+        return nextBlock;
+    }
+    private class AsyncBlockCompressedInputStreamRunnable implements Runnable {
+        /**
+         * Thread pool operation that fills the read-ahead queue
+         */
+        @Override
+        public void run() {
+            final DecompressedBlock decompressed = processNextBlock(freeBuffers.poll());
+            if (!mResult.offer(decompressed)) {
+                // offer should never block since we never queue a task when the results buffer is full
+                running.release(); // safety release to ensure foreground close() does not block indefinitely
+                throw new IllegalStateException("Decompression buffer full");
+            }
+            tryQueueTask();
+        }
+    }
+}
diff --git a/src/main/java/htsjdk/samtools/util/BinaryCodec.java b/src/main/java/htsjdk/samtools/util/BinaryCodec.java
index 8933ee3..898f50e 100644
--- a/src/main/java/htsjdk/samtools/util/BinaryCodec.java
+++ b/src/main/java/htsjdk/samtools/util/BinaryCodec.java
@@ -35,6 +35,8 @@ import java.io.OutputStream;
 import java.io.SyncFailedException;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
+import java.nio.file.Files;
+import java.nio.file.Path;
 
 /**
  * Encapsulates file representation of various primitive data types.  Forces little-endian disk representation.
@@ -91,29 +93,41 @@ public class BinaryCodec implements Closeable {
     //////////////////////////////////////////////////
 
     /**
-     * Constructs BinaryCodec from a file and set it's mode to writing or not
+     * Constructs BinaryCodec from a file and set its mode to writing or not
      *
-     * @param file    file to be written to or read from
+     * @param path    file to be written to or read from
      * @param writing whether the file is being written to
      */
-    public BinaryCodec(final File file, final boolean writing) {
+    public BinaryCodec(final Path path, final boolean writing) {
         this();
         try {
             this.isWriting = writing;
             if (this.isWriting) {
-                this.outputStream = IOUtil.maybeBufferOutputStream(new FileOutputStream(file));
-                this.outputFileName = file.getName();
+                this.outputStream = IOUtil.maybeBufferOutputStream(Files.newOutputStream(path));
+                this.outputFileName = path.getFileName().toString();
             } else {
-                this.inputStream = IOUtil.maybeBufferInputStream(new FileInputStream(file));
-                this.inputFileName = file.getName();
+                this.inputStream = IOUtil.maybeBufferInputStream(Files.newInputStream(path));
+                this.inputFileName = path.getFileName().toString();
             }
         } catch (FileNotFoundException e) {
-            throw new RuntimeIOException("File not found: " + file, e);
+            throw new RuntimeIOException("File not found: " + path, e);
+        } catch (IOException e) {
+            throw new RuntimeIOException("Error opening: " + path, e);
         }
     }
 
     /**
-     * Constructs BinaryCodec from a file name and set it's mode to writing or not
+     * Constructs BinaryCodec from a file and set its mode to writing or not
+     *
+     * @param file    file to be written to or read from
+     * @param writing whether the file is being written to
+     */
+    public BinaryCodec(final File file, final boolean writing) {
+        this(null == file ? null : file.toPath(), writing);
+    }
+
+    /**
+     * Constructs BinaryCodec from a file name and set its mode to writing or not
      *
      * @param fileName name of the file to be written to or read from
      * @param writing  writing whether the file is being written to
@@ -587,6 +601,7 @@ public class BinaryCodec implements Closeable {
     /**
      * Close the appropriate stream
      */
+    @Override
     public void close() {
         try {
             if (this.isWriting) {
diff --git a/src/main/java/htsjdk/samtools/util/BlockCompressedInputStream.java b/src/main/java/htsjdk/samtools/util/BlockCompressedInputStream.java
index b0ac001..622ca67 100755
--- a/src/main/java/htsjdk/samtools/util/BlockCompressedInputStream.java
+++ b/src/main/java/htsjdk/samtools/util/BlockCompressedInputStream.java
@@ -30,22 +30,25 @@ import htsjdk.samtools.seekablestream.SeekableBufferedStream;
 import htsjdk.samtools.seekablestream.SeekableFileStream;
 import htsjdk.samtools.seekablestream.SeekableHTTPStream;
 import htsjdk.samtools.seekablestream.SeekableStream;
+import htsjdk.samtools.util.zip.InflaterFactory;
 
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.RandomAccessFile;
+import java.io.*;
 import java.net.URL;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
+import java.nio.channels.SeekableByteChannel;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.StandardOpenOption;
 import java.util.Arrays;
 
-/*
+/**
  * Utility class for reading BGZF block compressed files.  The caller can treat this file like any other InputStream.
  * It probably is not necessary to wrap this stream in a buffering stream, because there is internal buffering.
  * The advantage of BGZF over conventional GZip format is that BGZF allows for seeking without having to read the
- * entire file up to the location being sought.  Note that seeking is only possible if the ctor(File) is used.
+ * entire file up to the location being sought.  Note that seeking is only possible if the input stream is seekable.
+ * 
+ * Note that this implementation is not synchronized. If multiple threads access an instance concurrently, it must be synchronized externally. 
  *
  * c.f. http://samtools.sourceforge.net/SAM1.pdf for details of BGZF format
  */
@@ -54,30 +57,52 @@ public class BlockCompressedInputStream extends InputStream implements LocationA
     public final static String INCORRECT_HEADER_SIZE_MSG = "Incorrect header size for file: ";
     public final static String UNEXPECTED_BLOCK_LENGTH_MSG = "Unexpected compressed block length: ";
     public final static String PREMATURE_END_MSG = "Premature end of file: ";
-    public final static String CANNOT_SEEK_STREAM_MSG = "Cannot seek on stream based file ";
+    public final static String CANNOT_SEEK_STREAM_MSG = "Cannot seek a position for a non-file stream";
+    public final static String CANNOT_SEEK_CLOSED_STREAM_MSG = "Cannot seek a position for a closed stream";
     public final static String INVALID_FILE_PTR_MSG = "Invalid file pointer: ";
 
     private InputStream mStream = null;
+    private boolean mIsClosed = false;
     private SeekableStream mFile = null;
     private byte[] mFileBuffer = null;
-    private byte[] mCurrentBlock = null;
+    private DecompressedBlock mCurrentBlock = null;
     private int mCurrentOffset = 0;
-    private long mBlockAddress = 0;
-    private int mLastBlockLength = 0;
-    private final BlockGunzipper blockGunzipper = new BlockGunzipper();
-
+    private long mStreamOffset = 0;
+    private final BlockGunzipper blockGunzipper;
 
     /**
      * Note that seek() is not supported if this ctor is used.
+     * @param stream source of bytes
      */
     public BlockCompressedInputStream(final InputStream stream) {
-        this(stream, true);
+        this(stream, true, BlockGunzipper.getDefaultInflaterFactory());
+    }
+
+    /**
+     * Note that seek() is not supported if this ctor is used.
+     * @param stream source of bytes
+     * @param inflaterFactory {@link InflaterFactory} used by {@link BlockGunzipper}
+     */
+    public BlockCompressedInputStream(final InputStream stream, final InflaterFactory inflaterFactory) {
+        this(stream, true, inflaterFactory);
     }
 
     /**
      * Note that seek() is not supported if this ctor is used.
+     * @param stream source of bytes
+     * @param allowBuffering if true, allow buffering
      */
     public BlockCompressedInputStream(final InputStream stream, final boolean allowBuffering) {
+        this(stream, allowBuffering, BlockGunzipper.getDefaultInflaterFactory());
+    }
+
+    /**
+     * Note that seek() is not supported if this ctor is used.
+     * @param stream source of bytes
+     * @param allowBuffering if true, allow buffering
+     * @param inflaterFactory {@link InflaterFactory} used by {@link BlockGunzipper}
+     */
+    public BlockCompressedInputStream(final InputStream stream, final boolean allowBuffering, final InflaterFactory inflaterFactory) {
         if (allowBuffering) {
             mStream = IOUtil.toBufferedStream(stream);
         }
@@ -86,30 +111,68 @@ public class BlockCompressedInputStream extends InputStream implements LocationA
         }
 
         mFile = null;
+        blockGunzipper = new BlockGunzipper(inflaterFactory);
     }
 
     /**
      * Use this ctor if you wish to call seek()
+     * @param file source of bytes
+     * @throws IOException
      */
     public BlockCompressedInputStream(final File file) throws IOException {
+        this(file, BlockGunzipper.getDefaultInflaterFactory());
+    }
+
+    /**
+     * Use this ctor if you wish to call seek()
+     * @param file source of bytes
+     * @param inflaterFactory {@link InflaterFactory} used by {@link BlockGunzipper}
+     * @throws IOException
+     */
+    public BlockCompressedInputStream(final File file, final InflaterFactory inflaterFactory) throws IOException {
         mFile = new SeekableFileStream(file);
         mStream = null;
-
+        blockGunzipper = new BlockGunzipper(inflaterFactory);
     }
 
+    /**
+     * @param url source of bytes
+     */
     public BlockCompressedInputStream(final URL url) {
+        this(url, BlockGunzipper.getDefaultInflaterFactory());
+    }
+
+    /**
+     * @param url source of bytes
+     * @param inflaterFactory {@link InflaterFactory} used by {@link BlockGunzipper}
+     */
+    public BlockCompressedInputStream(final URL url, final InflaterFactory inflaterFactory) {
         mFile = new SeekableBufferedStream(new SeekableHTTPStream(url));
         mStream = null;
+        blockGunzipper = new BlockGunzipper(inflaterFactory);
     }
 
     /**
      * For providing some arbitrary data source.  No additional buffering is
      * provided, so if the underlying source is not buffered, wrap it in a
      * SeekableBufferedStream before passing to this ctor.
+     * @param strm source of bytes
      */
     public BlockCompressedInputStream(final SeekableStream strm) {
+        this(strm, BlockGunzipper.getDefaultInflaterFactory());
+    }
+
+    /**
+     * For providing some arbitrary data source.  No additional buffering is
+     * provided, so if the underlying source is not buffered, wrap it in a
+     * SeekableBufferedStream before passing to this ctor.
+     * @param strm source of bytes
+     * @param inflaterFactory {@link InflaterFactory} used by {@link BlockGunzipper}
+     */
+    public BlockCompressedInputStream(final SeekableStream strm, final InflaterFactory inflaterFactory) {
         mFile = strm;
         mStream = null;
+        blockGunzipper = new BlockGunzipper(inflaterFactory);
     }
 
     /**
@@ -127,14 +190,15 @@ public class BlockCompressedInputStream extends InputStream implements LocationA
      * Note that although the next caller can read this many bytes without blocking, the available() method call itself
      * may block in order to fill an internal buffer if it has been exhausted.
      */
+    @Override
     public int available() throws IOException {
-        if (mCurrentBlock == null || mCurrentOffset == mCurrentBlock.length) {
+        if (mCurrentBlock == null || mCurrentOffset == mCurrentBlock.mBlock.length) {
             readBlock();
         }
         if (mCurrentBlock == null) {
             return 0;
         }
-        return mCurrentBlock.length - mCurrentOffset;
+        return mCurrentBlock.mBlock.length - mCurrentOffset;
     }
 
     /**
@@ -142,12 +206,13 @@ public class BlockCompressedInputStream extends InputStream implements LocationA
      * <code>false</code> otherwise.
      */
     public boolean endOfBlock() {
-        return (mCurrentBlock != null && mCurrentOffset == mCurrentBlock.length);
+        return (mCurrentBlock != null && mCurrentOffset == mCurrentBlock.mBlock.length);
     }
 
     /**
      * Closes the underlying InputStream or RandomAccessFile
      */
+    @Override
     public void close() throws IOException {
         if (mFile != null) {
             mFile.close();
@@ -159,6 +224,9 @@ public class BlockCompressedInputStream extends InputStream implements LocationA
         // Encourage garbage collection
         mFileBuffer = null;
         mCurrentBlock = null;
+
+        // Mark as closed
+        mIsClosed = true;
     }
 
     /**
@@ -168,8 +236,9 @@ public class BlockCompressedInputStream extends InputStream implements LocationA
 
      * @return the next byte of data, or -1 if the end of the stream is reached.
      */
+    @Override
     public int read() throws IOException {
-        return (available() > 0) ? (mCurrentBlock[mCurrentOffset++] & 0xFF) : -1;
+        return (available() > 0) ? (mCurrentBlock.mBlock[mCurrentOffset++] & 0xFF) : -1;
     }
 
     /**
@@ -183,6 +252,7 @@ public class BlockCompressedInputStream extends InputStream implements LocationA
      * @return the total number of bytes read into the buffer, or -1 is there is no more data because the end of
      * the stream has been reached.
      */
+    @Override
     public int read(final byte[] buffer) throws IOException {
         return read(buffer, 0, buffer.length);
     }
@@ -199,48 +269,47 @@ public class BlockCompressedInputStream extends InputStream implements LocationA
      *          character, or null if the end of the stream has been reached
      *
      * @exception  IOException  If an I/O error occurs
-     * @
      */
     public String readLine() throws IOException {
-    	int available = available();
+        int available = available();
         if (available == 0) {
             return null;
         }
         if(null == buf){ // lazy initialisation 
-        	buf = new ByteArrayOutputStream(8192);
+            buf = new ByteArrayOutputStream(8192);
         }
         buf.reset();
-    	boolean done = false;
-    	boolean foundCr = false; // \r found flag
+        boolean done = false;
+        boolean foundCr = false; // \r found flag
         while (!done) {
-        	int linetmpPos = mCurrentOffset;
-        	int bCnt = 0;
-        	while((available-- > 0)){
-        		final byte c = mCurrentBlock[linetmpPos++];
-        		if(c == eol){ // found \n
-        			done = true;
-        			break;
-        		} else if(foundCr){  // previous char was \r
-        			--linetmpPos; // current char is not \n so put it back
-        			done = true;
-        			break;
-        		} else if(c == eolCr){ // found \r
-					foundCr = true;
-        			continue; // no ++bCnt
-        		}
-				++bCnt;
-        	}
-        	if(mCurrentOffset < linetmpPos){
-				buf.write(mCurrentBlock, mCurrentOffset, bCnt);
-	        	mCurrentOffset = linetmpPos;
-        	}
-        	available = available();    
-        	if(available == 0){
-        		// EOF
-        		done = true;
-        	}
-        }
-    	return buf.toString();
+            int linetmpPos = mCurrentOffset;
+            int bCnt = 0;
+            while((available-- > 0)){
+                final byte c = mCurrentBlock.mBlock[linetmpPos++];
+                if(c == eol){ // found \n
+                    done = true;
+                    break;
+                } else if(foundCr){  // previous char was \r
+                    --linetmpPos; // current char is not \n so put it back
+                    done = true;
+                    break;
+                } else if(c == eolCr){ // found \r
+                    foundCr = true;
+                    continue; // no ++bCnt
+                }
+                ++bCnt;
+            }
+            if(mCurrentOffset < linetmpPos) {
+                buf.write(mCurrentBlock.mBlock, mCurrentOffset, bCnt);
+                mCurrentOffset = linetmpPos;
+            }
+            available = available();
+            if(available == 0) {
+                // EOF
+                done = true;
+            }
+        }
+        return buf.toString();
     }
 
     /**
@@ -255,6 +324,7 @@ public class BlockCompressedInputStream extends InputStream implements LocationA
      * @return the total number of bytes read into the buffer, or -1 if there is no more data because the end of
      * the stream has been reached.
      */
+    @Override
     public int read(final byte[] buffer, int offset, int length) throws IOException {
         final int originalLength = length;
         while (length > 0) {
@@ -267,7 +337,7 @@ public class BlockCompressedInputStream extends InputStream implements LocationA
                 break;
             }
             final int copyLength = Math.min(length, available);
-            System.arraycopy(mCurrentBlock, mCurrentOffset, buffer, offset, copyLength);
+            System.arraycopy(mCurrentBlock.mBlock, mCurrentOffset, buffer, offset, copyLength);
             mCurrentOffset += copyLength;
             offset += copyLength;
             length -= copyLength;
@@ -279,40 +349,57 @@ public class BlockCompressedInputStream extends InputStream implements LocationA
      * Seek to the given position in the file.  Note that pos is a special virtual file pointer,
      * not an actual byte offset.
      *
-     * @param pos virtual file pointer
+     * @param pos virtual file pointer position
+     * @throws IOException if stream is closed or not a file based stream
      */
     public void seek(final long pos) throws IOException {
+        // Must be before the mFile == null check because mFile == null for closed files and streams
+        if (mIsClosed) {
+            throw new IOException(CANNOT_SEEK_CLOSED_STREAM_MSG);
+        }
+
+        // Cannot seek on streams that are not file based
         if (mFile == null) {
             throw new IOException(CANNOT_SEEK_STREAM_MSG);
         }
+
         // Decode virtual file pointer
-        // Upper 48 bits is the byte offset into the compressed stream of a block.
-        // Lower 16 bits is the byte offset into the uncompressed stream inside the block.
+        // Upper 48 bits is the byte offset into the compressed stream of a
+        // block.
+        // Lower 16 bits is the byte offset into the uncompressed stream inside
+        // the block.
         final long compressedOffset = BlockCompressedFilePointerUtil.getBlockAddress(pos);
         final int uncompressedOffset = BlockCompressedFilePointerUtil.getBlockOffset(pos);
         final int available;
-        if (mBlockAddress == compressedOffset && mCurrentBlock != null) {
-            available = mCurrentBlock.length;
+        if (mCurrentBlock != null && mCurrentBlock.mBlockAddress == compressedOffset) {
+            available = mCurrentBlock.mBlock.length;
         } else {
+            prepareForSeek();
             mFile.seek(compressedOffset);
-            mBlockAddress = compressedOffset;
-            mLastBlockLength = 0;
-            readBlock();
+            mStreamOffset = compressedOffset;
+            mCurrentBlock = nextBlock(getBufferForReuse(mCurrentBlock));
+            mCurrentOffset = 0;
             available = available();
         }
-        if (uncompressedOffset > available ||
-                (uncompressedOffset == available && !eof())) {
+        if (uncompressedOffset > available || (uncompressedOffset == available && !eof())) {
             throw new IOException(INVALID_FILE_PTR_MSG + pos + " for " + mFile.getSource());
         }
         mCurrentOffset = uncompressedOffset;
     }
+    
+    /**
+     * Performs cleanup required before seek is called on the underlying stream 
+     */
+    protected void prepareForSeek() {
+    }
 
     private boolean eof() throws IOException {
         if (mFile.eof()) {
             return true;
         }
         // If the last remaining block is the size of the EMPTY_GZIP_BLOCK, this is the same as being at EOF.
-        return (mFile.length() - (mBlockAddress + mLastBlockLength) == BlockCompressedStreamConstants.EMPTY_GZIP_BLOCK.length);
+        return (mFile.length() - (mCurrentBlock.mBlockAddress
+                + mCurrentBlock.mBlockCompressedSize) == BlockCompressedStreamConstants.EMPTY_GZIP_BLOCK.length);
     }
 
     /**
@@ -321,12 +408,17 @@ public class BlockCompressedInputStream extends InputStream implements LocationA
      * the two.
      */
     public long getFilePointer() {
-        if (mCurrentOffset == mCurrentBlock.length) {
-            // If current offset is at the end of the current block, file pointer should point
+        if (mCurrentBlock == null) {
+            // Haven't read anything yet = at start of stream
+            return BlockCompressedFilePointerUtil.makeFilePointer(0, 0);
+        }
+        if (mCurrentOffset > 0 && mCurrentOffset == mCurrentBlock.mBlock.length) {
+            // If current offset is at the end of the current block, file
+            // pointer should point
             // to the beginning of the next block.
-            return BlockCompressedFilePointerUtil.makeFilePointer(mBlockAddress + mLastBlockLength, 0);
+            return BlockCompressedFilePointerUtil.makeFilePointer(mCurrentBlock.mBlockAddress + mCurrentBlock.mBlockCompressedSize, 0);
         }
-        return BlockCompressedFilePointerUtil.makeFilePointer(mBlockAddress, mCurrentOffset);
+        return BlockCompressedFilePointerUtil.makeFilePointer(mCurrentBlock.mBlockAddress, mCurrentOffset);
     }
 
     @Override
@@ -363,49 +455,100 @@ public class BlockCompressedInputStream extends InputStream implements LocationA
     }
 
     private void readBlock() throws IOException {
-
+        mCurrentBlock = nextBlock(getBufferForReuse(mCurrentBlock));
+        mCurrentOffset = 0;
+        checkAndRethrowDecompressionException();
+    }
+    /**
+     * Reads and decompresses the next block
+     * @param bufferAvailableForReuse decompression buffer available for reuse
+     * @return next block in the decompressed stream
+     */
+    protected DecompressedBlock nextBlock(byte[] bufferAvailableForReuse) {
+        return processNextBlock(bufferAvailableForReuse);
+    }
+    /**
+     * Rethrows an exception encountered during decompression
+     * @throws IOException
+     */
+    private void checkAndRethrowDecompressionException() throws IOException {
+        if (mCurrentBlock.mException != null) {
+            if (mCurrentBlock.mException instanceof IOException) {
+                throw (IOException) mCurrentBlock.mException;
+            } else if (mCurrentBlock.mException instanceof RuntimeException) {
+                throw (RuntimeException) mCurrentBlock.mException;
+            } else {
+                throw new RuntimeException(mCurrentBlock.mException);
+            }
+        }
+    }
+    
+    /**
+     * Attempt to reuse the buffer of the given block 
+     * @param block owning block
+     * @return null decompressing buffer to reuse, null if no buffer is available
+     */
+    private byte[] getBufferForReuse(DecompressedBlock block) {
+        if (block == null) return null;
+        return block.mBlock;
+    }
+    
+    /**
+     * Decompress the next block from the input stream. When using asynchronous 
+     * IO, this will be called by the background thread.
+     * @param bufferAvailableForReuse buffer in which to place decompressed block. A null or
+     *  incorrectly sized buffer will result in the buffer being ignored and
+     *  a new buffer allocated for decompression.
+     * @return next block in input stream
+     */
+    protected DecompressedBlock processNextBlock(byte[] bufferAvailableForReuse) {
         if (mFileBuffer == null) {
             mFileBuffer = new byte[BlockCompressedStreamConstants.MAX_COMPRESSED_BLOCK_SIZE];
         }
-        int count = readBytes(mFileBuffer, 0, BlockCompressedStreamConstants.BLOCK_HEADER_LENGTH);
-        if (count == 0) {
-            // Handle case where there is no empty gzip block at end.
-            mCurrentOffset = 0;
-            mBlockAddress += mLastBlockLength;
-            mCurrentBlock = new byte[0];
-            return;
-        }
-        if (count != BlockCompressedStreamConstants.BLOCK_HEADER_LENGTH) {
-            throw new IOException(INCORRECT_HEADER_SIZE_MSG + mFile.getSource());
-        }
-        final int blockLength = unpackInt16(mFileBuffer, BlockCompressedStreamConstants.BLOCK_LENGTH_OFFSET) + 1;
-        if (blockLength < BlockCompressedStreamConstants.BLOCK_HEADER_LENGTH || blockLength > mFileBuffer.length) {
-            throw new IOException(UNEXPECTED_BLOCK_LENGTH_MSG + blockLength + " for " + mFile.getSource());
-        }
-        final int remaining = blockLength - BlockCompressedStreamConstants.BLOCK_HEADER_LENGTH;
-        count = readBytes(mFileBuffer, BlockCompressedStreamConstants.BLOCK_HEADER_LENGTH, remaining);
-        if (count != remaining) {
-            throw new FileTruncatedException(PREMATURE_END_MSG + mFile.getSource());
+        long blockAddress = mStreamOffset;
+        try {
+            final int headerByteCount = readBytes(mFileBuffer, 0, BlockCompressedStreamConstants.BLOCK_HEADER_LENGTH);
+            mStreamOffset += headerByteCount;
+            if (headerByteCount == 0) {
+                // Handle case where there is no empty gzip block at end.
+                return new DecompressedBlock(blockAddress, new byte[0], 0);
+            }
+            if (headerByteCount != BlockCompressedStreamConstants.BLOCK_HEADER_LENGTH) {
+                return new DecompressedBlock(blockAddress, headerByteCount, new IOException(INCORRECT_HEADER_SIZE_MSG + mFile.getSource()));
+            }
+            final int blockLength = unpackInt16(mFileBuffer, BlockCompressedStreamConstants.BLOCK_LENGTH_OFFSET) + 1;
+            if (blockLength < BlockCompressedStreamConstants.BLOCK_HEADER_LENGTH || blockLength > mFileBuffer.length) {
+                return new DecompressedBlock(blockAddress, blockLength,
+                		new IOException(UNEXPECTED_BLOCK_LENGTH_MSG + blockLength + " for " + mFile.getSource()));
+            }
+            final int remaining = blockLength - BlockCompressedStreamConstants.BLOCK_HEADER_LENGTH;
+            final int dataByteCount = readBytes(mFileBuffer, BlockCompressedStreamConstants.BLOCK_HEADER_LENGTH,
+                    remaining);
+            mStreamOffset += dataByteCount;
+            if (dataByteCount != remaining) {
+                return new DecompressedBlock(blockAddress, blockLength,
+                		new FileTruncatedException(PREMATURE_END_MSG + mFile.getSource()));
+            }
+            final byte[] decompressed = inflateBlock(mFileBuffer, blockLength, bufferAvailableForReuse);
+            return new DecompressedBlock(blockAddress, decompressed, blockLength);
+        } catch (IOException e) {
+            return new DecompressedBlock(blockAddress, 0, e);
         }
-        inflateBlock(mFileBuffer, blockLength);
-        mCurrentOffset = 0;
-        mBlockAddress += mLastBlockLength;
-        mLastBlockLength = blockLength;
     }
 
-    private void inflateBlock(final byte[] compressedBlock, final int compressedLength) throws IOException {
-        final int uncompressedLength = unpackInt32(compressedBlock, compressedLength-4);
-        byte[] buffer = mCurrentBlock;
-        mCurrentBlock = null;
-        if (buffer == null || buffer.length != uncompressedLength) {
-            try {
-                buffer = new byte[uncompressedLength];
-            } catch (final NegativeArraySizeException e) {
-                throw new RuntimeIOException(mFile.getSource() + " has invalid uncompressedLength: " + uncompressedLength, e);
-            }
+    private byte[] inflateBlock(final byte[] compressedBlock, final int compressedLength,
+            final byte[] bufferAvailableForReuse) throws IOException {
+        final int uncompressedLength = unpackInt32(compressedBlock, compressedLength - 4);
+        if (uncompressedLength < 0) {
+        	throw new RuntimeIOException(mFile.getSource() + " has invalid uncompressedLength: " + uncompressedLength);
+        }
+        byte[] buffer = bufferAvailableForReuse;
+        if (buffer == null || uncompressedLength != buffer.length) {
+        	// can't reuse the buffer since the size is incorrect
+            buffer = new byte[uncompressedLength];
         }
         blockGunzipper.unzipBlock(buffer, compressedBlock, compressedLength);
-        mCurrentBlock = buffer;
+        return buffer;
     }
 
     private int readBytes(final byte[] buffer, final int offset, final int length) throws IOException {
@@ -456,41 +599,98 @@ public class BlockCompressedInputStream extends InputStream implements LocationA
 
     public enum FileTermination {HAS_TERMINATOR_BLOCK, HAS_HEALTHY_LAST_BLOCK, DEFECTIVE}
 
+    /**
+     *
+     * @param file the file to check
+     * @return status of the last compressed block
+     * @throws IOException
+     */
     public static FileTermination checkTermination(final File file) throws IOException {
-        final long fileSize = file.length();
+        return checkTermination(file == null ? null : file.toPath());
+    }
+
+    /**
+     *
+     * @param path to the file to check
+     * @return status of the last compressed block
+     * @throws IOException
+     */
+    public static FileTermination checkTermination(final Path path) throws IOException {
+        try( final SeekableByteChannel channel = Files.newByteChannel(path, StandardOpenOption.READ) ){
+            return checkTermination(channel);
+        }
+    }
+
+    /**
+     * check the status of the final bzgipped block for the given bgzipped resource
+     *
+     * @param channel an open channel to read from,
+     * the channel will remain open and the initial position will be restored when the operation completes
+     * this makes no guarantee about the state of the channel if an exception is thrown during reading
+     *
+     * @return the status of the last compressed black
+     * @throws IOException
+     */
+    public static FileTermination checkTermination(SeekableByteChannel channel) throws IOException {
+        final long fileSize = channel.size();
         if (fileSize < BlockCompressedStreamConstants.EMPTY_GZIP_BLOCK.length) {
             return FileTermination.DEFECTIVE;
         }
-        final RandomAccessFile raFile = new RandomAccessFile(file, "r");
+        final long initialPosition = channel.position();
+        boolean exceptionThrown = false;
         try {
-            raFile.seek(fileSize - BlockCompressedStreamConstants.EMPTY_GZIP_BLOCK.length);
-            byte[] buf = new byte[BlockCompressedStreamConstants.EMPTY_GZIP_BLOCK.length];
-            raFile.readFully(buf);
-            if (Arrays.equals(buf, BlockCompressedStreamConstants.EMPTY_GZIP_BLOCK)) {
+            channel.position(fileSize - BlockCompressedStreamConstants.EMPTY_GZIP_BLOCK.length);
+
+            //Check if the end of the file is an empty gzip block which is used as the terminator for a bgzipped file
+            final ByteBuffer lastBlockBuffer = ByteBuffer.allocate(BlockCompressedStreamConstants.EMPTY_GZIP_BLOCK.length);
+            readFully(channel, lastBlockBuffer);
+            if (Arrays.equals(lastBlockBuffer.array(), BlockCompressedStreamConstants.EMPTY_GZIP_BLOCK)) {
                 return FileTermination.HAS_TERMINATOR_BLOCK;
             }
-            final int bufsize = (int)Math.min(fileSize, BlockCompressedStreamConstants.MAX_COMPRESSED_BLOCK_SIZE);
-            buf = new byte[bufsize];
-            raFile.seek(fileSize - bufsize);
-            raFile.read(buf);
-            for (int i = buf.length - BlockCompressedStreamConstants.EMPTY_GZIP_BLOCK.length;
-                    i >= 0; --i) {
+
+            //if the last block isn't an empty gzip block, check to see if it is a healthy compressed block or if it's corrupted
+            final int bufsize = (int) Math.min(fileSize, BlockCompressedStreamConstants.MAX_COMPRESSED_BLOCK_SIZE);
+            final byte[] bufferArray = new byte[bufsize];
+            channel.position(fileSize - bufsize);
+            readFully(channel, ByteBuffer.wrap(bufferArray));
+            for (int i = bufferArray.length - BlockCompressedStreamConstants.EMPTY_GZIP_BLOCK.length;
+                 i >= 0; --i) {
                 if (!preambleEqual(BlockCompressedStreamConstants.GZIP_BLOCK_PREAMBLE,
-                        buf, i, BlockCompressedStreamConstants.GZIP_BLOCK_PREAMBLE.length)) {
+                                   bufferArray, i, BlockCompressedStreamConstants.GZIP_BLOCK_PREAMBLE.length)) {
                     continue;
                 }
-                final ByteBuffer byteBuffer = ByteBuffer.wrap(buf, i + BlockCompressedStreamConstants.GZIP_BLOCK_PREAMBLE.length, 4);
+                final ByteBuffer byteBuffer = ByteBuffer.wrap(bufferArray,
+                                                              i + BlockCompressedStreamConstants.GZIP_BLOCK_PREAMBLE.length,
+                                                              4);
                 byteBuffer.order(ByteOrder.LITTLE_ENDIAN);
-                final int totalBlockSizeMinusOne =  byteBuffer.getShort() & 0xFFFF;
-                if (buf.length - i == totalBlockSizeMinusOne + 1) {
+                final int totalBlockSizeMinusOne = byteBuffer.getShort() & 0xFFFF;
+                if (bufferArray.length - i == totalBlockSizeMinusOne + 1) {
                     return FileTermination.HAS_HEALTHY_LAST_BLOCK;
                 } else {
                     return FileTermination.DEFECTIVE;
                 }
             }
             return FileTermination.DEFECTIVE;
+        } catch (final Throwable e) {
+            exceptionThrown = true;
+            throw e;
         } finally {
-            raFile.close();
+            //if an exception was thrown we don't want to reset the position because that would be likely to throw again
+            //and suppress the initial exception
+            if(!exceptionThrown) {
+                channel.position(initialPosition);
+            }
+        }
+    }
+
+    /**
+     * read as many bytes as dst's capacity into dst or throw if that's not possible
+     * @throws EOFException if channel has fewer bytes available than dst's capacity
+     */
+    static void readFully(SeekableByteChannel channel, ByteBuffer dst) throws IOException {
+        final int bytesRead = channel.read(dst);
+        if (bytesRead < dst.capacity()){
+            throw new EOFException();
         }
     }
 
@@ -508,6 +708,38 @@ public class BlockCompressedInputStream extends InputStream implements LocationA
         }
         return true;
     }
-}
 
+    protected static class DecompressedBlock {
+        /**
+         * Decompressed block
+         */
+        private final byte[] mBlock;
+        /**
+         * Compressed size of block (the uncompressed size can be found using
+         * mBlock.length)
+         */
+        private final int mBlockCompressedSize;
+        /**
+         * Stream offset of start of block
+         */
+        private final long mBlockAddress;
+        /**
+         * Exception thrown (if any) when attempting to decompress block
+         */
+        private final Exception mException;
+
+        public DecompressedBlock(long blockAddress, byte[] block, int compressedSize) {
+            mBlock = block;
+            mBlockAddress = blockAddress;
+            mBlockCompressedSize = compressedSize;
+            mException = null;
+        }
 
+        public DecompressedBlock(long blockAddress, int compressedSize, Exception exception) {
+            mBlock = new byte[0];
+            mBlockAddress = blockAddress;
+            mBlockCompressedSize = compressedSize;
+            mException = exception;
+        }
+    }
+}
diff --git a/src/main/java/htsjdk/samtools/util/BlockCompressedOutputStream.java b/src/main/java/htsjdk/samtools/util/BlockCompressedOutputStream.java
index 408282f..a1fc6c8 100644
--- a/src/main/java/htsjdk/samtools/util/BlockCompressedOutputStream.java
+++ b/src/main/java/htsjdk/samtools/util/BlockCompressedOutputStream.java
@@ -28,6 +28,7 @@ import htsjdk.samtools.util.zip.DeflaterFactory;
 import java.io.File;
 import java.io.IOException;
 import java.io.OutputStream;
+import java.nio.file.Files;
 import java.util.zip.CRC32;
 import java.util.zip.Deflater;
 
@@ -282,7 +283,7 @@ public class BlockCompressedOutputStream
         codec.writeBytes(BlockCompressedStreamConstants.EMPTY_GZIP_BLOCK);
         codec.close();
         // Can't re-open something that is not a regular file, e.g. a named pipe or an output stream
-        if (this.file == null || !this.file.isFile()) return;
+        if (this.file == null || !this.file.isFile() || !Files.isRegularFile(this.file.toPath())) return;
         if (BlockCompressedInputStream.checkTermination(this.file) !=
                 BlockCompressedInputStream.FileTermination.HAS_TERMINATOR_BLOCK) {
             throw new IOException("Terminator block not found after closing BGZF file " + this.file);
@@ -296,6 +297,7 @@ public class BlockCompressedOutputStream
      * @param bite
      * @throws IOException
      */
+    @Override
     public void write(final int bite) throws IOException {
         singleByteArray[0] = (byte)bite;
         write(singleByteArray);
diff --git a/src/main/java/htsjdk/samtools/util/BlockGunzipper.java b/src/main/java/htsjdk/samtools/util/BlockGunzipper.java
index 18e9285..bf763ba 100644
--- a/src/main/java/htsjdk/samtools/util/BlockGunzipper.java
+++ b/src/main/java/htsjdk/samtools/util/BlockGunzipper.java
@@ -24,6 +24,7 @@
 package htsjdk.samtools.util;
 
 import htsjdk.samtools.SAMFormatException;
+import htsjdk.samtools.util.zip.InflaterFactory;
 
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
@@ -43,10 +44,42 @@ import java.util.zip.Inflater;
  * @author alecw at broadinstitute.org
  */
 public class BlockGunzipper {
-    private final Inflater inflater = new Inflater(true); // GZIP mode
+    private static InflaterFactory defaultInflaterFactory = new InflaterFactory();
+    private final Inflater inflater;
     private final CRC32 crc32 = new CRC32();
     private boolean checkCrcs = false;
 
+    /**
+     * Create a BlockGunzipper using the default inflaterFactory
+     */
+    BlockGunzipper() {
+        inflater = defaultInflaterFactory.makeInflater(true); // GZIP mode
+    }
+
+    /**
+     * Create a BlockGunzipper using the provided inflaterFactory
+     * @param inflaterFactory
+     */
+    BlockGunzipper(InflaterFactory inflaterFactory) {
+        inflater = inflaterFactory.makeInflater(true); // GZIP mode
+    }
+
+    /**
+     * Sets the default {@link InflaterFactory} that will be used for all instances unless specified otherwise in the constructor.
+     * If this method is not called the default is a factory that will create the JDK {@link Inflater}.
+     * @param inflaterFactory non-null default factory.
+     */
+    public static void setDefaultInflaterFactory(final InflaterFactory inflaterFactory) {
+        if (inflaterFactory == null) {
+            throw new IllegalArgumentException("null inflaterFactory");
+        }
+        defaultInflaterFactory = inflaterFactory;
+    }
+
+    public static InflaterFactory getDefaultInflaterFactory() {
+        return defaultInflaterFactory;
+    }
+
     /** Allows the caller to decide whether or not to check CRCs on when uncompressing blocks. */
     public void setCheckCrcs(final boolean check) {
         this.checkCrcs = check;
diff --git a/src/main/java/htsjdk/samtools/util/BufferedLineReader.java b/src/main/java/htsjdk/samtools/util/BufferedLineReader.java
index de1115d..bc13466 100644
--- a/src/main/java/htsjdk/samtools/util/BufferedLineReader.java
+++ b/src/main/java/htsjdk/samtools/util/BufferedLineReader.java
@@ -26,32 +26,36 @@ package htsjdk.samtools.util;
 import htsjdk.samtools.Defaults;
 
 import java.io.BufferedReader;
+import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
+import java.io.LineNumberReader;
 import java.nio.charset.Charset;
 
 /**
  * Implementation of LineReader that is a thin wrapper around BufferedReader.  On Linux, this is faster
  * than AsciiLineReaderImpl.  If you use AsciiLineReader rather than this class, it will detect the OS
  * and delegate to the preferred implementation.
- *
- * TODO: Replace this with {@link java.io.LineNumberReader}?
  * 
  * @author alecw at broadinstitute.org
  */
-public class BufferedLineReader implements LineReader {
-
-    private final BufferedReader reader;
-    private int lineNumber = 0;
-    private String peekedLine;
+public class BufferedLineReader extends LineNumberReader implements LineReader {
 
     public BufferedLineReader(final InputStream is) {
         this(is, Defaults.NON_ZERO_BUFFER_SIZE);
     }
 
     public BufferedLineReader(final InputStream is, final int bufferSize) {
-        reader = new BufferedReader(new InputStreamReader(is, Charset.forName("UTF-8")), bufferSize);
+        super(new InputStreamReader(is, Charset.forName("UTF-8")), bufferSize);
+    }
+
+    /**
+     * Returns a {@link BufferedLineReader} that gets its input from a String. No charset conversion
+     * is necessary because the String is in unicode.
+     */
+    public static BufferedLineReader fromString(final String s) {
+        return new BufferedLineReader(new ByteArrayInputStream(s.getBytes()));
     }
 
     /**
@@ -59,55 +63,36 @@ public class BufferedLineReader implements LineReader {
      *
      * @return the line read, or null if EOF has been reached.
      */
+    @Override
     public String readLine() {
-        ++lineNumber;
         try {
-            final String ret;
-            if (peekedLine != null) {
-                ret = peekedLine;
-                peekedLine = null;
-            } else {
-            ret = reader.readLine();
-            }
-            return ret;
+            return super.readLine();
         } catch (IOException e) {
             throw new RuntimeIOException(e);
         }
     }
 
     /**
-     * @return 1-based number of line most recently read
-     */
-    public int getLineNumber() {
-        return lineNumber;
-    }
-
-    /**
      * Non-destructive one-character look-ahead.
      *
      * @return If not eof, the next character that would be read.  If eof, -1.
      */
+    @Override
     public int peek() {
-        if (peekedLine == null) {
-            try {
-                peekedLine = reader.readLine();
-            } catch (IOException e) {
+        try {
+            mark(1);
+            final int ret = read();
+            reset();
+            return ret;
+        } catch (IOException e) {
                 throw new RuntimeIOException(e);
-            }
-        }
-        if (peekedLine == null) {
-            return -1;
-        }
-        if (peekedLine.isEmpty()) {
-            return '\n';
         }
-        return peekedLine.charAt(0);
     }
 
+    @Override
     public void close() {
-        peekedLine = null;
         try {
-            reader.close();
+            super.close();
         } catch (IOException e) {
             throw new RuntimeIOException(e);
         }
diff --git a/src/main/java/htsjdk/samtools/util/CloseableIterator.java b/src/main/java/htsjdk/samtools/util/CloseableIterator.java
index d26443e..fa657be 100755
--- a/src/main/java/htsjdk/samtools/util/CloseableIterator.java
+++ b/src/main/java/htsjdk/samtools/util/CloseableIterator.java
@@ -45,6 +45,7 @@ import java.util.stream.StreamSupport;
  */
 public interface CloseableIterator<T> extends Iterator<T>, Closeable {
     /** Should be implemented to close/release any underlying resources. */
+    @Override
     void close();
 
     /** Consumes the contents of the iterator and returns it as a List. */
diff --git a/src/main/java/htsjdk/samtools/util/CustomGzipOutputStream.java b/src/main/java/htsjdk/samtools/util/CustomGzipOutputStream.java
index cb3652e..16f9950 100644
--- a/src/main/java/htsjdk/samtools/util/CustomGzipOutputStream.java
+++ b/src/main/java/htsjdk/samtools/util/CustomGzipOutputStream.java
@@ -11,13 +11,13 @@ import java.util.zip.GZIPOutputStream;
  * @author Tim Fennell
  */
 public class CustomGzipOutputStream extends GZIPOutputStream {
-    CustomGzipOutputStream(final OutputStream outputStream, final int bufferSize, final int compressionLevel) throws
+    public CustomGzipOutputStream(final OutputStream outputStream, final int bufferSize, final int compressionLevel) throws
             IOException {
         super(outputStream, bufferSize);
         this.def.setLevel(compressionLevel);
     }
 
-    CustomGzipOutputStream(final OutputStream outputStream, final int compressionLevel) throws IOException {
+    public CustomGzipOutputStream(final OutputStream outputStream, final int compressionLevel) throws IOException {
         super(outputStream);
         this.def.setLevel(compressionLevel);
     }
diff --git a/src/main/java/htsjdk/samtools/util/DateParser.java b/src/main/java/htsjdk/samtools/util/DateParser.java
index 02a9609..f2d9481 100644
--- a/src/main/java/htsjdk/samtools/util/DateParser.java
+++ b/src/main/java/htsjdk/samtools/util/DateParser.java
@@ -277,47 +277,6 @@ public class DateParser {
                 .append("Z").toString();
     }
 
-    public static void test(String isodate) {
-        System.out.println("----------------------------------");
-        try {
-            Date date = parse(isodate);
-            System.out.println(">> "+isodate);
-            System.out.println(">> "+date.toString()+" ["+date.getTime()+"]");
-            System.out.println(">> "+getIsoDate(date));
-        } catch (InvalidDateException ex) {
-            System.err.println(isodate+" is invalid");
-            System.err.println(ex.getMessage());
-        }
-        System.out.println("----------------------------------");
-    }
-
-    public static void test(Date date) {
-        String isodate = null;
-        System.out.println("----------------------------------");
-        try {
-            System.out.println(">> "+date.toString()+" ["+date.getTime()+"]");
-            isodate = getIsoDate(date);
-            System.out.println(">> "+isodate);
-            date = parse(isodate);
-            System.out.println(">> "+date.toString()+" ["+date.getTime()+"]");
-        } catch (InvalidDateException ex) {
-            System.err.println(isodate+" is invalid");
-            System.err.println(ex.getMessage());
-        }
-        System.out.println("----------------------------------");
-    }
-
-    public static void main(String args[]) {
-        test("1997-07-16T19:20:30.45-02:00");
-        test("1997-07-16T19:20:30+01:00");
-        test("1997-07-16T19:20:30+01:00");
-        test("1997-07-16T19:20");
-        test("1997-07-16");
-        test("1997-07");
-        test("1997");
-        test(new Date());
-    }
-
     public static class InvalidDateException extends SAMException {
         public InvalidDateException() {
         }
diff --git a/src/main/java/htsjdk/samtools/util/DelegatingIterator.java b/src/main/java/htsjdk/samtools/util/DelegatingIterator.java
index 054352b..9d5174a 100644
--- a/src/main/java/htsjdk/samtools/util/DelegatingIterator.java
+++ b/src/main/java/htsjdk/samtools/util/DelegatingIterator.java
@@ -15,20 +15,24 @@ public class DelegatingIterator<T> implements CloseableIterator<T> {
         this.iterator = iterator;
     }
 
+    @Override
     public void close() {
         if (iterator instanceof CloseableIterator) {
             ((CloseableIterator) this.iterator).close();
         }
     }
 
+    @Override
     public boolean hasNext() {
         return this.iterator.hasNext();
     }
 
+    @Override
     public T next() {
         return this.iterator.next();
     }
 
+    @Override
     public void remove() {
         this.iterator.remove();
     }
diff --git a/src/main/java/htsjdk/samtools/util/DiskBackedQueue.java b/src/main/java/htsjdk/samtools/util/DiskBackedQueue.java
index bbf3818..22fca11 100644
--- a/src/main/java/htsjdk/samtools/util/DiskBackedQueue.java
+++ b/src/main/java/htsjdk/samtools/util/DiskBackedQueue.java
@@ -130,6 +130,7 @@ public class DiskBackedQueue<E> implements Queue<E> {
      * @return true (if add successful)
      * @throws IllegalStateException if the queue cannot be added to
      */
+    @Override
     public boolean add(final E record) throws IllegalStateException {
         if (!canAdd) throw new IllegalStateException("Cannot add to DiskBackedQueue whose canAdd() method returns false");
 
@@ -192,6 +193,7 @@ public class DiskBackedQueue<E> implements Queue<E> {
     /**
      * Return the total number of elements in the queue, both in memory and on disk
      */
+    @Override
     public int size() {
         return (this.headRecord == null) ? 0 : (1 + this.ramRecords.size() + this.numRecordsOnDisk);
     }
@@ -238,6 +240,7 @@ public class DiskBackedQueue<E> implements Queue<E> {
      *
      * @throws Throwable
      */
+    @Override
     protected void finalize() throws Throwable {
         this.closeIOResources();
         super.finalize(); // NB: intellij wanted me to do this. Need I?  I'm not extending anything
diff --git a/src/main/java/htsjdk/samtools/util/EdgingRecordAndOffset.java b/src/main/java/htsjdk/samtools/util/EdgingRecordAndOffset.java
index b83a169..df282b0 100644
--- a/src/main/java/htsjdk/samtools/util/EdgingRecordAndOffset.java
+++ b/src/main/java/htsjdk/samtools/util/EdgingRecordAndOffset.java
@@ -56,6 +56,8 @@ public abstract class EdgingRecordAndOffset extends AbstractRecordAndOffset {
 
     public abstract byte getBaseQuality(int position);
 
+    public abstract int getRefPos();
+
     public static EdgingRecordAndOffset createBeginRecord(SAMRecord record, int offset, int length, int refPos) {
         return new StartEdgingRecordAndOffset(record, offset, length, refPos);
     }
@@ -103,6 +105,7 @@ public abstract class EdgingRecordAndOffset extends AbstractRecordAndOffset {
          * @param position in the reference
          * @return base quality of a read base, corresponding to a given reference position
          */
+        @Override
         public byte getBaseQuality(int position) {
             int rOffset = getRelativeOffset(position);
             byte[] baseQualities = record.getBaseQualities();
@@ -174,6 +177,7 @@ public abstract class EdgingRecordAndOffset extends AbstractRecordAndOffset {
          * @param position in the reference
          * @return base quality of a read base, corresponding to a given reference position
          */
+        @Override
         public byte getBaseQuality(int position) {
             return start.getBaseQuality(position);
         }
diff --git a/src/main/java/htsjdk/samtools/util/FastLineReader.java b/src/main/java/htsjdk/samtools/util/FastLineReader.java
index d802fad..95d6202 100644
--- a/src/main/java/htsjdk/samtools/util/FastLineReader.java
+++ b/src/main/java/htsjdk/samtools/util/FastLineReader.java
@@ -79,6 +79,7 @@ public class FastLineReader implements Closeable {
         return sawEoln;
     }
 
+    @Override
     public void close() {
         CloserUtil.close(in);
         in = null;
diff --git a/src/main/java/htsjdk/samtools/util/FileAppendStreamLRUCache.java b/src/main/java/htsjdk/samtools/util/FileAppendStreamLRUCache.java
index bc8bc01..500b931 100644
--- a/src/main/java/htsjdk/samtools/util/FileAppendStreamLRUCache.java
+++ b/src/main/java/htsjdk/samtools/util/FileAppendStreamLRUCache.java
@@ -47,6 +47,7 @@ public class FileAppendStreamLRUCache extends ResourceLimitedMap<File, OutputStr
     }
 
     private static class Functor implements ResourceLimitedMapFunctor<File, OutputStream> {
+        @Override
         public OutputStream makeValue(final File file) {
             try {
                 return IOUtil.maybeBufferOutputStream(new FileOutputStream(file, true));
@@ -65,6 +66,7 @@ public class FileAppendStreamLRUCache extends ResourceLimitedMap<File, OutputStr
             }
         }
 
+        @Override
         public void finalizeValue(final File file, final OutputStream out) {
             try {
                 out.flush();
diff --git a/src/main/java/htsjdk/samtools/util/IOUtil.java b/src/main/java/htsjdk/samtools/util/IOUtil.java
index 07ae900..29a5e6b 100644
--- a/src/main/java/htsjdk/samtools/util/IOUtil.java
+++ b/src/main/java/htsjdk/samtools/util/IOUtil.java
@@ -48,14 +48,20 @@ import java.io.OutputStreamWriter;
 import java.io.Reader;
 import java.io.Writer;
 import java.net.MalformedURLException;
+import java.net.URI;
+import java.net.URISyntaxException;
 import java.net.URL;
 import java.nio.charset.Charset;
+import java.nio.file.FileSystemNotFoundException;
+import java.nio.file.FileSystems;
 import java.nio.file.Files;
 import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Scanner;
@@ -80,8 +86,11 @@ public class IOUtil {
     public static final long TWO_GBS = 2 * ONE_GB;
     public static final long FIVE_GBS = 5 * ONE_GB;
 
+    public static final String VCF_FILE_EXTENSION = ".vcf";
+    public static final String BCF_FILE_EXTENSION = ".bcf";
+    public static final String COMPRESSED_VCF_FILE_EXTENSION = ".vcf.gz";
     /** Possible extensions for VCF files and related formats. */
-    public static final String[] VCF_EXTENSIONS = new String[] {".vcf", ".vcf.gz", ".bcf"};
+    public static final String[] VCF_EXTENSIONS = {VCF_FILE_EXTENSION, COMPRESSED_VCF_FILE_EXTENSION, BCF_FILE_EXTENSION};
 
     public static final String INTERVAL_LIST_FILE_EXTENSION = IntervalList.INTERVAL_LIST_FILE_EXTENSION;
 
@@ -240,6 +249,15 @@ public class IOUtil {
     }
 
     /**
+     * @return true if the path is not a device (e.g. /dev/null or /dev/stdin), and is not
+     * an existing directory.  I.e. is is a regular path that may correspond to an existing
+     * file, or a path that could be a regular output file.
+     */
+    public static boolean isRegularPath(final Path path) {
+        return !Files.exists(path) || Files.isRegularFile(path);
+    }
+
+    /**
      * Creates a new tmp file on one of the available temp filesystems, registers it for deletion
      * on JVM exit and then returns it.
      */
@@ -360,7 +378,7 @@ public class IOUtil {
      * and if it is a file then not a directory and is readable.  If any
      * condition is false then a runtime exception is thrown.
      *
-     * @param files the list of files to check for readability
+     * @param inputs the list of files to check for readability
      */
     public static void assertInputsAreValid(final List<String> inputs) {
         for (final String input : inputs) assertInputIsValid(input);
@@ -459,7 +477,7 @@ public class IOUtil {
     public static void assertFilesEqual(final File f1, final File f2) {
         try {
             if (f1.length() != f2.length()) {
-                throw new SAMException("Files " + f1 + " and " + f2 + " are different lengths.");
+                throw new SAMException("File " + f1 + " is " + f1.length() + " bytes but file " + f2 + " is " + f2.length() + " bytes.");
             }
             final FileInputStream s1 = new FileInputStream(f1);
             final FileInputStream s2 = new FileInputStream(f2);
@@ -690,6 +708,7 @@ public class IOUtil {
 
     public static File[] getFilesMatchingRegexp(final File directory, final Pattern regexp) {
         return directory.listFiles( new FilenameFilter() {
+            @Override
             public boolean accept(final File dir, final String name) {
                 return regexp.matcher(name).matches();
             }
@@ -771,9 +790,14 @@ public class IOUtil {
 
     /** Checks that a file exists and is readable, and then returns a buffered reader for it. */
     public static BufferedReader openFileForBufferedReading(final File file) {
-        return new BufferedReader(new InputStreamReader(openFileForReading(file)), Defaults.NON_ZERO_BUFFER_SIZE);
+        return openFileForBufferedReading(file.toPath());
 	}
 
+    /** Checks that a path exists and is readable, and then returns a buffered reader for it. */
+    public static BufferedReader openFileForBufferedReading(final Path path) {
+        return new BufferedReader(new InputStreamReader(openFileForReading(path)), Defaults.NON_ZERO_BUFFER_SIZE);
+    }
+
     /** Takes a string and replaces any characters that are not safe for filenames with an underscore */
     public static String makeFileNameSafe(final String str) {
         return str.trim().replaceAll("[\\s!\"#$%&'()*/:;<=>?@\\[\\]\\\\^`{|}~]", "_");
@@ -943,4 +967,53 @@ public class IOUtil {
 
         return output;
     }
+
+    /**
+     * Check if the given URI has a scheme.
+     *
+     * @param uriString the URI to check
+     * @return <code>true</code> if the given URI has a scheme, <code>false</code> if
+     * not, or if the URI is malformed.
+     */
+    public static boolean hasScheme(String uriString) {
+        try {
+            return new URI(uriString).getScheme() != null;
+        } catch (URISyntaxException e) {
+            return false;
+        }
+    }
+
+    /**
+     * Converts the given URI to a {@link Path} object. If the filesystem cannot be found in the usual way, then attempt
+     * to load the filesystem provider using the thread context classloader. This is needed when the filesystem
+     * provider is loaded using a URL classloader (e.g. in spark-submit).
+     *
+     * @param uriString the URI to convert
+     * @return the resulting {@code Path}
+     * @throws IOException an I/O error occurs creating the file system
+     */
+    public static Path getPath(String uriString) throws IOException {
+        URI uri = URI.create(uriString);
+        try {
+            // if the URI has no scheme, then treat as a local file, otherwise use the scheme to determine the filesystem to use
+            return uri.getScheme() == null ? Paths.get(uriString) : Paths.get(uri);
+        } catch (FileSystemNotFoundException e) {
+            ClassLoader cl = Thread.currentThread().getContextClassLoader();
+            if (cl == null) {
+                throw e;
+            }
+            return FileSystems.newFileSystem(uri, new HashMap<>(), cl).provider().getPath(uri);
+        }
+    }
+
+    /**
+     * Adds the extension to the given path.
+     *
+     * @param path       the path to start from, eg. "/folder/file.jpg"
+     * @param extension  the extension to add, eg. ".bak"
+     * @return           "/folder/file.jpg.bak"
+     */
+    public static Path addExtension(Path path, String extension) {
+        return path.resolveSibling(path.getFileName() + extension);
+    }
 }
diff --git a/src/main/java/htsjdk/samtools/util/Interval.java b/src/main/java/htsjdk/samtools/util/Interval.java
index 779bb25..51e9127 100644
--- a/src/main/java/htsjdk/samtools/util/Interval.java
+++ b/src/main/java/htsjdk/samtools/util/Interval.java
@@ -141,6 +141,7 @@ public class Interval implements Comparable<Interval>, Cloneable, Locatable {
      * Sort based on sequence.compareTo, then start pos, then end pos
      * with null objects coming lexically last
      */
+    @Override
     public int compareTo(final Interval that) {
         if (that == null) return -1; // nulls last
 
diff --git a/src/main/java/htsjdk/samtools/util/IntervalList.java b/src/main/java/htsjdk/samtools/util/IntervalList.java
index 76cb508..a5d2a8e 100644
--- a/src/main/java/htsjdk/samtools/util/IntervalList.java
+++ b/src/main/java/htsjdk/samtools/util/IntervalList.java
@@ -34,6 +34,7 @@ import java.io.BufferedWriter;
 import java.io.File;
 import java.io.IOException;
 import java.io.Serializable;
+import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -84,6 +85,7 @@ public class IntervalList implements Iterable<Interval> {
     public SAMFileHeader getHeader() { return header; }
 
     /** Returns an iterator over the intervals. */
+    @Override
     public Iterator<Interval> iterator() { return this.intervals.iterator(); }
 
     /** Adds an interval to the list of intervals. */
@@ -355,11 +357,9 @@ public class IntervalList implements Iterable<Interval> {
             end   = Math.max(end, i.getEnd());
         }
 
-        if (concatenateNames) {
-            if (names.isEmpty()) name = null;
-            else name = StringUtil.join("|", names);
-        }
-        else { name = names.iterator().next(); }
+        if (names.isEmpty()) name = null;
+        else if (concatenateNames) name = StringUtil.join("|", names);
+        else name = names.iterator().next();
 
         return new Interval(chrom, start, end, neg, name);
     }
@@ -396,12 +396,21 @@ public class IntervalList implements Iterable<Interval> {
      * @return an IntervalList object that contains the headers and intervals from the file
      */
     public static IntervalList fromFile(final File file) {
-        final BufferedReader reader= IOUtil.openFileForBufferedReading(file);
+        return fromPath(file.toPath());
+    }
+
+    /**
+     * Parses an interval list from a path.
+     * @param path the path containing the intervals
+     * @return an IntervalList object that contains the headers and intervals from the path
+     */
+    public static IntervalList fromPath(final Path path) {
+        final BufferedReader reader = IOUtil.openFileForBufferedReading(path);
         final IntervalList list = fromReader(reader);
         try {
             reader.close();
         } catch (final IOException e) {
-            throw new SAMException(String.format("Failed to close file %s after reading",file));
+            throw new SAMException(String.format("Failed to close file %s after reading", path.toUri().toString()));
         }
 
         return list;
@@ -455,7 +464,7 @@ public class IntervalList implements Iterable<Interval> {
                 throw new IllegalStateException("Interval list file must contain header. ");
             }
 
-            final StringLineReader headerReader = new StringLineReader(builder.toString());
+            final BufferedLineReader headerReader = BufferedLineReader.fromString(builder.toString());
             final SAMTextHeaderCodec codec = new SAMTextHeaderCodec();
             final IntervalList list = new IntervalList(codec.decode(headerReader, "BufferedReader"));
             final SAMSequenceDictionary dict = list.getHeader().getSequenceDictionary();
@@ -730,6 +739,64 @@ public class IntervalList implements Iterable<Interval> {
                 subtract(lists2, lists1));
     }
 
+    /**
+     * A utility function for finding the intervals in the first list that have at least 1bp overlap with any interval
+     * in the second list.
+     *
+     * @param lhs the first collection of IntervalLists
+     * @param lhs the second collection of IntervalLists
+     * @return an IntervalList comprising of all intervals in the first IntervalList that have at least 1bp overlap with
+     * any interval in the second.
+     */
+    public static IntervalList overlaps(final IntervalList lhs, final IntervalList rhs) {
+        return overlaps(Collections.singletonList(lhs), Collections.singletonList(rhs));
+    }
+
+    /**
+     * A utility function for finding the intervals in the first list that have at least 1bp overlap with any interval
+     * in the second list.
+     *
+     * @param lists1 the first collection of IntervalLists
+     * @param lists2 the second collection of IntervalLists
+     * @return an IntervalList comprising of all intervals in the first collection of lists that have at least 1bp
+     * overlap with any interval in the second lists.
+     */
+    public static IntervalList overlaps(final Collection<IntervalList> lists1, final Collection<IntervalList> lists2) {
+        if(lists1.isEmpty()){
+            throw new SAMException("Cannot call overlaps with the first collection having empty list of IntervalLists.");
+        }
+
+        final SAMFileHeader header = lists1.iterator().next().getHeader().clone();
+        header.setSortOrder(SAMFileHeader.SortOrder.unsorted);
+
+        // Create an overlap detector on list2
+        final IntervalList overlapIntervals = new IntervalList(header);
+        for (final IntervalList list : lists2) {
+            SequenceUtil.assertSequenceDictionariesEqual(header.getSequenceDictionary(),
+                    list.getHeader().getSequenceDictionary());
+            overlapIntervals.addall(list.getIntervals());
+        }
+        final OverlapDetector<Integer> detector = new OverlapDetector<>(0, 0);
+        final int dummy = -1; // NB: since we don't actually use the returned objects, we can use a dummy value
+        for (final Interval interval : overlapIntervals.sorted().uniqued()) {
+            detector.addLhs(dummy, interval);
+        }
+
+        // Go through each input interval in in lists1 and see if overlaps any interval in lists2
+        final IntervalList merged = new IntervalList(header);
+        for (final IntervalList list : lists1) {
+            SequenceUtil.assertSequenceDictionariesEqual(header.getSequenceDictionary(),
+                    list.getHeader().getSequenceDictionary());
+            for (final Interval interval : list.getIntervals()) {
+                if (detector.overlapsAny(interval)) {
+                    merged.add(interval);
+                }
+            }
+        }
+
+        return merged;
+    }
+
     @Override
     public boolean equals(final Object o) {
         if (this == o) return true;
@@ -762,6 +829,7 @@ class IntervalCoordinateComparator implements Comparator<Interval>, Serializable
         this.header = header;
     }
 
+    @Override
     public int compare(final Interval lhs, final Interval rhs) {
         final int lhsIndex = this.header.getSequenceIndex(lhs.getContig());
         final int rhsIndex = this.header.getSequenceIndex(rhs.getContig());
diff --git a/src/main/java/htsjdk/samtools/util/IntervalListReferenceSequenceMask.java b/src/main/java/htsjdk/samtools/util/IntervalListReferenceSequenceMask.java
index 1ddd164..08c2dd5 100644
--- a/src/main/java/htsjdk/samtools/util/IntervalListReferenceSequenceMask.java
+++ b/src/main/java/htsjdk/samtools/util/IntervalListReferenceSequenceMask.java
@@ -66,6 +66,7 @@ public class IntervalListReferenceSequenceMask implements ReferenceSequenceMask
      *
      * @return true if the mask is set for the given sequence and position
      */
+    @Override
     public boolean get(final int sequenceIndex, final int position) {
         ensureSequenceLoaded(sequenceIndex);
         return currentBitSet.get(position);
@@ -76,6 +77,7 @@ public class IntervalListReferenceSequenceMask implements ReferenceSequenceMask
      *
      * @return the next pos on the given sequence >= position that is set, or -1 if there are no more set positions
      */
+    @Override
     public int nextPosition(final int sequenceIndex, final int position) {
         ensureSequenceLoaded(sequenceIndex);
         // nextSetBit returns the first set bit on or after the starting index, therefore position+1
@@ -108,6 +110,7 @@ public class IntervalListReferenceSequenceMask implements ReferenceSequenceMask
     /**
      * @return Largest sequence index for which there are set bits.
      */
+    @Override
     public int getMaxSequenceIndex() {
         return lastSequenceIndex;
     }
@@ -115,6 +118,7 @@ public class IntervalListReferenceSequenceMask implements ReferenceSequenceMask
     /**
      * @return the largest position on the last sequence index
      */
+    @Override
     public int getMaxPosition() {
         return lastPosition;
     }
diff --git a/src/main/java/htsjdk/samtools/util/IntervalTree.java b/src/main/java/htsjdk/samtools/util/IntervalTree.java
index 49c3017..3efc4df 100644
--- a/src/main/java/htsjdk/samtools/util/IntervalTree.java
+++ b/src/main/java/htsjdk/samtools/util/IntervalTree.java
@@ -340,6 +340,7 @@ public class IntervalTree<V> implements Iterable<IntervalTree.Node<V>>
      * Return an iterator over the entire tree.
      * @return An iterator.
      */
+    @Override
     public Iterator<Node<V>> iterator()
     {
         return new FwdIterator(min());
@@ -482,7 +483,7 @@ public class IntervalTree<V> implements Iterable<IntervalTree.Node<V>>
 
         public int getLength()
         {
-            return mEnd - mStart;
+            return mEnd - mStart + 1 ;
         }
 
         public int getRelationship( final Node<V1> interval )
@@ -492,14 +493,14 @@ public class IntervalTree<V> implements Iterable<IntervalTree.Node<V>>
                 result = HAS_LESSER_PART;
             if ( mEnd > interval.getEnd() )
                 result |= HAS_GREATER_PART;
-            if ( mStart < interval.getEnd() && interval.getStart() < mEnd )
+            if ( mStart <= interval.getEnd() && interval.getStart() <= mEnd )
                 result |= HAS_OVERLAPPING_PART;
             return result;
         }
 
         public boolean isAdjacent( final Node<V1> interval )
         {
-            return mStart == interval.getEnd() || mEnd == interval.getStart();
+            return mStart == interval.getEnd() + 1 || mEnd + 1 == interval.getStart();
         }
 
         public V1 getValue()
@@ -1069,11 +1070,13 @@ public class IntervalTree<V> implements Iterable<IntervalTree.Node<V>>
             mNext = node;
         }
 
+        @Override
         public boolean hasNext()
         {
             return mNext != null;
         }
 
+        @Override
         public Node<V> next()
         {
             if ( mNext == null )
@@ -1092,6 +1095,7 @@ public class IntervalTree<V> implements Iterable<IntervalTree.Node<V>>
             return mLast;
         }
 
+        @Override
         public void remove()
         {
             if ( mLast == null )
@@ -1115,11 +1119,13 @@ public class IntervalTree<V> implements Iterable<IntervalTree.Node<V>>
             mNext = node;
         }
 
+        @Override
         public boolean hasNext()
         {
             return mNext != null;
         }
 
+        @Override
         public Node<V> next()
         {
             if ( mNext == null )
@@ -1135,6 +1141,7 @@ public class IntervalTree<V> implements Iterable<IntervalTree.Node<V>>
             return mLast;
         }
 
+        @Override
         public void remove()
         {
             if ( mLast == null )
@@ -1160,11 +1167,13 @@ public class IntervalTree<V> implements Iterable<IntervalTree.Node<V>>
             mEnd = end;
         }
 
+        @Override
         public boolean hasNext()
         {
             return mNext != null;
         }
 
+        @Override
         public Node<V> next()
         {
             if ( mNext == null )
@@ -1182,6 +1191,7 @@ public class IntervalTree<V> implements Iterable<IntervalTree.Node<V>>
             return mLast;
         }
 
+        @Override
         public void remove()
         {
             if ( mLast == null )
@@ -1207,16 +1217,19 @@ public class IntervalTree<V> implements Iterable<IntervalTree.Node<V>>
             mItr = itr;
         }
 
+        @Override
         public boolean hasNext()
         {
             return mItr.hasNext();
         }
 
+        @Override
         public V1 next()
         {
             return mItr.next().getValue();
         }
 
+        @Override
         public void remove()
         {
             mItr.remove();
diff --git a/src/main/java/htsjdk/samtools/util/IntervalTreeMap.java b/src/main/java/htsjdk/samtools/util/IntervalTreeMap.java
index ffeae94..ebec2f4 100644
--- a/src/main/java/htsjdk/samtools/util/IntervalTreeMap.java
+++ b/src/main/java/htsjdk/samtools/util/IntervalTreeMap.java
@@ -60,10 +60,12 @@ public class IntervalTreeMap<T>
         }
     }
 
+    @Override
     public void clear() {
         mSequenceMap.clear();
     }
 
+    @Override
     public boolean containsKey(final Object object) {
         if (!(object instanceof Interval)) {
             return false;
@@ -79,6 +81,7 @@ public class IntervalTreeMap<T>
         return (tree.find(key.getStart(), key.getEnd()) != null);
     }
 
+    @Override
     public Set<Entry<Interval, T>> entrySet() {
         return mEntrySet;
     }
@@ -95,6 +98,7 @@ public class IntervalTreeMap<T>
         return mSequenceMap.hashCode();
     }
 
+    @Override
     public T get(final Object object) {
         if (!(object instanceof Interval)) {
             return null;
@@ -114,6 +118,7 @@ public class IntervalTreeMap<T>
         return node.getValue();
     }
 
+    @Override
     public boolean isEmpty() {
         for (final IntervalTree<T> tree : mSequenceMap.values()) {
             if (tree.size() > 0) {
@@ -123,6 +128,7 @@ public class IntervalTreeMap<T>
         return true;
     }
 
+    @Override
     public T put(final Interval key, final T value) {
         IntervalTree<T> tree = mSequenceMap.get(key.getContig());
         if (tree == null) {
@@ -132,6 +138,7 @@ public class IntervalTreeMap<T>
         return tree.put(key.getStart(), key.getEnd(), value);
     }
 
+    @Override
     public T remove(final Object object) {
         if (!(object instanceof Interval)) {
             return null;
@@ -147,6 +154,7 @@ public class IntervalTreeMap<T>
         return tree.remove(key.getStart(), key.getEnd());
     }
 
+    @Override
     public int size() {
         // Note: We should think about caching the size to avoid having to recompute it.
         int size = 0;
@@ -157,16 +165,16 @@ public class IntervalTreeMap<T>
     }
     /**
      * Test overlapping interval 
-     * @param key the interval
+     * @param key the Locatable
      * @return true if it contains an object overlapping the interval 
      */
-    public boolean containsOverlapping(final Interval key) {
+    public boolean containsOverlapping(final Locatable key) {
         final IntervalTree<T> tree = mSequenceMap.get(key.getContig());
         return tree!=null && tree.overlappers(key.getStart(), key.getEnd()).hasNext();
     	}
     
     
-    public Collection<T> getOverlapping(final Interval key) {
+    public Collection<T> getOverlapping(final Locatable key) {
         final List<T> result = new ArrayList<T>();
         final IntervalTree<T> tree = mSequenceMap.get(key.getContig());
         if (tree != null) {
@@ -179,10 +187,10 @@ public class IntervalTreeMap<T>
     }
     /**
      * Test if this contains an object that is contained by 'key'
-     * @param key the interval
+     * @param key the Locatable
      * @return true if it contains an object is contained by 'key'
      */
-    public boolean containsContained(final Interval key) {
+    public boolean containsContained(final Locatable key) {
         final IntervalTree<T> tree = mSequenceMap.get(key.getContig());
         if(tree==null) return false;
             final Iterator<IntervalTree.Node<T>> iterator = tree.overlappers(key.getStart(), key.getEnd());
@@ -196,7 +204,7 @@ public class IntervalTreeMap<T>
     }
     
     
-    public Collection<T> getContained(final Interval key) {
+    public Collection<T> getContained(final Locatable key) {
         final List<T> result = new ArrayList<T>();
         final IntervalTree<T> tree = mSequenceMap.get(key.getContig());
         if (tree != null) {
@@ -214,6 +222,7 @@ public class IntervalTreeMap<T>
     private class EntrySet
         extends AbstractSet<Map.Entry<Interval,T>> {
 
+        @Override
         public void clear() {
            IntervalTreeMap.this.clear();
         }
@@ -225,14 +234,17 @@ public class IntervalTreeMap<T>
             return entry.getValue().equals(IntervalTreeMap.this.get(entry.getKey()));
         }
 
+        @Override
         public boolean isEmpty() {
             return IntervalTreeMap.this.isEmpty();
         }
 
+        @Override
         public Iterator<Map.Entry<Interval,T>> iterator() {
             return new EntryIterator();
         }
 
+        @Override
         @SuppressWarnings("unchecked")
         public boolean remove(final Object object) {
             // Note: Could not figure out how to eliminate the unchecked cast.
@@ -251,6 +263,7 @@ public class IntervalTreeMap<T>
             }
         }
 
+        @Override
         public int size() {
             return IntervalTreeMap.this.size();
         }
@@ -268,10 +281,12 @@ public class IntervalTreeMap<T>
             advanceSequence();
         }
 
+        @Override
         public boolean hasNext() {
             return (mTreeIterator != null && mTreeIterator.hasNext());
         }
 
+        @Override
         public Map.Entry<Interval,T> next() {
             if (!hasNext()) {
                 throw new NoSuchElementException("Iterator exhausted");
@@ -286,6 +301,7 @@ public class IntervalTreeMap<T>
             return new MapEntry(key, value);
         }
 
+        @Override
         public void remove() {
             if (mTreeIterator == null) {
                 throw new IllegalStateException("Iterator.next() has not been called");
@@ -315,14 +331,17 @@ public class IntervalTreeMap<T>
             mValue = value;
         }
 
+        @Override
         public Interval getKey() {
             return mKey;
         }
 
+        @Override
         public T getValue() {
             return mValue;
         }
 
+        @Override
         public T setValue(final T value) {
             mValue = value;
             return IntervalTreeMap.this.put(mKey, mValue);
diff --git a/src/main/java/htsjdk/samtools/util/Iso8601Date.java b/src/main/java/htsjdk/samtools/util/Iso8601Date.java
index 9128867..e173bd3 100644
--- a/src/main/java/htsjdk/samtools/util/Iso8601Date.java
+++ b/src/main/java/htsjdk/samtools/util/Iso8601Date.java
@@ -36,6 +36,7 @@ import java.util.Date;
  */
 public class Iso8601Date extends Date {
     private static final ThreadLocal<DateFormat> iso8601DateFormatter = new ThreadLocal<DateFormat>() {
+        @Override
         protected synchronized DateFormat initialValue() {
             return new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ");
         }
diff --git a/src/main/java/htsjdk/samtools/util/Lazy.java b/src/main/java/htsjdk/samtools/util/Lazy.java
index 13726b8..fca53a6 100644
--- a/src/main/java/htsjdk/samtools/util/Lazy.java
+++ b/src/main/java/htsjdk/samtools/util/Lazy.java
@@ -1,5 +1,7 @@
 package htsjdk.samtools.util;
 
+import java.util.function.Supplier;
+
 /**
  * Simple utility for building an on-demand (lazy) object-initializer.
  * 
@@ -9,29 +11,36 @@ package htsjdk.samtools.util;
  * @author mccowan
  */
 public class Lazy<T> {
-    private final LazyInitializer<T> initializer;
+    private final Supplier<T> initializer;
     private boolean isInitialized = false;
     private T instance;
 
-    /** Simple cons */
-    public Lazy(final LazyInitializer<T> initializer) {
+    public Lazy(final Supplier<T> initializer) {
         this.initializer = initializer;
     }
 
     /** Returns the instance associated with this {@link Lazy}, initializing it if necessary. */
     public synchronized T get() {
         if (!isInitialized) {
-            this.instance = initializer.make();
+            this.instance = initializer.get();
             isInitialized = true;
         }
         return instance;
     }
 
-    /** Describes how to build the instance of the lazy object. */
+    /** Describes how to build the instance of the lazy object.
+     * @deprecated since 1/2017 use a {@link Supplier} instead
+     * */
     @FunctionalInterface
-    public interface LazyInitializer<T> {
+    @Deprecated
+    public interface LazyInitializer<T> extends Supplier<T> {
         /** Returns the desired object instance. */
         T make();
+
+        @Override
+        default T get(){
+            return make();
+        }
     }
 
     public boolean isInitialized() {
diff --git a/src/main/java/htsjdk/samtools/util/LineReader.java b/src/main/java/htsjdk/samtools/util/LineReader.java
index 0185700..4a07f15 100644
--- a/src/main/java/htsjdk/samtools/util/LineReader.java
+++ b/src/main/java/htsjdk/samtools/util/LineReader.java
@@ -47,5 +47,6 @@ public interface LineReader extends Closeable{
      */
     int peek();
 
+    @Override
     public void close();
 }
diff --git a/src/main/java/htsjdk/samtools/util/Locatable.java b/src/main/java/htsjdk/samtools/util/Locatable.java
index 0d70090..8dbf534 100644
--- a/src/main/java/htsjdk/samtools/util/Locatable.java
+++ b/src/main/java/htsjdk/samtools/util/Locatable.java
@@ -1,5 +1,7 @@
 package htsjdk.samtools.util;
 
+import java.util.Objects;
+
 /**
  * Any class that has a single logical mapping onto the genome should implement Locatable
  * positions should be reported as 1-based and closed at both ends
@@ -22,4 +24,56 @@ public interface Locatable {
      * @return 1-based closed-ended position, undefined if getContig() == null
      */
     int getEnd();
+
+    /**
+     * @return number of bases of reference covered by this interval
+     */
+    default int getLengthOnReference() {
+        return CoordMath.getLength(getStart(), getEnd());
+    }
+
+    /**
+     * Determines whether this interval overlaps the provided locatable.
+     *
+     * @param other interval to check
+     * @return true if this interval overlaps other, otherwise false
+     */
+    default boolean overlaps(Locatable other) {
+        return withinDistanceOf(other, 0);
+    }
+
+    /**
+     * Determines whether this interval comes within {@code distance} of overlapping the provided locatable.
+     * When distance = 0 this is equal to {@link #overlaps(Locatable)}
+     *
+     * @param other interval to check
+     * @param distance how many bases may be between the two intervals for us to still consider them overlapping.
+     * @return true if this interval overlaps other, otherwise false
+     */
+    default boolean withinDistanceOf(Locatable other, int distance) {
+        return contigsMatch(other) &&
+                CoordMath.overlaps(getStart(), getEnd(), other.getStart()-distance, other.getEnd()+distance);
+    }
+
+    /**
+     * Determines whether this interval contains the entire region represented by other
+     * (in other words, whether it covers it).
+     *
+     *
+     * @param other interval to check
+     * @return true if this interval contains all of the base positions spanned by other, otherwise false
+     */
+    default boolean contains(Locatable other) {
+        return contigsMatch(other) && CoordMath.encloses(getStart(), getEnd(), other.getStart(), other.getEnd());
+    }
+
+    /**
+     * Determine if this is on the same contig as other
+     * this must be equivalent to this.getContig().equals(other.getContig()) but may be implemented more efficiently
+     *
+     * @return true iff this.getContig().equals(other.getContig())
+     */
+    default boolean contigsMatch(Locatable other) {
+        return getContig() != null && other != null && Objects.equals(this.getContig(), other.getContig());
+    }
 }
diff --git a/src/main/java/htsjdk/samtools/util/LocusComparator.java b/src/main/java/htsjdk/samtools/util/LocusComparator.java
index e0f04d9..efbe09f 100644
--- a/src/main/java/htsjdk/samtools/util/LocusComparator.java
+++ b/src/main/java/htsjdk/samtools/util/LocusComparator.java
@@ -34,6 +34,7 @@ import java.util.Comparator;
 public class LocusComparator<T extends Locus> implements Comparator<T>, Serializable {
     private static final long serialVersionUID = 1L;
 
+    @Override
     public int compare(T thing1, T thing2) {
         int refCompare = thing1.getSequenceIndex() - thing2.getSequenceIndex();
         return refCompare == 0 ? thing1.getPosition() - thing2.getPosition() : refCompare;
diff --git a/src/main/java/htsjdk/samtools/util/LocusImpl.java b/src/main/java/htsjdk/samtools/util/LocusImpl.java
index 8629078..5986a6c 100644
--- a/src/main/java/htsjdk/samtools/util/LocusImpl.java
+++ b/src/main/java/htsjdk/samtools/util/LocusImpl.java
@@ -36,6 +36,7 @@ public class LocusImpl implements Locus {
         this.sequenceIndex = sequenceIndex;
     }
 
+    @Override
     public int getSequenceIndex() {
         return sequenceIndex;
     }
@@ -43,6 +44,7 @@ public class LocusImpl implements Locus {
     /**
      * @return 1-based position
      */
+    @Override
     public int getPosition() {
         return position;
     }
diff --git a/src/main/java/htsjdk/samtools/util/Log.java b/src/main/java/htsjdk/samtools/util/Log.java
index acbd3c4..dfe758a 100644
--- a/src/main/java/htsjdk/samtools/util/Log.java
+++ b/src/main/java/htsjdk/samtools/util/Log.java
@@ -41,13 +41,13 @@ import java.util.Date;
  */
 public final class Log {
     /** Enumeration for setting log levels. */
-    public static enum LogLevel { ERROR, WARNING, INFO, DEBUG }
+    public enum LogLevel { ERROR, WARNING, INFO, DEBUG }
 
     private static LogLevel globalLogLevel = LogLevel.INFO;
+    private static PrintStream out = System.err;
 
     private final Class<?> clazz;
     private final String className;
-    private final PrintStream out = System.err;
 
     /**
      * Private constructor
@@ -67,10 +67,41 @@ public final class Log {
         return new Log(clazz);
     }
 
+    /**
+     * Set the log level.
+     *
+     * @param logLevel  The log level enumeration
+     */
     public static void setGlobalLogLevel(final LogLevel logLevel) {
         globalLogLevel = logLevel;
     }
 
+    /**
+     * Get the log level.
+     *
+     * @return The enumeration for setting log levels.
+     */
+    public static LogLevel getGlobalLogLevel() {
+        return globalLogLevel;
+    }
+
+    /**
+     * Set the {@link PrintStream} for writing.
+     *
+     * @param stream    {@link PrintStream} to write to.
+     */
+    public static void setGlobalPrintStream(final PrintStream stream) { out = stream; }
+
+    /**
+     * Get the {@link PrintStream} for writing.
+     *
+     * @return  {@link PrintStream} to write to.
+     */
+    public static PrintStream getGlobalPrintStream() {
+        return out;
+    }
+
+
     /** Returns true if the specified log level is enabled otherwise false. */
     public static final boolean isEnabled(final LogLevel level) {
         return level.ordinal() <= globalLogLevel.ordinal();
diff --git a/src/main/java/htsjdk/samtools/util/Md5CalculatingInputStream.java b/src/main/java/htsjdk/samtools/util/Md5CalculatingInputStream.java
index e0e7cd5..47ea9ff 100755
--- a/src/main/java/htsjdk/samtools/util/Md5CalculatingInputStream.java
+++ b/src/main/java/htsjdk/samtools/util/Md5CalculatingInputStream.java
@@ -65,12 +65,14 @@ public class Md5CalculatingInputStream extends InputStream {
         }
     }
 
+    @Override
     public int read() throws IOException {
         int result = is.read();
         if (result != -1) md5.update((byte)result);
         return result;
     }
 
+    @Override
     public int read(byte[] b) throws IOException {
         int result = is.read(b);
         if (result != -1) md5.update(b, 0, result);
@@ -78,6 +80,7 @@ public class Md5CalculatingInputStream extends InputStream {
     }
 
 
+    @Override
     public int read(byte[] b, int off, int len) throws IOException {
         int result = is.read(b, off, len);
         if (result != -1) md5.update(b, off, result);
@@ -104,6 +107,7 @@ public class Md5CalculatingInputStream extends InputStream {
         }
     }
 
+    @Override
     public void close() throws IOException {
         is.close();
         makeHash();
@@ -116,18 +120,23 @@ public class Md5CalculatingInputStream extends InputStream {
     }
 
     // Methods not supported or overridden because they would not result in a valid hash
+    @Override
     public boolean markSupported() { return false; }
+    @Override
     public void	mark(int readlimit) {
         throw new UnsupportedOperationException("mark() is not supported by the MD5CalculatingInputStream");
     }
+    @Override
     public void	reset() throws IOException {
         throw new UnsupportedOperationException("reset() is not supported by the MD5CalculatingInputStream");
     }
+    @Override
     public long skip(long n) throws IOException {
         throw new UnsupportedOperationException("skip() is not supported by the MD5CalculatingInputStream");
     }
 
     // Methods delegated to the wrapped InputStream
+    @Override
     public int available() throws IOException { return is.available(); }
 
 }
diff --git a/src/main/java/htsjdk/samtools/util/Md5CalculatingOutputStream.java b/src/main/java/htsjdk/samtools/util/Md5CalculatingOutputStream.java
index 3c5a492..8b4c643 100755
--- a/src/main/java/htsjdk/samtools/util/Md5CalculatingOutputStream.java
+++ b/src/main/java/htsjdk/samtools/util/Md5CalculatingOutputStream.java
@@ -31,6 +31,8 @@ import java.io.FileWriter;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.math.BigInteger;
+import java.nio.file.Files;
+import java.nio.file.Path;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 
@@ -43,14 +45,14 @@ public class Md5CalculatingOutputStream extends OutputStream {
 
     private final OutputStream os;
     private final MessageDigest md5;
-    private final File digestFile;
+    private final Path digestFile;
     private String hash;
 
     /**
      * Constructor that takes in the OutputStream that we are wrapping
      * and creates the MD5 MessageDigest
      */
-    public Md5CalculatingOutputStream(OutputStream os, File digestFile) {
+    public Md5CalculatingOutputStream(OutputStream os, Path digestFile) {
         super();
         this.hash = null;
         this.os = os;
@@ -65,17 +67,24 @@ public class Md5CalculatingOutputStream extends OutputStream {
         }
     }
 
+    public Md5CalculatingOutputStream(OutputStream os, File digestFile) {
+        this(os, digestFile == null ? (Path) null : digestFile.toPath());
+    }
+
+    @Override
     public void write(int b) throws IOException {
         md5.update((byte)b);
         os.write(b);
     }
 
+    @Override
     public void write(byte[] b) throws IOException {
         md5.update(b);
         os.write(b);
     }
 
 
+    @Override
     public void write(byte[] b, int off, int len) throws IOException {
         md5.update(b, off, len);
         os.write(b, off, len);
@@ -102,18 +111,20 @@ public class Md5CalculatingOutputStream extends OutputStream {
         }
     }
 
+    @Override
     public void close() throws IOException {
         os.close();
         makeHash();
 
         if(digestFile != null) {
-            BufferedWriter writer = new BufferedWriter(new FileWriter(digestFile));
+            BufferedWriter writer = Files.newBufferedWriter(digestFile);
             writer.write(hash);
             writer.close();
         }
     }
 
     // Pass-through method
+    @Override
     public void flush() throws IOException { os.flush(); }
 
 }
diff --git a/src/main/java/htsjdk/samtools/util/PeekIterator.java b/src/main/java/htsjdk/samtools/util/PeekIterator.java
index 9f16a51..3a43ba5 100644
--- a/src/main/java/htsjdk/samtools/util/PeekIterator.java
+++ b/src/main/java/htsjdk/samtools/util/PeekIterator.java
@@ -41,6 +41,7 @@ public class PeekIterator<T> implements Iterator<T> {
      * @return true if the iteration has more elements. (In other words, returns true if next would return an element 
      * rather than throwing an exception.)
      */
+    @Override
     public boolean hasNext() {
         return peekedElement != null || underlyingIterator.hasNext();  
     }
@@ -49,6 +50,7 @@ public class PeekIterator<T> implements Iterator<T> {
      * @return the next element in the iteration. Calling this method repeatedly until the hasNext() method returns
      * false will return each element in the underlying collection exactly once.
      */
+    @Override
     public T next() {
         if (peekedElement != null) {
             final T ret = peekedElement;
@@ -72,6 +74,7 @@ public class PeekIterator<T> implements Iterator<T> {
     /**
      * Unsupported
      */
+    @Override
     public void remove() {
         throw new UnsupportedOperationException();
     }
diff --git a/src/main/java/htsjdk/samtools/util/PeekableIterator.java b/src/main/java/htsjdk/samtools/util/PeekableIterator.java
index 1587dd2..3df4c42 100644
--- a/src/main/java/htsjdk/samtools/util/PeekableIterator.java
+++ b/src/main/java/htsjdk/samtools/util/PeekableIterator.java
@@ -39,16 +39,19 @@ public class PeekableIterator<Object> implements CloseableIterator<Object> {
     }
 
     /** Closes the underlying iterator. */
+    @Override
     public void close() {
         CloserUtil.close(iterator);
     }
 
     /** True if there are more items, in which case both next() and peek() will return a value. */
+    @Override
     public boolean hasNext() {
         return this.nextObject != null;
     }
 
     /** Returns the next object and advances the iterator. */
+    @Override
     public Object next() {
         Object retval = this.nextObject;
         advance();
@@ -73,6 +76,7 @@ public class PeekableIterator<Object> implements CloseableIterator<Object> {
     }
 
     /** Unsupported Operation. */
+    @Override
     public void remove() {
         throw new UnsupportedOperationException("Not supported: remove");
     }
diff --git a/src/main/java/htsjdk/samtools/util/PositionalOutputStream.java b/src/main/java/htsjdk/samtools/util/PositionalOutputStream.java
index ef28be6..a4643db 100644
--- a/src/main/java/htsjdk/samtools/util/PositionalOutputStream.java
+++ b/src/main/java/htsjdk/samtools/util/PositionalOutputStream.java
@@ -41,20 +41,24 @@ public final class PositionalOutputStream extends OutputStream implements Locati
         this.out = out;
     }
 
+    @Override
     public final void write(final byte[] bytes) throws IOException {
         write(bytes, 0, bytes.length);
     }
 
+    @Override
     public final void write(final byte[] bytes, final int startIndex, final int numBytes) throws IOException {
         position += numBytes;
         out.write(bytes, startIndex, numBytes);
     }
 
+    @Override
     public final void write(final int c)  throws IOException {
         position++;
         out.write(c);
     }
 
+    @Override
     public final long getPosition() { return position; }
 
     @Override
diff --git a/src/main/java/htsjdk/samtools/util/QualityEncodingDetector.java b/src/main/java/htsjdk/samtools/util/QualityEncodingDetector.java
index b0a965c..0147daa 100644
--- a/src/main/java/htsjdk/samtools/util/QualityEncodingDetector.java
+++ b/src/main/java/htsjdk/samtools/util/QualityEncodingDetector.java
@@ -270,6 +270,7 @@ public class QualityEncodingDetector {
                 }
             }
 
+            @Override
             public boolean hasNext() {
                 // If this returns true, the head of the queue will have a next element
                 while (!queue.isEmpty()) {
@@ -281,6 +282,7 @@ public class QualityEncodingDetector {
                 return false;
             }
 
+            @Override
             public FastqRecord next() {
                 if (!hasNext()) throw new NoSuchElementException();
                 final Iterator<FastqRecord> i = queue.poll();
@@ -289,6 +291,7 @@ public class QualityEncodingDetector {
                 return result;
             }
 
+            @Override
             public void remove() {
                 throw new UnsupportedOperationException();
             }
diff --git a/src/main/java/htsjdk/samtools/util/SamLocusIterator.java b/src/main/java/htsjdk/samtools/util/SamLocusIterator.java
index 7a60756..dc6745f 100644
--- a/src/main/java/htsjdk/samtools/util/SamLocusIterator.java
+++ b/src/main/java/htsjdk/samtools/util/SamLocusIterator.java
@@ -268,13 +268,13 @@ public class SamLocusIterator extends AbstractLocusIterator<SamLocusIterator.Rec
         public List<RecordAndOffset> getInsertedInRecord() {
             return (insertedInRecord == null) ? Collections.emptyList() : Collections.unmodifiableList(insertedInRecord);
         }
-        
-        /** 
-         * @return the number of records overlapping the position, with deletions included if they are being tracked. 
+
+        /**
+         * @return the number of records overlapping the position, with deletions included if they are being tracked.
          */
         @Override
-        public int size() { 
-            return super.size() + ((deletedInRecord == null) ? 0 : deletedInRecord.size()); 
+        public int size() {
+            return super.size() + ((deletedInRecord == null) ? 0 : deletedInRecord.size());
         }
 
 
@@ -284,7 +284,7 @@ public class SamLocusIterator extends AbstractLocusIterator<SamLocusIterator.Rec
          */
         @Override
         public boolean isEmpty() {
-            return getRecordAndPositions().isEmpty() &&
+            return getRecordAndOffsets().isEmpty() &&
                     (deletedInRecord == null || deletedInRecord.isEmpty()) &&
                     (insertedInRecord == null || insertedInRecord.isEmpty());
         }
diff --git a/src/main/java/htsjdk/samtools/util/SamRecordIntervalIteratorFactory.java b/src/main/java/htsjdk/samtools/util/SamRecordIntervalIteratorFactory.java
index 5d173a5..5dd7589 100644
--- a/src/main/java/htsjdk/samtools/util/SamRecordIntervalIteratorFactory.java
+++ b/src/main/java/htsjdk/samtools/util/SamRecordIntervalIteratorFactory.java
@@ -107,6 +107,7 @@ public class SamRecordIntervalIteratorFactory {
          *
          * @return true if the iteration has more elements.  Otherwise returns false.
          */
+        @Override
         public boolean hasNext() {
             return next != null;
         }
@@ -117,6 +118,7 @@ public class SamRecordIntervalIteratorFactory {
          * @return the next element in the iteration
          * @throws java.util.NoSuchElementException
          */
+        @Override
         public SAMRecord next() {
             if (next == null) {
                 throw new NoSuchElementException("Iterator has no more elements.");
@@ -131,10 +133,12 @@ public class SamRecordIntervalIteratorFactory {
          *
          * @throws UnsupportedOperationException
          */
+        @Override
         public void remove() {
             throw new UnsupportedOperationException("Remove() not supported by FilteringSamIterator");
         }
 
+        @Override
         public void close() {
             CloserUtil.close(iterator);
         }
diff --git a/src/main/java/htsjdk/samtools/util/SequenceUtil.java b/src/main/java/htsjdk/samtools/util/SequenceUtil.java
index 3108cee..8e399c1 100644
--- a/src/main/java/htsjdk/samtools/util/SequenceUtil.java
+++ b/src/main/java/htsjdk/samtools/util/SequenceUtil.java
@@ -32,6 +32,7 @@ import htsjdk.samtools.SAMRecord;
 import htsjdk.samtools.SAMSequenceDictionary;
 import htsjdk.samtools.SAMSequenceRecord;
 import htsjdk.samtools.SAMTag;
+import htsjdk.samtools.fastq.FastqConstants;
 
 import java.io.File;
 import java.math.BigInteger;
@@ -49,6 +50,25 @@ public class SequenceUtil {
 
     public static final byte[] VALID_BASES_UPPER = new byte[]{A, C, G, T};
     public static final byte[] VALID_BASES_LOWER = new byte[]{a, c, g, t};
+    private static final byte[] ACGTN_BASES = new byte[]{A, C, G, T, N};
+    private static final String IUPAC_CODES_STRING = ".aAbBcCdDgGhHkKmMnNrRsStTvVwWyY";
+    /**
+     * A set of bases supported by BAM in reads, see http://samtools.github.io/hts-specs/SAMv1.pdf chapter 4.2 on 'seq' field.
+     * Effectively these are upper cased IUPAC codes with equals sign ('=') and without dot ('.').
+     */
+    private static final byte[] BAM_READ_BASE_SET = "=ABCDGHKMNRSTVWY".getBytes();
+
+    /**
+     * A lookup table to find a corresponding BAM read base.
+     */
+    private static final byte[] bamReadBaseLookup = new byte[127];
+    static {
+        Arrays.fill(bamReadBaseLookup, N);
+        for (final byte base: BAM_READ_BASE_SET) {
+            bamReadBaseLookup[base] = base;
+            bamReadBaseLookup[base + 32] = base;
+        }
+    }
 
     private static final byte A_MASK = 1;
     private static final byte C_MASK = 2;
@@ -56,13 +76,13 @@ public class SequenceUtil {
     private static final byte T_MASK = 8;
 
     private static final byte[] bases = new byte[127];
-
+    private static final byte NON_IUPAC_CODE = 0;
     /*
      * Definition of IUPAC codes:
      * http://www.bioinformatics.org/sms2/iupac.html
      */
     static {
-        Arrays.fill(bases, (byte) 0);
+        Arrays.fill(bases, NON_IUPAC_CODE);
         bases[A] = A_MASK;
         bases[C] = C_MASK;
         bases[G] = G_MASK;
@@ -141,7 +161,24 @@ public class SequenceUtil {
         return false;
     }
 
-    /** Calculates the fraction of bases that are G/C in the sequence. */
+    /**
+     * Check if the given base is one of upper case ACGTN */
+    public static boolean isUpperACGTN(final byte base) {
+        return isValidBase(base, ACGTN_BASES);
+    }
+
+
+    /** Returns all IUPAC codes as a string */
+    public static String getIUPACCodesString() {
+        return IUPAC_CODES_STRING;
+    }
+
+    /** Checks if the given base is a IUPAC code */
+    public static boolean isIUPAC(final byte base) {
+        return bases[base] != NON_IUPAC_CODE;
+    }
+
+    /** Calculates the fraction of bases that are G/C in the sequence */
     public static double calculateGc(final byte[] bases) {
         int gcs = 0;
         for (int i = 0; i < bases.length; ++i) {
@@ -152,6 +189,18 @@ public class SequenceUtil {
         return gcs / (double) bases.length;
     }
 
+    /** Check if the given base belongs to BAM read base set '=ABCDGHKMNRSTVWY' */
+    public static boolean isBamReadBase(final byte base) {
+        return isValidBase(base, BAM_READ_BASE_SET);
+    }
+
+    /** Update and return the given array of bases by upper casing and then replacing all non-BAM read bases with N */
+    public static byte[] toBamReadBasesInPlace(final byte[] bases) {
+        for (int i = 0; i < bases.length; i++)
+            bases[i] = bamReadBaseLookup[bases[i]];
+        return bases;
+    }
+
     /**
      * default signature that forces the lists to be the same size
      *
@@ -620,32 +669,7 @@ public class SequenceUtil {
         }
     }
 
-    /** Reverses and complements the bases in place. */
-    public static void reverseComplement(final byte[] bases) {
-        final int lastIndex = bases.length - 1;
-
-        int i, j;
-        for (i = 0, j = lastIndex; i < j; ++i, --j) {
-            final byte tmp = complement(bases[i]);
-            bases[i] = complement(bases[j]);
-            bases[j] = tmp;
-        }
-        if (bases.length % 2 == 1) {
-            bases[i] = complement(bases[i]);
-        }
-    }
-
-    /** Reverses the quals in place. */
-    public static void reverseQualities(final byte[] quals) {
-        final int lastIndex = quals.length - 1;
 
-        int i, j;
-        for (i = 0, j = lastIndex; i < j; ++i, --j) {
-            final byte tmp = quals[i];
-            quals[i] = quals[j];
-            quals[j] = tmp;
-        }
-    }
 
     /**
      * Returns true if the bases are equal OR if the mismatch can be accounted for by
@@ -836,6 +860,16 @@ public class SequenceUtil {
         return ret;
     }
 
+    /** Reverses and complements the bases in place. */
+    public static void reverseComplement(final byte[] bases) {
+        reverseComplement(bases, 0, bases.length);
+    }
+
+    /** Reverses the quals in place. */
+    public static void reverseQualities(final byte[] quals) {
+        reverse(quals, 0, quals.length);
+    }
+
     public static void reverse(final byte[] array, final int offset, final int len) {
         final int lastIndex = len - 1;
 
@@ -1020,7 +1054,7 @@ public class SequenceUtil {
 
         // NOTE: the while loop isn't necessarily the most efficient way to handle this but we don't
         // expect this to ever happen more than once, just trapping pathological cases
-        while ((readName.endsWith("/1") || readName.endsWith("/2"))) {
+        while ((readName.endsWith(FastqConstants.FIRST_OF_PAIR) || readName.endsWith(FastqConstants.SECOND_OF_PAIR))) {
             // If this is an unpaired run we want to make sure that "/1" isn't tacked on the end of the read name,
             // as this can cause problems down the road (ex. in Picard's MergeBamAlignment).
             readName = readName.substring(0, readName.length() - 2);
diff --git a/src/main/java/htsjdk/samtools/util/SnappyLoader.java b/src/main/java/htsjdk/samtools/util/SnappyLoader.java
index 52abcef..746683c 100644
--- a/src/main/java/htsjdk/samtools/util/SnappyLoader.java
+++ b/src/main/java/htsjdk/samtools/util/SnappyLoader.java
@@ -23,105 +23,92 @@
  */
 package htsjdk.samtools.util;
 
+import htsjdk.samtools.Defaults;
 import htsjdk.samtools.SAMException;
-import org.xerial.snappy.LoadSnappy;
+import org.xerial.snappy.SnappyError;
 import org.xerial.snappy.SnappyInputStream;
+import org.xerial.snappy.SnappyOutputStream;
 
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
-import java.lang.reflect.Constructor;
 
 /**
- * If Snappy is available, obtain single-arg ctors for SnappyInputStream and SnappyOutputStream.
+ * Checks if Snappy is available, and provides methods for wrapping InputStreams and OutputStreams with Snappy if so.
  */
 public class SnappyLoader {
     private static final int SNAPPY_BLOCK_SIZE = 32768;  // keep this as small as can be without hurting compression ratio.
-    private final Constructor<InputStream> SnappyInputStreamCtor;
-    private final Constructor<OutputStream> SnappyOutputStreamCtor;
-    public final boolean SnappyAvailable;
+    private static final Log logger = Log.getInstance(SnappyLoader.class);
 
-    // Force Snappy-java code to be loaded into executable jars.
-    private final SnappyInputStream ignoreMe = null;
+    private final boolean snappyAvailable;
 
-    // Force bcel to load Snappy.
-    //private static final Class SnappyClass = SnappyInputStream.class;
-
-    private static final boolean DefaultVerbosity = Boolean.valueOf(System.getProperty("snappy.loader.verbosity", "false"));
 
     public SnappyLoader() {
-        this(DefaultVerbosity);
+        this(Defaults.DISABLE_SNAPPY_COMPRESSOR);
     }
 
-    /**
-     * Constructs a new SnappyLoader which will check to see if snappy is available in the JVM/library path.
-     * @param verbose if true output a small number of debug messages to System.err
-     */
-    public SnappyLoader(final boolean verbose) {
-        Constructor<InputStream> inputStreamCtor = null;
-        Constructor<OutputStream> outputStreamCtor = null;
-        Class<Error> snappyErrorClass = null;
-
-        if (java.lang.Boolean.valueOf(System.getProperty("snappy.disable", "false"))) {
-            System.err.println("Snappy is disabled via system property.");
+    SnappyLoader(boolean disableSnappy) {
+        if (disableSnappy) {
+            logger.debug("Snappy is disabled via system property.");
+            snappyAvailable = false;
         }
         else {
-            try {
-                final Class<InputStream> snappyInputStreamClass = (Class<InputStream>)Class.forName("org.xerial.snappy.SnappyInputStream");
-                final Class<OutputStream> snappyOutputStreamClass = (Class<OutputStream>)Class.forName("org.xerial.snappy.SnappyOutputStream");
-                snappyErrorClass = (Class<Error>)Class.forName("org.xerial.snappy.SnappyError");
-                inputStreamCtor = snappyInputStreamClass.getConstructor(InputStream.class);
-                outputStreamCtor = snappyOutputStreamClass.getConstructor(OutputStream.class, Integer.TYPE);
+            boolean tmpSnappyAvailable = false;
+            try (final OutputStream test = new SnappyOutputStream(new ByteArrayOutputStream(1000))){
+                test.write("Hello World!".getBytes());
+                tmpSnappyAvailable = true;
+                logger.debug("Snappy successfully loaded.");
             }
-            catch (NoSuchMethodException e) { /* Do nothing. */ }
-            catch (ClassNotFoundException e) { /* Do nothing. */ }
-        }
-
-        this.SnappyInputStreamCtor = inputStreamCtor;
-        this.SnappyOutputStreamCtor = outputStreamCtor;
-
-        if (this.SnappyInputStreamCtor != null && this.SnappyOutputStreamCtor != null) {
-            // Don't try to call any Snappy code until classes have been found via reflection above.
-            boolean tmpSnappyAvailable;
-            try {
-                if (!LoadSnappy.load()) {
-                    if (verbose) System.err.println("Snappy dll failed to load.");
-                    tmpSnappyAvailable = false;
-                }
-                else {
-                    if (verbose) System.err.println("Snappy stream classes loaded.");
-                    tmpSnappyAvailable = true;
-                }
-            } catch (Error e) {
-                if (e.getClass().equals(snappyErrorClass)) {
-                    if (verbose) System.err.println("Snappy dll failed to load: " + e.getMessage());
-                    tmpSnappyAvailable = false;
-                } else {
-                    throw e;
-                }
+            /*
+             * ExceptionInInitializerError: thrown by Snappy if native libs fail to load.
+             * IllegalStateException: thrown within the `test.write` call above if no UTF-8 encoder is found.
+             * IOException: potentially thrown by the `test.write` and `test.close` calls.
+             * SnappyError: potentially thrown for a variety of reasons by Snappy.
+             */
+            catch (final ExceptionInInitializerError | IllegalStateException | IOException | SnappyError e) {
+                logger.warn(e, "Snappy native library failed to load.");
             }
-            SnappyAvailable = tmpSnappyAvailable;
-        }
-        else {
-            if (verbose) System.err.println("Snappy stream classes not loaded.");
-            SnappyAvailable = false;
+            snappyAvailable = tmpSnappyAvailable;
         }
     }
 
-    /** Wrap an InputStream in a SnappyInputStream. If Snappy is not available will throw an exception. */
+    /** Returns true if Snappy is available, false otherwise. */
+    public boolean isSnappyAvailable() { return snappyAvailable; }
+
+    /**
+     * Wrap an InputStream in a SnappyInputStream.
+     * @throws SAMException if Snappy is not available will throw an exception.
+     */
     public InputStream wrapInputStream(final InputStream inputStream) {
-        try {
-            return SnappyInputStreamCtor.newInstance(inputStream);
-        } catch (Exception e) {
-            throw new SAMException("Error instantiating SnappyInputStream", e);
-        }
+        return wrapWithSnappyOrThrow(inputStream, SnappyInputStream::new);
     }
 
-    /** Wrap an InputStream in a SnappyInputStream. If Snappy is not available will throw an exception. */
+    /**
+     * Wrap an OutputStream in a SnappyOutputStream.
+     * @throws SAMException if Snappy is not available
+     */
     public OutputStream wrapOutputStream(final OutputStream outputStream) {
-        try {
-            return SnappyOutputStreamCtor.newInstance(outputStream, SNAPPY_BLOCK_SIZE);
-        } catch (Exception e) {
-            throw new SAMException("Error instantiating SnappyOutputStream", e);
+        return wrapWithSnappyOrThrow(outputStream, (stream) -> new SnappyOutputStream(stream, SNAPPY_BLOCK_SIZE));
+    }
+
+    private interface IOFunction<T,R> {
+        R apply(T input) throws IOException;
+    }
+
+    private <T,R> R wrapWithSnappyOrThrow(T stream, IOFunction<T, R> wrapper){
+        if (isSnappyAvailable()) {
+            try {
+                return wrapper.apply(stream);
+            } catch (Exception e) {
+                throw new SAMException("Error wrapping stream with snappy", e);
+            }
+        } else {
+            final String errorMessage = Defaults.DISABLE_SNAPPY_COMPRESSOR
+                    ? "Cannot wrap stream with snappy compressor because snappy was disabled via the "
+                    + Defaults.DISABLE_SNAPPY_PROPERTY_NAME + " system property."
+                    : "Cannot wrap stream with snappy compressor because we could not load the snappy library.";
+            throw new SAMException(errorMessage);
         }
     }
 }
diff --git a/src/main/java/htsjdk/samtools/util/SortingCollection.java b/src/main/java/htsjdk/samtools/util/SortingCollection.java
index 6babd4e..69ce255 100644
--- a/src/main/java/htsjdk/samtools/util/SortingCollection.java
+++ b/src/main/java/htsjdk/samtools/util/SortingCollection.java
@@ -259,6 +259,7 @@ public class SortingCollection<T> implements Iterable<T> {
      * Prepare to iterate through the records in order.  This method may be called more than once,
      * but add() may not be called after this method has been called.
      */
+    @Override
     public CloseableIterator<T> iterator() {
         if (this.cleanedUp) {
             throw new IllegalStateException("Cannot call iterator() after cleanup() was called.");
@@ -354,14 +355,17 @@ public class SortingCollection<T> implements Iterable<T> {
                         SortingCollection.this.comparator);
         }
 
+        @Override
         public void close() {
             // nothing to do
         }
 
+        @Override
         public boolean hasNext() {
             return this.iterationIndex < SortingCollection.this.numRecordsInRam;
         }
 
+        @Override
         public T next() {
             if (!hasNext()) {
                 throw new NoSuchElementException();
@@ -372,6 +376,7 @@ public class SortingCollection<T> implements Iterable<T> {
             return ret;
         }
 
+        @Override
         public void remove() {
             throw new UnsupportedOperationException();
         }
@@ -409,10 +414,12 @@ public class SortingCollection<T> implements Iterable<T> {
             }
         }
 
+        @Override
         public boolean hasNext() {
             return !this.queue.isEmpty();
         }
 
+        @Override
         public T next() {
             if (!hasNext()) {
                 throw new NoSuchElementException();
@@ -430,10 +437,12 @@ public class SortingCollection<T> implements Iterable<T> {
             return ret;
         }
 
+        @Override
         public void remove() {
             throw new UnsupportedOperationException();
         }
 
+        @Override
         public void close() {
             while (!this.queue.isEmpty()) {
                 final PeekFileRecordIterator it = this.queue.pollFirst();
@@ -464,10 +473,12 @@ public class SortingCollection<T> implements Iterable<T> {
             }
         }
 
+        @Override
         public boolean hasNext() {
             return this.currentRecord != null;
         }
 
+        @Override
         public T next() {
             if (!hasNext()) {
                 throw new NoSuchElementException();
@@ -477,6 +488,7 @@ public class SortingCollection<T> implements Iterable<T> {
             return ret;
         }
 
+        @Override
         public void remove() {
             throw new UnsupportedOperationException();
         }
@@ -485,6 +497,7 @@ public class SortingCollection<T> implements Iterable<T> {
             this.currentRecord = this.codec.decode();
         }
 
+        @Override
         public void close() {
             CloserUtil.close(this.is);
         }
@@ -505,6 +518,7 @@ public class SortingCollection<T> implements Iterable<T> {
     class PeekFileRecordIteratorComparator implements Comparator<PeekFileRecordIterator>, Serializable {
         private static final long serialVersionUID = 1L;
 
+        @Override
         public int compare(final PeekFileRecordIterator lhs, final PeekFileRecordIterator rhs) {
             final int result = comparator.compare(lhs.peek(), rhs.peek());
             if (result == 0) return lhs.n - rhs.n;
diff --git a/src/main/java/htsjdk/samtools/util/SortingLongCollection.java b/src/main/java/htsjdk/samtools/util/SortingLongCollection.java
index 4cf0c36..e75c336 100644
--- a/src/main/java/htsjdk/samtools/util/SortingLongCollection.java
+++ b/src/main/java/htsjdk/samtools/util/SortingLongCollection.java
@@ -336,6 +336,7 @@ public class SortingLongCollection {
     private static class PeekFileValueIteratorComparator implements Comparator<PeekFileValueIterator>, Serializable {
         private static final long serialVersionUID = 1L;
 
+        @Override
         public int compare(final PeekFileValueIterator it1, final PeekFileValueIterator it2) {
             if (it1.peek() < it2.peek()) {
                 return -1;
diff --git a/src/main/java/htsjdk/samtools/util/StringLineReader.java b/src/main/java/htsjdk/samtools/util/StringLineReader.java
index ed383a2..cac81e2 100644
--- a/src/main/java/htsjdk/samtools/util/StringLineReader.java
+++ b/src/main/java/htsjdk/samtools/util/StringLineReader.java
@@ -23,77 +23,18 @@
  */
 package htsjdk.samtools.util;
 
-import java.util.regex.Pattern;
+import java.io.ByteArrayInputStream;
 
 /**
  * Implementation of LineReader that gets its input from a String.  No charset conversion
  * is necessary because the String is in unicode.  Handles CR, LF or CRLF line termination,
  * but if asked to return the line terminator, it always comes back as LF.
+ *
+ * @deprecated use {@link BufferedLineReader#fromString(String)}.
  */
-public class StringLineReader implements LineReader {
-    private static final Pattern CRLF = Pattern.compile("\r\n");
-    private final String theString;
-    private int curPos = 0;
-    private int lineNumber = 0;
-
+ at Deprecated
+public class StringLineReader extends BufferedLineReader {
     public StringLineReader(final String s) {
-        // Simplify later processing by replacing crlf with just lf, and replacing solo cr with lf.
-        // Note that String.replace(String, String) causes a regex to be used, so precompilation should be
-        // the best we can do short of handling the string directly.
-        this.theString = CRLF.matcher(s).replaceAll("\n").replace('\r', '\n');
-    }
-
-    /**
-     * Read a line and remove the line terminator
-     */
-    public String readLine() {
-        return readLine(false);
-    }
-
-    /**
-     * Read a line and optionally include the line terminator
-     *
-     * @param includeTerminators
-     * @return the next line from the input, with \n terminator if present and requested, or null if no more input.
-     */
-    private String readLine(final boolean includeTerminators) {
-        if (curPos == theString.length()) {
-            return null;
-        }
-        final int nextLfIndex = theString.indexOf('\n', curPos);
-        if (nextLfIndex == -1) {
-            final int startPos = curPos;
-            curPos = theString.length();
-            ++lineNumber;
-            return theString.substring(startPos);
-        }
-        final int startPos = curPos;
-        final int endPos = nextLfIndex + (includeTerminators? 1: 0);
-        curPos = nextLfIndex + 1;
-        ++lineNumber;
-        return theString.substring(startPos, endPos);
-    }
-
-    /**
-     * @return 1-based number of line most recently read
-     */
-    public int getLineNumber() {
-        return lineNumber;
-    }
-
-    /**
-     * Non-destructive one-character look-ahead.
-     *
-     * @return If not eof, the next character that would be read.  If eof, -1.
-     */
-    public int peek() {
-        if (curPos == theString.length()) {
-            return -1;
-        }
-        return theString.charAt(curPos);
-    }
-
-    public void close() {
-        curPos = theString.length();
+        super(new ByteArrayInputStream(s.getBytes()));
     }
 }
diff --git a/src/main/java/htsjdk/samtools/util/StringUtil.java b/src/main/java/htsjdk/samtools/util/StringUtil.java
index 9049253..a885ba2 100644
--- a/src/main/java/htsjdk/samtools/util/StringUtil.java
+++ b/src/main/java/htsjdk/samtools/util/StringUtil.java
@@ -312,6 +312,9 @@ public class StringUtil {
         }
         return byteBuffer;
 */
+        if (s == null) {
+            return null;
+        }
         final byte[] byteBuffer = new byte[s.length()];
         s.getBytes(0, byteBuffer.length, byteBuffer, 0);
         return byteBuffer;
@@ -319,6 +322,9 @@ public class StringUtil {
 
     @SuppressWarnings("deprecation")
     public static byte[] stringToBytes(final String s, final int offset, final int length) {
+        if (s == null) {
+            return null;
+        }
         final byte[] byteBuffer = new byte[length];
         s.getBytes(offset, offset + length, byteBuffer, 0);
         return byteBuffer;
diff --git a/src/main/java/htsjdk/samtools/util/TempStreamFactory.java b/src/main/java/htsjdk/samtools/util/TempStreamFactory.java
index 0915037..d807d55 100644
--- a/src/main/java/htsjdk/samtools/util/TempStreamFactory.java
+++ b/src/main/java/htsjdk/samtools/util/TempStreamFactory.java
@@ -51,7 +51,7 @@ public class TempStreamFactory {
      */
     public InputStream wrapTempInputStream(final InputStream inputStream, final int bufferSize) {
         InputStream is = IOUtil.maybeBufferInputStream(inputStream, bufferSize);
-        if (getSnappyLoader().SnappyAvailable) {
+        if (getSnappyLoader().isSnappyAvailable()) {
             try {
                 return getSnappyLoader().wrapInputStream(is);
             } catch (Exception e) {
@@ -71,7 +71,7 @@ public class TempStreamFactory {
     public OutputStream wrapTempOutputStream(final OutputStream outputStream, final int bufferSize) {
         OutputStream os = outputStream;
         if (bufferSize > 0) os = new BufferedOutputStream(os, bufferSize);
-        if (getSnappyLoader().SnappyAvailable) {
+        if (getSnappyLoader().isSnappyAvailable()) {
             try {
                 os = getSnappyLoader().wrapOutputStream(os);
             } catch (Exception e) {
diff --git a/src/main/java/htsjdk/samtools/util/WholeGenomeReferenceSequenceMask.java b/src/main/java/htsjdk/samtools/util/WholeGenomeReferenceSequenceMask.java
index 1263285..b9ef975 100644
--- a/src/main/java/htsjdk/samtools/util/WholeGenomeReferenceSequenceMask.java
+++ b/src/main/java/htsjdk/samtools/util/WholeGenomeReferenceSequenceMask.java
@@ -41,6 +41,7 @@ public class WholeGenomeReferenceSequenceMask implements ReferenceSequenceMask {
     /**
      * @return true if the mask is set for the given sequence and position
      */
+    @Override
     public boolean get(final int sequenceIndex, final int position) {
         if (sequenceIndex < 0) {
             throw new IllegalArgumentException("Negative sequence index " + sequenceIndex);
@@ -55,6 +56,7 @@ public class WholeGenomeReferenceSequenceMask implements ReferenceSequenceMask {
     /**
      * @return the next pos on the given sequence >= position that is set, or -1 if there are no more set positions
      */
+    @Override
     public int nextPosition(final int sequenceIndex, final int position) {
         if (get(sequenceIndex, position + 1)) {
             return position + 1;
@@ -66,6 +68,7 @@ public class WholeGenomeReferenceSequenceMask implements ReferenceSequenceMask {
     /**
      * @return Largest sequence index for which there are set bits.
      */
+    @Override
     public int getMaxSequenceIndex() {
         return header.getSequenceDictionary().size() - 1;
     }
@@ -73,6 +76,7 @@ public class WholeGenomeReferenceSequenceMask implements ReferenceSequenceMask {
     /**
      * @return the largest position on the last sequence index
      */
+    @Override
     public int getMaxPosition() {
         SAMSequenceRecord lastSequenceRecord = header.getSequence(getMaxSequenceIndex());
         return lastSequenceRecord.getSequenceLength();
diff --git a/src/main/java/htsjdk/samtools/util/zip/DeflaterFactory.java b/src/main/java/htsjdk/samtools/util/zip/DeflaterFactory.java
index c15e7c6..fb3ac9a 100644
--- a/src/main/java/htsjdk/samtools/util/zip/DeflaterFactory.java
+++ b/src/main/java/htsjdk/samtools/util/zip/DeflaterFactory.java
@@ -40,9 +40,9 @@ public class DeflaterFactory {
      * Returns a deflater object that will be used when writing BAM files.
      * Subclasses may override to provide their own deflater implementation.
      * @param compressionLevel the compression level (0-9)
-     * @param nowrap if true then use GZIP compatible compression
+     * @param gzipCompatible if true then use GZIP compatible compression
      */
-    public Deflater makeDeflater(final int compressionLevel, final boolean nowrap) {
-        return new Deflater(compressionLevel, nowrap);
+    public Deflater makeDeflater(final int compressionLevel, final boolean gzipCompatible) {
+        return new Deflater(compressionLevel, gzipCompatible);
     }
 }
diff --git a/src/main/java/htsjdk/samtools/util/zip/DeflaterFactory.java b/src/main/java/htsjdk/samtools/util/zip/InflaterFactory.java
similarity index 64%
copy from src/main/java/htsjdk/samtools/util/zip/DeflaterFactory.java
copy to src/main/java/htsjdk/samtools/util/zip/InflaterFactory.java
index c15e7c6..c03dc9a 100644
--- a/src/main/java/htsjdk/samtools/util/zip/DeflaterFactory.java
+++ b/src/main/java/htsjdk/samtools/util/zip/InflaterFactory.java
@@ -23,26 +23,27 @@
  */
 package htsjdk.samtools.util.zip;
 
-import htsjdk.samtools.util.BlockCompressedOutputStream;
-import java.util.zip.Deflater;
+import htsjdk.samtools.util.BlockGunzipper;
+import java.util.zip.Inflater;
 
 /**
- * Factory for {@link Deflater} objects used by {@link BlockCompressedOutputStream}.
- * This class may be extended to provide alternative deflaters (e.g., for improved performance).
+ * Factory for {@link Inflater} objects used by {@link BlockGunzipper}.
+ * This class may be extended to provide alternative inflaters (e.g., for improved performance).
+ * The default implementation returns a JDK {@link Inflater}
  */
-public class DeflaterFactory {
+public class InflaterFactory {
 
-    public DeflaterFactory() {
+    public InflaterFactory() {
         //Note: made explicit constructor to make searching for references easier
     }
 
     /**
-     * Returns a deflater object that will be used when writing BAM files.
-     * Subclasses may override to provide their own deflater implementation.
-     * @param compressionLevel the compression level (0-9)
-     * @param nowrap if true then use GZIP compatible compression
+     * Returns an inflater object that will be used when reading DEFLATE compressed files.
+     * Subclasses may override to provide their own inflater implementation.
+     * The default implementation returns a JDK {@link Inflater}
+     * @param gzipCompatible if true then use GZIP compatible compression
      */
-    public Deflater makeDeflater(final int compressionLevel, final boolean nowrap) {
-        return new Deflater(compressionLevel, nowrap);
+    public Inflater makeInflater(final boolean gzipCompatible) {
+        return new Inflater(gzipCompatible);
     }
 }
diff --git a/src/main/java/htsjdk/tribble/AbstractFeatureReader.java b/src/main/java/htsjdk/tribble/AbstractFeatureReader.java
index 80d9a6c..d65783f 100644
--- a/src/main/java/htsjdk/tribble/AbstractFeatureReader.java
+++ b/src/main/java/htsjdk/tribble/AbstractFeatureReader.java
@@ -25,11 +25,13 @@ import htsjdk.tribble.util.TabixUtils;
 import java.io.File;
 import java.io.IOException;
 import java.net.URI;
+import java.nio.channels.SeekableByteChannel;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.Set;
+import java.util.function.Function;
 
 /**
  * jrobinso
@@ -43,6 +45,11 @@ public abstract class AbstractFeatureReader<T extends Feature, SOURCE> implement
     // the path to underlying data source
     String path;
 
+    // a wrapper to apply to the raw stream of the Feature file to allow features like prefetching and caching to be injected
+    final Function<SeekableByteChannel, SeekableByteChannel> wrapper;
+    // a wrapper to apply to the raw stream of the index file
+    final Function<SeekableByteChannel, SeekableByteChannel> indexWrapper;
+
     // the query source, codec, and header
     // protected final QuerySource querySource;
     protected final FeatureCodec<T, SOURCE> codec;
@@ -60,38 +67,51 @@ public abstract class AbstractFeatureReader<T extends Feature, SOURCE> implement
     }
 
     /**
-     * {@link #getFeatureReader(String, String, FeatureCodec, boolean)} with {@code null} for indexResource
+     * {@link #getFeatureReader(String, String, FeatureCodec, boolean, Function, Function)} with {@code null} for indexResource, wrapper, and indexWrapper
      * @throws TribbleException
      */
     public static <FEATURE extends Feature, SOURCE> AbstractFeatureReader<FEATURE, SOURCE> getFeatureReader(final String featureResource, final FeatureCodec<FEATURE, SOURCE> codec, final boolean requireIndex) throws TribbleException {
-        return getFeatureReader(featureResource, null, codec, requireIndex);
+        return getFeatureReader(featureResource, null, codec, requireIndex, null, null);
+    }
+
+
+    /**
+     * {@link #getFeatureReader(String, String, FeatureCodec, boolean, Function, Function)} with {@code null} for wrapper, and indexWrapper
+     * @throws TribbleException
+     */
+    public static <FEATURE extends Feature, SOURCE> AbstractFeatureReader<FEATURE, SOURCE> getFeatureReader(final String featureResource, String indexResource, final FeatureCodec<FEATURE, SOURCE> codec, final boolean requireIndex) throws TribbleException {
+        return getFeatureReader(featureResource, indexResource, codec, requireIndex, null, null);
     }
 
     /**
      *
      * @param featureResource the feature file to create from
      * @param indexResource   the index for the feature file. If null, will auto-generate (if necessary)
-     * @param codec
+     * @param codec           the codec to use to decode the individual features
      * @param requireIndex    whether an index is required for this file
-     * @return
+     * @param wrapper         a wrapper to apply to the byte stream from the featureResource allowing injecting features
+     *                        like caching and prefetching of the stream, may be null, will only be applied if featureResource
+     *                        is a uri representing a {@link java.nio.file.Path}
+     * @param indexWrapper    a wrapper to apply to the byte stream from the indexResource, may be null, will only be
+     *                        applied if indexResource is a uri representing a {@link java.nio.file.Path}
+     *
      * @throws TribbleException
      */
-    public static <FEATURE extends Feature, SOURCE> AbstractFeatureReader<FEATURE, SOURCE> getFeatureReader(final String featureResource, String indexResource, final FeatureCodec<FEATURE, SOURCE> codec, final boolean requireIndex) throws TribbleException {
-
+    public static <FEATURE extends Feature, SOURCE> AbstractFeatureReader<FEATURE, SOURCE> getFeatureReader(final String featureResource, String indexResource, final FeatureCodec<FEATURE, SOURCE> codec, final boolean requireIndex, Function<SeekableByteChannel, SeekableByteChannel> wrapper, Function<SeekableByteChannel, SeekableByteChannel> indexWrapper) throws TribbleException {
         try {
             // Test for tabix index
             if (methods.isTabix(featureResource, indexResource)) {
                 if ( ! (codec instanceof AsciiFeatureCodec) )
                     throw new TribbleException("Tabix indexed files only work with ASCII codecs, but received non-Ascii codec " + codec.getClass().getSimpleName());
-                return new TabixFeatureReader<FEATURE, SOURCE>(featureResource, indexResource, (AsciiFeatureCodec) codec);
+                return new TabixFeatureReader<>(featureResource, indexResource, (AsciiFeatureCodec) codec, wrapper, indexWrapper);
             }
             // Not tabix => tribble index file (might be gzipped, but not block gzipped)
             else {
-                return new TribbleIndexedFeatureReader<FEATURE, SOURCE>(featureResource, indexResource, codec, requireIndex);
+                return new TribbleIndexedFeatureReader<>(featureResource, indexResource, codec, requireIndex, wrapper, indexWrapper);
             }
-        } catch (IOException e) {
+        } catch (final IOException e) {
             throw new TribbleException.MalformedFeatureFile("Unable to create BasicFeatureReader using feature file ", featureResource, e);
-        } catch (TribbleException e) {
+        } catch (final TribbleException e) {
             e.setSource(featureResource);
             throw e;
         }
@@ -108,16 +128,24 @@ public abstract class AbstractFeatureReader<T extends Feature, SOURCE> implement
      */
     public static <FEATURE extends Feature, SOURCE> AbstractFeatureReader<FEATURE, SOURCE> getFeatureReader(final String featureResource, final FeatureCodec<FEATURE, SOURCE>  codec, final Index index) throws TribbleException {
         try {
-            return new TribbleIndexedFeatureReader<FEATURE, SOURCE>(featureResource, codec, index);
-        } catch (IOException e) {
+            return new TribbleIndexedFeatureReader<>(featureResource, codec, index);
+        } catch (final IOException e) {
             throw new TribbleException.MalformedFeatureFile("Unable to create AbstractFeatureReader using feature file ", featureResource, e);
         }
 
     }
 
     protected AbstractFeatureReader(final String path, final FeatureCodec<T, SOURCE> codec) {
+        this(path, codec, null, null);
+    }
+
+    protected AbstractFeatureReader(final String path, final FeatureCodec<T, SOURCE> codec,
+                                    final Function<SeekableByteChannel, SeekableByteChannel> wrapper,
+                                    final Function<SeekableByteChannel, SeekableByteChannel> indexWrapper) {
         this.path = path;
         this.codec = codec;
+        this.wrapper = wrapper;
+        this.indexWrapper = indexWrapper;
     }
 
     /**
@@ -169,25 +197,30 @@ public abstract class AbstractFeatureReader<T extends Feature, SOURCE> implement
      *
      * @return the header object we've read-in
      */
+    @Override
     public Object getHeader() {
         return header.getHeaderValue();
     }
 
     static class EmptyIterator<T extends Feature> implements CloseableTribbleIterator<T> {
-        public Iterator iterator() { return this; }
-        public boolean hasNext() { return false; }
-        public T next() { return null; }
-        public void remove() { }
+        @Override public Iterator<T> iterator() { return this; }
+        @Override public boolean hasNext() { return false; }
+        @Override public T next() { return null; }
+        @Override public void remove() { }
         @Override public void close() { }
     }
 
+    public static boolean isTabix(String resourcePath, String indexPath) throws IOException {
+        if(indexPath == null){
+            indexPath = ParsingUtils.appendToPath(resourcePath, TabixUtils.STANDARD_INDEX_EXTENSION);
+        }
+        return hasBlockCompressedExtension(resourcePath) && ParsingUtils.resourceExists(indexPath);
+    }
+
     public static class ComponentMethods{
 
         public boolean isTabix(String resourcePath, String indexPath) throws IOException{
-            if(indexPath == null){
-                indexPath = ParsingUtils.appendToPath(resourcePath, TabixUtils.STANDARD_INDEX_EXTENSION);
-            }
-            return hasBlockCompressedExtension(resourcePath) && ParsingUtils.resourceExists(indexPath);
+            return AbstractFeatureReader.isTabix(resourcePath, indexPath);
         }
     }
 }
diff --git a/src/main/java/htsjdk/tribble/AsciiFeatureCodec.java b/src/main/java/htsjdk/tribble/AsciiFeatureCodec.java
index 141b764..c258576 100644
--- a/src/main/java/htsjdk/tribble/AsciiFeatureCodec.java
+++ b/src/main/java/htsjdk/tribble/AsciiFeatureCodec.java
@@ -18,8 +18,10 @@
 
 package htsjdk.tribble;
 
+import htsjdk.samtools.util.BlockCompressedInputStream;
 import htsjdk.samtools.util.CloserUtil;
 import htsjdk.samtools.util.LocationAware;
+import htsjdk.samtools.util.Log;
 import htsjdk.tribble.readers.*;
 
 import java.io.IOException;
@@ -34,6 +36,7 @@ import java.io.InputStream;
  * @param <T> The feature type this codec reads
  */
 public abstract class AsciiFeatureCodec<T extends Feature> extends AbstractFeatureCodec<T, LineIterator> {
+    private static final Log log = Log.getInstance(AsciiFeatureCodec.class);
     protected AsciiFeatureCodec(final Class<T> myClass) {
         super(myClass);
     }
@@ -49,14 +52,8 @@ public abstract class AsciiFeatureCodec<T extends Feature> extends AbstractFeatu
     }
 
     @Override
-    public LocationAware makeIndexableSourceFromStream(final InputStream bufferedInputStream) {
-        final PositionalBufferedStream pbs;
-        if (bufferedInputStream instanceof PositionalBufferedStream) {
-            pbs = (PositionalBufferedStream) bufferedInputStream;
-        } else {
-            pbs = new PositionalBufferedStream(bufferedInputStream);
-        }
-        return new AsciiLineReaderIterator(new AsciiLineReader(pbs));
+    public LocationAware makeIndexableSourceFromStream(final InputStream inputStream) {
+        return new AsciiLineReaderIterator(AsciiLineReader.from(inputStream));
     }
 
     @Override
diff --git a/src/main/java/htsjdk/tribble/Feature.java b/src/main/java/htsjdk/tribble/Feature.java
index 941790f..9ed852b 100644
--- a/src/main/java/htsjdk/tribble/Feature.java
+++ b/src/main/java/htsjdk/tribble/Feature.java
@@ -27,13 +27,14 @@ package htsjdk.tribble;
 import htsjdk.samtools.util.Locatable;
 
 /**
- * Represents a locus on a reference sequence.   All Features are expected to return 1-based closed-ended intervals.
+ * Marker interface for Locatables with Tribble support. A Feature represents a record in a tribble-supported file format.
+ * As {@link Locatable}, represents a locus on a reference sequence and is expected to return 1-based closed-ended intervals.
  */
 public interface Feature extends Locatable {
 
     /**
      * Return the features reference sequence name, e.g chromosome or contig
-     * @deprecated use getContig() instead
+     * @deprecated on 03/2015. Use getContig() instead.
      */
     @Deprecated
     default public String getChr() {
diff --git a/src/main/java/htsjdk/tribble/FeatureCodec.java b/src/main/java/htsjdk/tribble/FeatureCodec.java
index f14191a..e33d21b 100644
--- a/src/main/java/htsjdk/tribble/FeatureCodec.java
+++ b/src/main/java/htsjdk/tribble/FeatureCodec.java
@@ -27,13 +27,28 @@ import java.io.InputStream;
 /**
  * The base interface for classes that read in features.
  * <p/>
- * FeatureCodecs have to implement two key methods:
+ * FeatureCodecs must implement several key methods:
  * <p/>
- * {@link #readHeader(SOURCE)} - Reads the header, provided a {@link SOURCE} pointing at the beginning of the source input.
- * {@link #decode(SOURCE)} - Reads a {@link Feature} record, provided a {@link SOURCE} pointing at the beginning of a record within the 
- * source input.
+ * <ul>
+ * <li>{@link #makeSourceFromStream} Return a {@link SOURCE} for this {@link FeatureCodec} given an input stream that is buffered.
+ * <li>{@link #makeIndexableSourceFromStream} Return a {@link SOURCE} for this {@link FeatureCodec} that implements {@link LocationAware},
+ * and is thus suitable for use during indexing. During the indexing process, the indexer passes the {@link SOURCE} to the codec
+ * to consume Features from the underlying {@link SOURCE}, one at a time, recording the Feature location via the {@link SOURCE}'s
+ * {@link LocationAware} interface. Therefore, it is essential that the {@link SOURCE} implementation, the {@link #readHeader}
+ * method, and the {@link #decodeLoc} method, not introduce any buffering that would that would advance the {@link SOURCE}
+ * more than a single feature (or the more than the size of the header, in the case of {@link #readHeader}). Otherwise the
+ * index will be corrupt.
+ * <li>{@link #readHeader} - Reads the header, provided a {@link SOURCE} pointing at the beginning of the source input.
+ * The implementation of this method must not consume any input from the underlying SOURCE beyond the end of the header.
+ * <li>{@link #decode} - Reads a {@link Feature} record, provided a {@link SOURCE} pointing at the beginning of a
+ * record within the source input.
+ * <li>{@link #decodeLoc} - Reads a {@link Feature} record, provided a {@link SOURCE} pointing at the beginning of a
+ * record within the source input. The implementation of this method must not consume any input from the underlying stream
+ * beyond the end of the {@link Feature} returned.
+ * </ul>
  * <p/>
- * Note that it's not safe to carry state about the {@link SOURCE} within the codec.  There's no guarantee about its  state between calls.
+ * Note that it's not safe to carry state about the {@link SOURCE} within the codec.  There's no guarantee about its
+ * state between calls.
  *
  * @param <FEATURE_TYPE> The type of {@link Feature} this codec generates
  * @param <SOURCE> The type of the data source this codec reads from
@@ -88,14 +103,22 @@ public interface FeatureCodec<FEATURE_TYPE extends Feature, SOURCE> {
     public SOURCE makeSourceFromStream(final InputStream bufferedInputStream);
 
     /**
-     * Generates a {@link LocationAware} reader of type {@link SOURCE}.  Like {@link #makeSourceFromStream(java.io.InputStream)}, except
+     * Return a {@link SOURCE} for this {@link FeatureCodec} that implements {@link LocationAware},
+     * and is thus suitable for use during indexing. Like {@link #makeSourceFromStream(java.io.InputStream)}, except
      * the {@link LocationAware} compatibility is required for creating indexes.
-     * 
+     * </p>
      * Implementers of this method must return a type that is both {@link LocationAware} as well as {@link SOURCE}.  Note that this 
      * requirement cannot be enforced via the method signature due to limitations in Java's generic typing system.  Instead, consumers
      * should cast the call result into a {@link SOURCE} when applicable.
+     *</p>
+     * NOTE: During the indexing process, the indexer passes the {@link SOURCE} to the codec
+     * to consume Features from the underlying {@link SOURCE}, one at a time, recording the Feature location via the {@link SOURCE}'s
+     * {@link LocationAware} interface. Therefore, it is essential that the {@link SOURCE} implementation, the {@link #readHeader}
+     * method, and the {@link #decodeLoc} method, which are used during indexing, not introduce any buffering that would that
+     * would advance the {@link SOURCE} more than a single feature (or the more than the size of the header, in the case of
+     * {@link #readHeader}).
      */
-    public LocationAware makeIndexableSourceFromStream(final InputStream bufferedInputStream);
+    public LocationAware makeIndexableSourceFromStream(final InputStream inputStream);
 
     /** Adapter method that assesses whether the provided {@link SOURCE} has more data. True if it does, false otherwise. */
     public boolean isDone(final SOURCE source);
@@ -125,7 +148,7 @@ public interface FeatureCodec<FEATURE_TYPE extends Feature, SOURCE> {
      * Define the tabix format for the feature, used for indexing. Default implementation throws an exception.
      *
      * Note that only {@link AsciiFeatureCodec} could read tabix files as defined in
-     * {@link AbstractFeatureReader#getFeatureReader(String, String, FeatureCodec, boolean)}
+     * {@link AbstractFeatureReader#getFeatureReader(String, String, FeatureCodec, boolean, java.util.function.Function, java.util.function.Function)}
      *
      * @return the format to use with tabix
      * @throws TribbleException if the format is not defined
diff --git a/src/main/java/htsjdk/tribble/FeatureReader.java b/src/main/java/htsjdk/tribble/FeatureReader.java
index 3471393..c7773a2 100644
--- a/src/main/java/htsjdk/tribble/FeatureReader.java
+++ b/src/main/java/htsjdk/tribble/FeatureReader.java
@@ -32,6 +32,7 @@ public interface FeatureReader<T extends Feature> extends Closeable {
 
     public CloseableTribbleIterator<T> iterator() throws IOException;
 
+    @Override
     public void close() throws IOException;
 
     public List<String> getSequenceNames();
diff --git a/src/main/java/htsjdk/tribble/SimpleFeature.java b/src/main/java/htsjdk/tribble/SimpleFeature.java
index ddc62fa..0365dc5 100644
--- a/src/main/java/htsjdk/tribble/SimpleFeature.java
+++ b/src/main/java/htsjdk/tribble/SimpleFeature.java
@@ -39,14 +39,17 @@ public class SimpleFeature implements Feature {
         this.end = end;
     }
 
+    @Override
     public String getContig() {
         return contig;
     }
 
+    @Override
     public int getStart() {
         return start;
     }
 
+    @Override
     public int getEnd() {
         return end;
     }
diff --git a/src/main/java/htsjdk/tribble/TabixFeatureReader.java b/src/main/java/htsjdk/tribble/TabixFeatureReader.java
index 5d90295..e722433 100644
--- a/src/main/java/htsjdk/tribble/TabixFeatureReader.java
+++ b/src/main/java/htsjdk/tribble/TabixFeatureReader.java
@@ -30,9 +30,11 @@ import htsjdk.tribble.util.ParsingUtils;
 
 import java.io.IOException;
 import java.io.InputStream;
+import java.nio.channels.SeekableByteChannel;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
+import java.util.function.Function;
 
 /**
  * @author Jim Robinson
@@ -50,10 +52,7 @@ public class TabixFeatureReader<T extends Feature, SOURCE> extends AbstractFeatu
      * @throws IOException
      */
     public TabixFeatureReader(final String featureFile, final AsciiFeatureCodec codec) throws IOException {
-        super(featureFile, codec);
-        tabixReader = new TabixReader(featureFile);
-        sequenceNames = new ArrayList<String>(tabixReader.getChromosomes());
-        readHeader();
+        this(featureFile, null, codec, null, null);
     }
 
     /**
@@ -64,9 +63,25 @@ public class TabixFeatureReader<T extends Feature, SOURCE> extends AbstractFeatu
      * @throws IOException
      */
     public TabixFeatureReader(final String featureFile, final String indexFile, final AsciiFeatureCodec codec) throws IOException {
-        super(featureFile, codec);
-        tabixReader = new TabixReader(featureFile, indexFile);
-        sequenceNames = new ArrayList<String>(tabixReader.getChromosomes());
+        this(featureFile, indexFile, codec, null, null);
+    }
+
+    /**
+     *
+     * @param featureFile     path to a feature file. Can be a local file, http url, or ftp url
+     * @param indexFile       path to the index file.
+     * @param wrapper         a wrapper to apply to the byte stream from the featureResource allowing injecting features
+     *                        like caching and prefetching of the stream, may be null, will only be applied if featureFile
+     *                        is a uri representing a {@link java.nio.file.Path}
+     * @param indexWrapper    a wrapper to apply to the byte stream from the indexResource, may be null, will only be
+     *                        applied if indexFile is a uri representing a {@link java.nio.file.Path}
+     */
+    public TabixFeatureReader(final String featureFile, final String indexFile, final AsciiFeatureCodec codec,
+                              final Function<SeekableByteChannel, SeekableByteChannel> wrapper,
+                              final Function<SeekableByteChannel, SeekableByteChannel> indexWrapper) throws IOException {
+        super(featureFile, codec, wrapper, indexWrapper);
+        tabixReader = new TabixReader(featureFile, indexFile, wrapper, indexWrapper);
+        sequenceNames = new ArrayList<>(tabixReader.getChromosomes());
         readHeader();
     }
 
@@ -80,7 +95,7 @@ public class TabixFeatureReader<T extends Feature, SOURCE> extends AbstractFeatu
     private void readHeader() throws IOException {
         SOURCE source = null;
         try {
-            source = codec.makeSourceFromStream(new PositionalBufferedStream(new BlockCompressedInputStream(ParsingUtils.openInputStream(path))));
+            source = codec.makeSourceFromStream(new PositionalBufferedStream(new BlockCompressedInputStream(ParsingUtils.openInputStream(path, wrapper))));
             header = codec.readHeader(source);
         } catch (Exception e) {
             throw new TribbleException.MalformedFeatureFile("Unable to parse header with error: " + e.getMessage(), path, e);
@@ -97,6 +112,7 @@ public class TabixFeatureReader<T extends Feature, SOURCE> extends AbstractFeatu
     }
 
 
+    @Override
     public List<String> getSequenceNames() {
         return sequenceNames;
     }
@@ -110,6 +126,7 @@ public class TabixFeatureReader<T extends Feature, SOURCE> extends AbstractFeatu
      * @return
      * @throws IOException
      */
+    @Override
     public CloseableTribbleIterator<T> query(final String chr, final int start, final int end) throws IOException {
         final List<String> mp = getSequenceNames();
         if (mp == null) throw new TribbleException.TabixReaderFailure("Unable to find sequence named " + chr +
@@ -121,13 +138,15 @@ public class TabixFeatureReader<T extends Feature, SOURCE> extends AbstractFeatu
         return new FeatureIterator<T>(lineReader, start - 1, end);
     }
 
+    @Override
     public CloseableTribbleIterator<T> iterator() throws IOException {
-        final InputStream is = new BlockCompressedInputStream(ParsingUtils.openInputStream(path));
+        final InputStream is = new BlockCompressedInputStream(ParsingUtils.openInputStream(path, wrapper));
         final PositionalBufferedStream stream = new PositionalBufferedStream(is);
         final LineReader reader = new SynchronousLineReader(stream);
         return new FeatureIterator<T>(reader, 0, Integer.MAX_VALUE);
     }
 
+    @Override
     public void close() throws IOException {
         tabixReader.close();
     }
@@ -184,10 +203,12 @@ public class TabixFeatureReader<T extends Feature, SOURCE> extends AbstractFeatu
         }
 
 
+        @Override
         public boolean hasNext() {
             return currentRecord != null;
         }
 
+        @Override
         public T next() {
             T ret = currentRecord;
             try {
@@ -200,14 +221,17 @@ public class TabixFeatureReader<T extends Feature, SOURCE> extends AbstractFeatu
 
         }
 
+        @Override
         public void remove() {
             throw new UnsupportedOperationException("Remove is not supported in Iterators");
         }
 
+        @Override
         public void close() {
             lineReader.close();
         }
 
+        @Override
         public Iterator<T> iterator() {
             return this;
         }
diff --git a/src/main/java/htsjdk/tribble/Tribble.java b/src/main/java/htsjdk/tribble/Tribble.java
index 468f55d..f2c07a2 100644
--- a/src/main/java/htsjdk/tribble/Tribble.java
+++ b/src/main/java/htsjdk/tribble/Tribble.java
@@ -27,6 +27,7 @@ import htsjdk.tribble.util.ParsingUtils;
 import htsjdk.tribble.util.TabixUtils;
 
 import java.io.File;
+import java.nio.file.Path;
 
 /**
  * Common, tribble wide constants and static functions
@@ -37,9 +38,9 @@ public class Tribble {
     public final static String STANDARD_INDEX_EXTENSION = ".idx";
 
     /**
-     * Return the name of the index file for the provided vcf {@code filename}
+     * Return the name of the index file for the provided {@code filename}
      * Does not actually create an index
-     * @param filename  name of the vcf file
+     * @param filename  name of the file
      * @return non-null String representing the index filename
      */
     public static String indexFile(final String filename) {
@@ -47,9 +48,9 @@ public class Tribble {
     }
 
     /**
-     * Return the File of the index file for the provided vcf {@code file}
+     * Return the File of the index file for the provided {@code file}
      * Does not actually create an index
-     * @param file  the vcf file
+     * @param file  the file
      * @return a non-null File representing the index
      */
     public static File indexFile(final File file) {
@@ -57,9 +58,19 @@ public class Tribble {
     }
 
     /**
-     * Return the name of the tabix index file for the provided vcf {@code filename}
+     * Return the name of the index file for the provided {@code path}
      * Does not actually create an index
-     * @param filename  name of the vcf file
+     * @param path the path
+     * @return Path representing the index filename
+     */
+    public static Path indexPath(final Path path) {
+        return path.getFileSystem().getPath(indexFile(path.toAbsolutePath().toString()));
+    }
+
+    /**
+     * Return the name of the tabix index file for the provided {@code filename}
+     * Does not actually create an index
+     * @param filename  name of the file
      * @return non-null String representing the index filename
      */
     public static String tabixIndexFile(final String filename) {
@@ -67,9 +78,9 @@ public class Tribble {
     }
 
     /**
-     * Return the File of the tabix index file for the provided vcf {@code file}
+     * Return the File of the tabix index file for the provided {@code file}
      * Does not actually create an index
-     * @param file  the vcf file
+     * @param file  the file
      * @return a non-null File representing the index
      */
     public static File tabixIndexFile(final File file) {
@@ -77,9 +88,19 @@ public class Tribble {
     }
 
     /**
-     * Return the name of the index file for the provided vcf {@code filename} and {@code extension}
+     * Return the name of the tabix index file for the provided {@code path}
+     * Does not actually create an index
+     * @param path the path
+     * @return Path representing the index filename
+     */
+    public static Path tabixIndexPath(final Path path) {
+        return path.getFileSystem().getPath(tabixIndexFile(path.toAbsolutePath().toString()));
+    }
+
+    /**
+     * Return the name of the index file for the provided {@code filename} and {@code extension}
      * Does not actually create an index
-     * @param filename  name of the vcf file
+     * @param filename  name of the file
      * @param extension the extension to use for the index
      * @return non-null String representing the index filename
      */
@@ -88,9 +109,9 @@ public class Tribble {
     }
 
     /**
-     * Return the File of the index file for the provided vcf {@code file} and {@code extension}
+     * Return the File of the index file for the provided {@code file} and {@code extension}
      * Does not actually create an index
-     * @param file  the vcf file
+     * @param file  the file
      * @param extension the extension to use for the index
      * @return a non-null File representing the index
      */
diff --git a/src/main/java/htsjdk/tribble/TribbleException.java b/src/main/java/htsjdk/tribble/TribbleException.java
index 86202eb..abcbc25 100644
--- a/src/main/java/htsjdk/tribble/TribbleException.java
+++ b/src/main/java/htsjdk/tribble/TribbleException.java
@@ -54,6 +54,7 @@ public class TribbleException extends RuntimeException {
      * override the default message with ours, which attaches the source file in question
      * @return a string with our internal error, along with the causitive source file (or other input source)
      */
+    @Override
     public String getMessage() {
         String ret = super.getMessage();
         if ( source != null )
@@ -125,6 +126,13 @@ public class TribbleException extends RuntimeException {
         }
     }
 
+    public static class CorruptedIndexFile extends TribbleException {
+        public CorruptedIndexFile(String message, String f, Exception e) {
+            super(message,e);
+            setSource(f);
+        }
+    }
+
     public static class TabixReaderFailure extends TribbleException {
         public TabixReaderFailure(String message, String f, Exception e) {
             super(message,e);
diff --git a/src/main/java/htsjdk/tribble/TribbleIndexedFeatureReader.java b/src/main/java/htsjdk/tribble/TribbleIndexedFeatureReader.java
index 514782d..8ff9c14 100644
--- a/src/main/java/htsjdk/tribble/TribbleIndexedFeatureReader.java
+++ b/src/main/java/htsjdk/tribble/TribbleIndexedFeatureReader.java
@@ -33,15 +33,15 @@ import htsjdk.tribble.readers.PositionalBufferedStream;
 import htsjdk.tribble.util.ParsingUtils;
 
 import java.io.BufferedInputStream;
-import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.URI;
-import java.net.URISyntaxException;
 import java.net.URLEncoder;
+import java.nio.channels.SeekableByteChannel;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
+import java.util.function.Function;
 import java.util.zip.GZIPInputStream;
 
 /**
@@ -80,8 +80,13 @@ public class TribbleIndexedFeatureReader<T extends Feature, SOURCE> extends Abst
      * @throws IOException
      */
     public TribbleIndexedFeatureReader(final String featurePath, final FeatureCodec<T, SOURCE> codec, final boolean requireIndex) throws IOException {
+        this(featurePath, codec, requireIndex, null, null);
+    }
 
-        super(featurePath, codec);
+    public TribbleIndexedFeatureReader(final String featurePath, final FeatureCodec<T, SOURCE> codec, final boolean requireIndex,
+                                       Function<SeekableByteChannel, SeekableByteChannel> wrapper,
+                                       Function<SeekableByteChannel, SeekableByteChannel> indexWrapper) throws IOException {
+        super(featurePath, codec, wrapper, indexWrapper);
 
         if (requireIndex) {
             this.loadIndex();
@@ -104,9 +109,23 @@ public class TribbleIndexedFeatureReader<T extends Feature, SOURCE> extends Abst
      * @throws IOException
      */
     public TribbleIndexedFeatureReader(final String featureFile, final String indexFile, final FeatureCodec<T, SOURCE> codec, final boolean requireIndex) throws IOException {
-        this(featureFile, codec, false); // required to read the header
+        this(featureFile, indexFile, codec, requireIndex, null, null);
+    }
+
+        /**
+         * @param featureFile  - path to the feature file, can be a local file path, http url, or ftp url, or any other
+         *                     uri supported by a {@link java.nio.file.Path} plugin
+         * @param indexFile    - path to the index file
+         * @param codec        - codec to decode the features
+         * @param requireIndex - true if the reader will be queries for specific ranges.  An index (idx) file must exist
+         * @throws IOException
+         */
+    public TribbleIndexedFeatureReader(final String featureFile, final String indexFile, final FeatureCodec<T, SOURCE> codec, final boolean requireIndex,
+                                       Function<SeekableByteChannel, SeekableByteChannel> wrapper,
+                                       Function<SeekableByteChannel, SeekableByteChannel> indexWrapper) throws IOException {
+        this(featureFile, codec, false, wrapper, indexWrapper); // required to read the header
         if (indexFile != null && ParsingUtils.resourceExists(indexFile)) {
-            index = IndexFactory.loadIndex(indexFile);
+            index = IndexFactory.loadIndex(indexFile, indexWrapper);
             this.needCheckForIndex = false;
         } else {
             if (requireIndex) {
@@ -118,6 +137,8 @@ public class TribbleIndexedFeatureReader<T extends Feature, SOURCE> extends Abst
         }
     }
 
+
+
     /**
      * @param featureFile - path to the feature file, can be a local file path, http url, or ftp url
      * @param codec       - codec to decode the features
@@ -139,12 +160,12 @@ public class TribbleIndexedFeatureReader<T extends Feature, SOURCE> extends Abst
     private void loadIndex() throws IOException{
         String indexFile = Tribble.indexFile(this.path);
         if (ParsingUtils.resourceExists(indexFile)) {
-            index = IndexFactory.loadIndex(indexFile);
+            index = IndexFactory.loadIndex(indexFile, indexWrapper);
         } else {
             // See if the index itself is gzipped
             indexFile = ParsingUtils.appendToPath(indexFile, ".gz");
             if (ParsingUtils.resourceExists(indexFile)) {
-                index = IndexFactory.loadIndex(indexFile);
+                index = IndexFactory.loadIndex(indexFile, indexWrapper);
             }
         }
         this.needCheckForIndex = false;
@@ -164,11 +185,11 @@ public class TribbleIndexedFeatureReader<T extends Feature, SOURCE> extends Abst
         final SeekableStream result;
         if (reuseStreamInQuery()) {
             // if the stream points to an underlying file, only create the underlying seekable stream once
-            if (seekableStream == null) seekableStream = SeekableStreamFactory.getInstance().getStreamFor(path);
+            if (seekableStream == null) seekableStream = SeekableStreamFactory.getInstance().getStreamFor(path, wrapper);
             result = seekableStream;
         } else {
             // we are not reusing the stream, so make a fresh copy each time we request it
-            result = SeekableStreamFactory.getInstance().getStreamFor(path);
+            result = SeekableStreamFactory.getInstance().getStreamFor(path, wrapper);
         }
 
         return result;
@@ -183,6 +204,7 @@ public class TribbleIndexedFeatureReader<T extends Feature, SOURCE> extends Abst
         return pathIsRegularFile;
     }
 
+    @Override
     public void close() throws IOException {
         // close the seekable stream if that's necessary
         if (seekableStream != null) seekableStream.close();
@@ -193,6 +215,7 @@ public class TribbleIndexedFeatureReader<T extends Feature, SOURCE> extends Abst
      *
      * @return list of strings of the contig names
      */
+    @Override
     public List<String> getSequenceNames() {
         return !this.hasIndex() ? new ArrayList<String>() : new ArrayList<String>(index.getSequenceNames());
     }
@@ -218,8 +241,9 @@ public class TribbleIndexedFeatureReader<T extends Feature, SOURCE> extends Abst
         InputStream is = null;
         PositionalBufferedStream pbs = null;
         try {
-            is = ParsingUtils.openInputStream(path);
+            is = ParsingUtils.openInputStream(path, wrapper);
             if (hasBlockCompressedExtension(new URI(URLEncoder.encode(path, "UTF-8")))) {
+                // TODO: TEST/FIX THIS! https://github.com/samtools/htsjdk/issues/944
                 // TODO -- warning I don't think this can work, the buffered input stream screws up position
                 is = new GZIPInputStream(new BufferedInputStream(is));
             }
@@ -252,6 +276,7 @@ public class TribbleIndexedFeatureReader<T extends Feature, SOURCE> extends Abst
      * @return an iterator of records in this interval
      * @throws IOException
      */
+    @Override
     public CloseableTribbleIterator<T> query(final String chr, final int start, final int end) throws IOException {
 
         if (!this.hasIndex()) {
@@ -271,36 +296,12 @@ public class TribbleIndexedFeatureReader<T extends Feature, SOURCE> extends Abst
      * @return Return an iterator to iterate over the entire file
      * @throws IOException
      */
+    @Override
     public CloseableTribbleIterator<T> iterator() throws IOException {
         return new WFIterator();
     }
 
     /**
-     * @deprecated use {@link #hasBlockCompressedExtension(String)} instead
-     */
-    //Visible for testing
-    @Deprecated
-    static boolean isGZIPPath(final String path) {
-        if (path.toLowerCase().endsWith(".gz")) {
-            return true;
-        }
-        else {
-            String uriPath = null;
-            try {
-                URI uri = new URI(path);
-                if (uri != null) {
-                    uriPath = uri.getPath();
-                    return uriPath != null && uriPath.toLowerCase().endsWith(".gz");
-                }
-                return false;
-            }
-            catch (URISyntaxException e) {
-                return false;
-            }
-        }
-    }
-
-    /**
      * Class to iterator over an entire file.
      */
     class WFIterator implements CloseableTribbleIterator<T> {
@@ -313,7 +314,7 @@ public class TribbleIndexedFeatureReader<T extends Feature, SOURCE> extends Abst
          * @throws IOException
          */
         public WFIterator() throws IOException {
-            final InputStream inputStream = ParsingUtils.openInputStream(path);
+            final InputStream inputStream = ParsingUtils.openInputStream(path, wrapper);
 
             final PositionalBufferedStream pbs;
             if (hasBlockCompressedExtension(path)) {
@@ -324,9 +325,9 @@ public class TribbleIndexedFeatureReader<T extends Feature, SOURCE> extends Abst
             } else {
                 pbs = new PositionalBufferedStream(inputStream, 512000);
             }
-            /**
+            /*
              * The header was already read from the original source in the constructor; don't read it again, since some codecs keep state
-             * about its initializagtion.  Instead, skip that part of the stream.
+             * about its initialization.  Instead, skip that part of the stream.
              */
             pbs.skip(header.getHeaderEnd());
             source = codec.makeSourceFromStream(pbs);
@@ -424,10 +425,12 @@ public class TribbleIndexedFeatureReader<T extends Feature, SOURCE> extends Abst
         }
 
 
+        @Override
         public boolean hasNext() {
             return currentRecord != null;
         }
 
+        @Override
         public T next() {
             final T ret = currentRecord;
             try {
@@ -511,11 +514,13 @@ public class TribbleIndexedFeatureReader<T extends Feature, SOURCE> extends Abst
         }
 
 
+        @Override
         public void remove() {
             throw new UnsupportedOperationException("Remove is not supported.");
         }
 
 
+        @Override
         public void close() {
             // Note that this depends on BlockStreamWrapper not actually closing the underlying stream
             codec.close(source);
@@ -529,6 +534,7 @@ public class TribbleIndexedFeatureReader<T extends Feature, SOURCE> extends Abst
             }
         }
 
+        @Override
         public Iterator<T> iterator() {
             return this;
         }
diff --git a/src/main/java/htsjdk/tribble/bed/BEDCodec.java b/src/main/java/htsjdk/tribble/bed/BEDCodec.java
index ea1e889..2ee4be7 100644
--- a/src/main/java/htsjdk/tribble/bed/BEDCodec.java
+++ b/src/main/java/htsjdk/tribble/bed/BEDCodec.java
@@ -76,8 +76,8 @@ public class BEDCodec extends AsciiFeatureCodec<BEDFeature> {
         if (line.trim().isEmpty()) {
             return null;
         }
-
-        if (line.startsWith("#") || line.startsWith("track") || line.startsWith("browser")) {
+        // discard header lines in case the caller hasn't called readHeader
+        if (isBEDHeaderLine(line)) {
             this.readHeaderLine(line);
             return null;
         }
@@ -86,11 +86,38 @@ public class BEDCodec extends AsciiFeatureCodec<BEDFeature> {
         return decode(tokens);
     }
 
+    /**
+     * The BED codec doesn't retain the actual header, but we need to parse through
+     * it and advance to the beginning of the first feature. This is especially true
+     * if we're indexing, since we want to underlying stream offset to be established
+     * correctly, but is also the case for when we're simply iterating to satisfy a
+     * feature query (otherwise the feature reader can terminate prematurely if the
+     * header is large).
+     * @param lineIterator
+     * @return Always null. The BEDCodec currently doesn't model or preserve the BED header.
+     */
     @Override
-    public Object readActualHeader(LineIterator reader) {
+    public Object readActualHeader(final LineIterator lineIterator) {
+        while (lineIterator.hasNext()) {
+            // Only peek, since we don't want to actually consume a line of input unless its a header line.
+            // This prevents us from advancing past the first feature.
+            final String nextLine = lineIterator.peek();
+            if (isBEDHeaderLine(nextLine)) {
+                // advance the iterator and consume the line (which is a no-op)
+                this.readHeaderLine(lineIterator.next());
+            } else {
+                return null; // break out when we've seen the end of the header
+            }
+        }
+
         return null;
     }
 
+    // Return true if the candidateLine looks like a BED header line.
+    private boolean isBEDHeaderLine(final String candidateLine) {
+        return candidateLine.startsWith("#") || candidateLine.startsWith("track") || candidateLine.startsWith("browser");
+    }
+
     public BEDFeature decode(String[] tokens) {
         int tokenCount = tokens.length;
 
diff --git a/src/main/java/htsjdk/tribble/bed/FullBEDFeature.java b/src/main/java/htsjdk/tribble/bed/FullBEDFeature.java
index eab5688..975777d 100644
--- a/src/main/java/htsjdk/tribble/bed/FullBEDFeature.java
+++ b/src/main/java/htsjdk/tribble/bed/FullBEDFeature.java
@@ -39,6 +39,7 @@ public class FullBEDFeature extends SimpleBEDFeature implements BEDFeature {
 
     }
 
+    @Override
     public java.util.List<Exon> getExons() {
         return exons;
     }
diff --git a/src/main/java/htsjdk/tribble/bed/SimpleBEDFeature.java b/src/main/java/htsjdk/tribble/bed/SimpleBEDFeature.java
index 77a030f..4a64168 100644
--- a/src/main/java/htsjdk/tribble/bed/SimpleBEDFeature.java
+++ b/src/main/java/htsjdk/tribble/bed/SimpleBEDFeature.java
@@ -56,14 +56,17 @@ public class SimpleBEDFeature implements BEDFeature {
         return chr;
     }
 
+    @Override
     public int getStart() {
         return start;
     }
 
+    @Override
     public int getEnd() {
         return end;
     }
 
+    @Override
     public Strand getStrand() {
         return strand;
     }
@@ -84,6 +87,7 @@ public class SimpleBEDFeature implements BEDFeature {
         this.end = end;
     }
 
+    @Override
     public String getType() {
         return type;
     }
@@ -92,6 +96,7 @@ public class SimpleBEDFeature implements BEDFeature {
         this.type = type;
     }
 
+    @Override
     public Color getColor() {
         return color;
     }
@@ -100,6 +105,7 @@ public class SimpleBEDFeature implements BEDFeature {
         this.color = color;
     }
 
+    @Override
     public String getDescription() {
         return description;
     }
@@ -108,6 +114,7 @@ public class SimpleBEDFeature implements BEDFeature {
         this.description = description;
     }
 
+    @Override
     public String getName() {
         return name;
     }
@@ -116,6 +123,7 @@ public class SimpleBEDFeature implements BEDFeature {
         this.name = name;
     }
 
+    @Override
     public float getScore() {
         return score;
     }
@@ -124,6 +132,7 @@ public class SimpleBEDFeature implements BEDFeature {
         this.score = score;
     }
 
+    @Override
     public String getLink() {
         return link;
     }
@@ -134,6 +143,7 @@ public class SimpleBEDFeature implements BEDFeature {
 
     final static List<FullBEDFeature.Exon> emptyExonList = new ArrayList();
 
+    @Override
     public java.util.List<FullBEDFeature.Exon> getExons() {
         return emptyExonList;
     }
diff --git a/src/main/java/htsjdk/tribble/example/CountRecords.java b/src/main/java/htsjdk/tribble/example/CountRecords.java
index 230c1bf..3bb8e41 100644
--- a/src/main/java/htsjdk/tribble/example/CountRecords.java
+++ b/src/main/java/htsjdk/tribble/example/CountRecords.java
@@ -29,7 +29,6 @@ import htsjdk.tribble.Feature;
 import htsjdk.tribble.FeatureCodec;
 import htsjdk.tribble.Tribble;
 import htsjdk.tribble.bed.BEDCodec;
-import htsjdk.tribble.gelitext.GeliTextCodec;
 import htsjdk.tribble.index.Index;
 import htsjdk.tribble.index.IndexFactory;
 import htsjdk.tribble.index.linear.LinearIndex;
@@ -193,8 +192,6 @@ public class CountRecords {
         //    return new VCFCodec();
         if (featureFile.getName().endsWith(".bed") || featureFile.getName().endsWith(".BED") )
             return new BEDCodec();
-        if (featureFile.getName().endsWith(".geli.calls") || featureFile.getName().endsWith(".geli") )
-            return new GeliTextCodec();
         throw new IllegalArgumentException("Unable to determine correct file type based on the file name, for file -> " + featureFile);
     }
 }
diff --git a/src/main/java/htsjdk/tribble/gelitext/DiploidGenotype.java b/src/main/java/htsjdk/tribble/gelitext/DiploidGenotype.java
deleted file mode 100644
index f533432..0000000
--- a/src/main/java/htsjdk/tribble/gelitext/DiploidGenotype.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2013 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.tribble.gelitext;
-
-
-/**
- * Class DiploidGenotype
- *
- * Enum describing all possible combinations of diploid genotype variations;
- * AA, AC, etc.
- *
- * @author aaron
- */
- at Deprecated
-public enum DiploidGenotype {
-    AA, AC, AG, AT, CC, CG, CT, GG, GT, TT;
-
-    public static DiploidGenotype toDiploidGenotype(String genotype) {
-        if (genotype.length() != 2)
-            throw new DiploidGenotypeException("Genotype string for conversion should be of length 2, we were passed = " + genotype);
-        genotype = genotype.toUpperCase();
-        for (DiploidGenotype g: DiploidGenotype.values())
-            if (g.toString().equals(genotype)) return g;
-        throw new DiploidGenotypeException("Unable to find genotype matching " + genotype);
-    }
-
-    public boolean isHet() {
-        return toString().toCharArray()[0] != toString().toCharArray()[1];
-    }
-
-    public boolean containsBase(char base) {
-        return (toString().charAt(0) == base || toString().charAt(1) == base);
-    }
-}
-
- at Deprecated
-class DiploidGenotypeException extends RuntimeException {
-    DiploidGenotypeException(String s) {
-        super(s);
-    }
-
-    DiploidGenotypeException(String s, Throwable throwable) {
-        super(s, throwable);
-    }
-}
\ No newline at end of file
diff --git a/src/main/java/htsjdk/tribble/gelitext/GeliTextCodec.java b/src/main/java/htsjdk/tribble/gelitext/GeliTextCodec.java
deleted file mode 100644
index 394b5dc..0000000
--- a/src/main/java/htsjdk/tribble/gelitext/GeliTextCodec.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2013 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.tribble.gelitext;
-
-import htsjdk.samtools.util.CollectionUtil;
-import htsjdk.tribble.AsciiFeatureCodec;
-import htsjdk.tribble.Feature;
-import htsjdk.tribble.exception.CodecLineParsingException;
-import htsjdk.tribble.readers.LineIterator;
-
-import java.util.Arrays;
-
-
-/**
- * <p/>
- * A codec for parsing geli text files, which is the text version of the geli binary format.
- * <p/>
- * <p/>
- * GELI text has the following tab-seperated fields:
- * contig             the contig (string)
- * position           the position on the contig (long)
- * refBase            the reference base (char)
- * depthOfCoverage    the depth of coverage at this position (int)
- * maximumMappingQual the maximum mapping quality of a read at this position (int)
- * genotype           the called genotype (string)
- * LODBestToReference the LOD score of the best to the reference (double)
- * LODBestToNext      the LOD score of the best to the next best genotype (double)
- * likelihoods        the array of all genotype likelihoods, in ordinal ordering (array of 10 doubles, in ordinal order)
- *
- * @author aaron
- * @deprecated This is deprecated and unsupported.
- */
- at Deprecated
-public class GeliTextCodec extends AsciiFeatureCodec<GeliTextFeature> {
-    public GeliTextCodec() {
-        super(GeliTextFeature.class);
-    }
-
-    public Feature decodeLoc(final String line) {
-        return decode(line);
-    }
-
-    @Override
-    public GeliTextFeature decode(final String line) {
-        // clean out header lines and comments
-        if (line.startsWith("#") || line.startsWith("@"))
-            return null;
-
-        // parse into tokens
-        final String[] parts = line.trim().split("\\s+");
-        return decode(parts);
-    }
-
-    @Override
-    public boolean canDecode(String path){
-	return path.toLowerCase().endsWith(".geli.calls") || path.toLowerCase().endsWith(".geli");
-    }
-
-    @Override
-    public Object readActualHeader(LineIterator reader) {
-        return null;
-    }
-
-    public GeliTextFeature decode(final String[] tokens) {
-        try {
-            // check that we got the correct number of tokens in the split
-            if (tokens.length != 18)
-                throw new CodecLineParsingException("Invalid GeliTextFeature row found -- incorrect element count.  Expected 18, got " + tokens.length + " line = " + CollectionUtil.join(Arrays.asList(tokens), " "));
-
-            // UPPER case and sort
-            final char[] x = tokens[5].toUpperCase().toCharArray();
-            Arrays.sort(x);
-            final String bestGenotype = new String(x);
-
-            final double[] genotypeLikelihoods = new double[10];
-            for (int pieceIndex = 8, offset = 0; pieceIndex < 18; pieceIndex++, offset++) {
-                genotypeLikelihoods[offset] = Double.valueOf(tokens[pieceIndex]);
-            }
-            return new GeliTextFeature(tokens[0],
-                    Long.valueOf(tokens[1]),
-                    Character.toUpperCase(tokens[2].charAt(0)),
-                    Integer.valueOf(tokens[3]),
-                    Integer.valueOf(tokens[4]),
-                    DiploidGenotype.toDiploidGenotype(bestGenotype),
-                    Double.valueOf(tokens[6]),
-                    Double.valueOf(tokens[7]),
-                    genotypeLikelihoods);
-        } catch (CodecLineParsingException e) {
-            e.printStackTrace();
-            throw new RuntimeException("Unable to parse line " + CollectionUtil.join(Arrays.asList(tokens), " "), e);
-        } catch (NumberFormatException e) {
-            e.printStackTrace();
-            throw new RuntimeException("Unable to parse line " + CollectionUtil.join(Arrays.asList(tokens), " "), e);
-        }
-    }
-}
diff --git a/src/main/java/htsjdk/tribble/gelitext/GeliTextFeature.java b/src/main/java/htsjdk/tribble/gelitext/GeliTextFeature.java
deleted file mode 100644
index baad1ca..0000000
--- a/src/main/java/htsjdk/tribble/gelitext/GeliTextFeature.java
+++ /dev/null
@@ -1,148 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2013 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.tribble.gelitext;
-
-import htsjdk.tribble.Feature;
-
-import java.util.Arrays;
-
-
-/**
- *         <p/>
- *         Class GeliTextFeature
- *         <p/>
- *         This is a feature for the Geli text object, which is the text version of the Geli binary genotyping format.
- *
- * @author aaron
- * @deprecated this is deprecated and no longer supported
- */
- at Deprecated
-public class GeliTextFeature implements Feature {
-
-    private final String contig;                // the contig name
-    private final long position;                // the position on the contig
-    private final char refBase;                 // the reference base
-    private final int depthOfCoverage;          // the depth of coverage at this position
-    private final int maximumMappingQual;       // the maximum mapping quality of a read at this position
-    private final DiploidGenotype genotype;     // the called genotype
-    private final double LODBestToReference;    // the LOD score of the best to the reference
-    private final double LODBestToNext;         // the LOD score of the best to the next best genotype
-    private final double likelihoods[];         // the array of all genotype likelihoods, in ordinal order
-
-    /**
-     * Create a geli text feature, given:
-     *
-     * @param contig             the contig
-     * @param position           the position on the contig
-     * @param refBase            the reference base
-     * @param depthOfCoverage    the depth of coverage at this position
-     * @param maximumMappingQual the maximum mapping quality of a read at this position
-     * @param genotype           the called genotype
-     * @param LODBestToReference the LOD score of the best to the reference
-     * @param LODBestToNext      the LOD score of the best to the next best genotype
-     * @param likelihoods        the array of all genotype likelihoods, in ordinal ordering
-     */
-    public GeliTextFeature(String contig,
-                           long position,
-                           char refBase,
-                           int depthOfCoverage,
-                           int maximumMappingQual,
-                           DiploidGenotype genotype,
-                           double LODBestToReference,
-                           double LODBestToNext,
-                           double[] likelihoods) {
-        this.contig = contig;
-        this.position = position;
-        this.refBase = refBase;
-        this.depthOfCoverage = depthOfCoverage;
-        this.maximumMappingQual = maximumMappingQual;
-        this.genotype = genotype;
-        this.LODBestToReference = LODBestToReference;
-        this.LODBestToNext = LODBestToNext;
-        this.likelihoods = likelihoods;
-    }
-
-    @Override
-    public String getContig() {
-        return this.contig;
-    }
-
-    /** Return the start position in 1-based coordinates (first base is 1) */
-    public int getStart() {
-        return (int) this.position;
-    }
-
-    /**
-     * Return the end position following 1-based fully closed conventions.  The length of a feature is
-     * end - start + 1;
-     */
-    public int getEnd() {
-        return (int) this.position;
-    }
-
-    public char getRefBase() {
-        return refBase;
-    }
-
-    public int getDepthOfCoverage() {
-        return depthOfCoverage;
-    }
-
-    public int getMaximumMappingQual() {
-        return maximumMappingQual;
-    }
-
-    public DiploidGenotype getGenotype() {
-        return genotype;
-    }
-
-    public double getLODBestToNext() {
-        return LODBestToNext;
-    }
-
-    public double getLODBestToReference() {
-        return LODBestToReference;
-    }
-
-    public double[] getLikelihoods() {
-        return likelihoods;
-    }
-
-    private static double Epsilon = 0.0001;
-    public boolean equals(Object o) {
-        if (!(o instanceof GeliTextFeature)) return false;
-        GeliTextFeature other = (GeliTextFeature)o;
-        if (!Arrays.equals(likelihoods,other.likelihoods)) return false;
-        if (!contig.equals(other.contig)) return false;
-        if (!(position == other.position)) return false;
-        if (!(refBase == other.refBase)) return false;
-        if (!(depthOfCoverage == other.depthOfCoverage)) return false;
-        if (!(maximumMappingQual == other.maximumMappingQual)) return false;
-        if (!(genotype == other.genotype)) return false;
-        if (!(Math.abs(LODBestToReference - other.LODBestToReference) < Epsilon)) return false;
-        if (!(Math.abs(LODBestToNext - other.LODBestToNext) < Epsilon)) return false;
-        return true;
-    }
-
-}
diff --git a/src/main/java/htsjdk/tribble/index/AbstractIndex.java b/src/main/java/htsjdk/tribble/index/AbstractIndex.java
index 47e31cc..ac90e5d 100644
--- a/src/main/java/htsjdk/tribble/index/AbstractIndex.java
+++ b/src/main/java/htsjdk/tribble/index/AbstractIndex.java
@@ -18,6 +18,9 @@
 
 package htsjdk.tribble.index;
 
+import htsjdk.samtools.util.IOUtil;
+import htsjdk.samtools.util.Log;
+import htsjdk.samtools.util.RuntimeIOException;
 import htsjdk.tribble.Tribble;
 import htsjdk.tribble.TribbleException;
 import htsjdk.tribble.util.LittleEndianInputStream;
@@ -25,8 +28,9 @@ import htsjdk.tribble.util.LittleEndianOutputStream;
 
 import java.io.BufferedOutputStream;
 import java.io.File;
-import java.io.FileOutputStream;
 import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.LinkedHashMap;
@@ -67,11 +71,12 @@ public abstract class AbstractIndex implements MutableIndex {
     private final static long NO_TS = -1L;
 
     protected int version;                    // Our version value
-    protected File indexedFile = null;         // The file we've created this index for
+    protected Path indexedPath = null;         // The file we've created this index for
     protected long indexedFileSize = NO_FILE_SIZE; // The size of the indexed file
     protected long indexedFileTS = NO_TS;      // The timestamp
     protected String indexedFileMD5 = NO_MD5;        // The MD5 value, generally not filled in (expensive to calc)
     protected int flags;
+    protected final Log logger = Log.getInstance(this.getClass());
 
     public boolean hasFileSize() {
         return indexedFileSize != NO_FILE_SIZE;
@@ -101,6 +106,7 @@ public abstract class AbstractIndex implements MutableIndex {
      * @param obj
      * @return true if this and obj are 'effectively' equivalent data structures.
      */
+    @Override
     public boolean equalsIgnoreProperties(final Object obj) {
         if (this == obj) return true;
         if (!(obj instanceof AbstractIndex)) {
@@ -115,8 +121,8 @@ public abstract class AbstractIndex implements MutableIndex {
             return false;
         }
 
-        if (indexedFile != other.indexedFile && (indexedFile == null || !indexedFile.equals(other.indexedFile))) {
-            System.err.printf("equals indexedFile: this %s != other %s%n", indexedFile, other.indexedFile);
+        if (indexedPath != other.indexedPath && (indexedPath == null || !indexedPath.equals(other.indexedPath))) {
+            System.err.printf("equals indexedPath: this %s != other %s%n", indexedPath, other.indexedPath);
             return false;
         }
 
@@ -158,18 +164,27 @@ public abstract class AbstractIndex implements MutableIndex {
      * @param featureFile the feature file to create an index from
      */
     public AbstractIndex(final String featureFile) {
-        this(new File(featureFile));
+        this();
+        try {
+            this.indexedPath = IOUtil.getPath(featureFile).toAbsolutePath();
+        } catch (IOException e) {
+            throw new IllegalArgumentException("IO error: " + e.getMessage(), e);
+        }
     }
 
     public AbstractIndex(final File featureFile) {
+        this(featureFile.toPath());
+    }
+
+    public AbstractIndex(final Path featurePath) {
         this();
-        this.indexedFile = featureFile;
+        this.indexedPath = featurePath.toAbsolutePath();
     }
 
     public AbstractIndex(final AbstractIndex parent) {
         this();
         this.version = parent.version;
-        this.indexedFile = parent.indexedFile;
+        this.indexedPath = parent.indexedPath;
         this.indexedFileSize = parent.indexedFileSize;
         this.indexedFileTS = parent.indexedFileTS;
         this.indexedFileMD5 = parent.indexedFileMD5;
@@ -194,12 +209,23 @@ public abstract class AbstractIndex implements MutableIndex {
      *
      * @return true if we're up to date, false otherwise
      */
+    @Override
     public boolean isCurrentVersion() {
         return version == VERSION;
     }
 
+    /**
+     * Gets the indexed file.
+     * @throws UnsupportedOperationException if the path cannot be represented as a file.
+     * @deprecated on 03/2017. Use {@link #getIndexedPath()} instead.
+     */
+    @Deprecated
     public File getIndexedFile() {
-        return indexedFile;
+        return getIndexedPath().toFile();
+    }
+
+    public Path getIndexedPath() {
+        return indexedPath;
     }
 
     public long getIndexedFileSize() {
@@ -226,15 +252,20 @@ public abstract class AbstractIndex implements MutableIndex {
         this.indexedFileMD5 = md5;
     }
 
+    @Override
     public boolean containsChromosome(final String chr) {
         return chrIndices.containsKey(chr);
     }
 
     public void finalizeIndex() {
-        // these two functions must be called now because the file may be being written during on the fly indexing
-        if (indexedFile != null) {
-            this.indexedFileSize = indexedFile.length();
-            this.indexedFileTS = indexedFile.lastModified();
+        try {
+            // these two functions must be called now because the file may be being written during on the fly indexing
+            if (indexedPath != null) {
+                this.indexedFileSize = Files.size(indexedPath);
+                this.indexedFileTS = Files.getLastModifiedTime(indexedPath).toMillis();
+            }
+        } catch (IOException e) {
+            throw new RuntimeIOException(e);
         }
     }
 
@@ -248,7 +279,7 @@ public abstract class AbstractIndex implements MutableIndex {
         dos.writeInt(MAGIC_NUMBER);
         dos.writeInt(getType());
         dos.writeInt(version);
-        dos.writeString(indexedFile.getAbsolutePath());
+        dos.writeString(indexedPath.toUri().toString());
         dos.writeLong(indexedFileSize);
         dos.writeLong(indexedFileTS);
         dos.writeString(indexedFileMD5);
@@ -271,7 +302,7 @@ public abstract class AbstractIndex implements MutableIndex {
     private void readHeader(final LittleEndianInputStream dis) throws IOException {
 
         version = dis.readInt();
-        indexedFile = new File(dis.readString());
+        indexedPath = IOUtil.getPath(dis.readString());
         indexedFileSize = dis.readLong();
         indexedFileTS = dis.readLong();
         indexedFileMD5 = dis.readString();
@@ -306,10 +337,12 @@ public abstract class AbstractIndex implements MutableIndex {
         }
     }
 
+    @Override
     public List<String> getSequenceNames() {
         return new ArrayList<String>(chrIndices.keySet());
     }
 
+    @Override
     public List<Block> getBlocks(final String chr, final int start, final int end) {
         return getChrIndex(chr).getBlocks(start, end);
     }
@@ -332,6 +365,7 @@ public abstract class AbstractIndex implements MutableIndex {
         }
     }
 
+    @Override
     public void write(final LittleEndianOutputStream stream) throws IOException {
         writeHeader(stream);
 
@@ -343,18 +377,21 @@ public abstract class AbstractIndex implements MutableIndex {
     }
 
     @Override
-    public void write(final File idxFile) throws IOException {
-        try(final LittleEndianOutputStream idxStream = new LittleEndianOutputStream(new BufferedOutputStream(new FileOutputStream(idxFile)))) {
+    public void write(final Path idxPath) throws IOException {
+        try(final LittleEndianOutputStream idxStream = new LittleEndianOutputStream(new BufferedOutputStream(Files.newOutputStream(idxPath)))) {
             write(idxStream);
         }
     }
 
     @Override
-    public void writeBasedOnFeatureFile(final File featureFile) throws IOException {
-        if (!featureFile.isFile()) return;
-        write(Tribble.indexFile(featureFile));
+    public void writeBasedOnFeaturePath(final Path featurePath) throws IOException {
+        if (!Files.isRegularFile(featurePath)) {
+            throw new IOException("Cannot write based on a non-regular file: " + featurePath.toUri());
+        }
+        write(Tribble.indexPath(featurePath));
     }
 
+
     public void read(final LittleEndianInputStream dis) throws IOException {
         try {
             readHeader(dis);
@@ -380,7 +417,7 @@ public abstract class AbstractIndex implements MutableIndex {
     }
 
     protected void printIndexInfo() {
-        System.out.println(String.format("Index for %s with %d indices", indexedFile, chrIndices.size()));
+        System.out.println(String.format("Index for %s with %d indices", indexedPath, chrIndices.size()));
         final BlockStats stats = getBlockStats(true);
         System.out.println(String.format("  total blocks %d", stats.total));
         System.out.println(String.format("  total empty blocks %d", stats.empty));
@@ -418,10 +455,12 @@ public abstract class AbstractIndex implements MutableIndex {
         return String.format("%12d blocks (%12d empty (%.2f%%))", stats.total, stats.empty, (100.0 * stats.empty) / stats.total);
     }
 
+    @Override
     public void addProperty(final String key, final String value) {
         properties.put(key, value);
     }
 
+    @Override
     public void addProperties(final Map<String, String> properties) {
         this.properties.putAll(properties);
     }
@@ -431,6 +470,7 @@ public abstract class AbstractIndex implements MutableIndex {
      *
      * @return the mapping of values as an unmodifiable map
      */
+    @Override
     public Map<String, String> getProperties() {
         return Collections.unmodifiableMap(properties);
     }
diff --git a/src/main/java/htsjdk/tribble/index/DynamicIndexCreator.java b/src/main/java/htsjdk/tribble/index/DynamicIndexCreator.java
index 52153a5..17274ac 100644
--- a/src/main/java/htsjdk/tribble/index/DynamicIndexCreator.java
+++ b/src/main/java/htsjdk/tribble/index/DynamicIndexCreator.java
@@ -31,6 +31,7 @@ import htsjdk.tribble.index.linear.LinearIndexCreator;
 import htsjdk.tribble.util.MathUtils;
 
 import java.io.File;
+import java.nio.file.Path;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.Map;
@@ -56,15 +57,18 @@ public class DynamicIndexCreator extends TribbleIndexCreator {
     MathUtils.RunningStat stats = new MathUtils.RunningStat();
     long basesSeen = 0;
     Feature lastFeature = null;
-    File inputFile;
 
-    public DynamicIndexCreator(final File inputFile, final IndexFactory.IndexBalanceApproach iba) {
+    public DynamicIndexCreator(final Path inputPath, final IndexFactory.IndexBalanceApproach iba) {
         this.iba = iba;
         // get a list of index creators
-        this.inputFile = inputFile;
-        creators = getIndexCreators(inputFile,iba);
+        creators = getIndexCreators(inputPath, iba);
+    }
+
+    public DynamicIndexCreator(final File inputFile, final IndexFactory.IndexBalanceApproach iba) {
+        this(inputFile.toPath(), iba);
     }
 
+    @Override
     public Index finalizeIndex(final long finalFilePosition) {
         // finalize all of the indexes
         // return the score of the indexes we've generated
@@ -89,19 +93,19 @@ public class DynamicIndexCreator extends TribbleIndexCreator {
 
     /**
      * create a list of index creators (initialized) representing the common index types we'd suspect they'd like to use
-     * @param inputFile the input file to use to create the indexes
+     * @param inputPath the input path to use to create the indexes
      * @return a map of index type to the best index for that balancing approach
      */
-    private Map<IndexFactory.IndexType,TribbleIndexCreator> getIndexCreators(final File inputFile, final IndexFactory.IndexBalanceApproach iba) {
+    private Map<IndexFactory.IndexType,TribbleIndexCreator> getIndexCreators(final Path inputPath, final IndexFactory.IndexBalanceApproach iba) {
         final Map<IndexFactory.IndexType,TribbleIndexCreator> creators = new HashMap<IndexFactory.IndexType,TribbleIndexCreator>();
 
         if (iba == IndexFactory.IndexBalanceApproach.FOR_SIZE) {
             // add a linear index with the default bin size
-            final LinearIndexCreator linearNormal = new LinearIndexCreator(inputFile, LinearIndexCreator.DEFAULT_BIN_WIDTH);
+            final LinearIndexCreator linearNormal = new LinearIndexCreator(inputPath, LinearIndexCreator.DEFAULT_BIN_WIDTH);
             creators.put(IndexFactory.IndexType.LINEAR,linearNormal);
 
             // create a tree index with the default size
-            final IntervalIndexCreator treeNormal = new IntervalIndexCreator(inputFile, IntervalIndexCreator.DEFAULT_FEATURE_COUNT);
+            final IntervalIndexCreator treeNormal = new IntervalIndexCreator(inputPath, IntervalIndexCreator.DEFAULT_FEATURE_COUNT);
             creators.put(IndexFactory.IndexType.INTERVAL_TREE,treeNormal);
         }
 
@@ -110,12 +114,12 @@ public class DynamicIndexCreator extends TribbleIndexCreator {
         if (iba == IndexFactory.IndexBalanceApproach.FOR_SEEK_TIME) {
             // create a linear index with a small bin size
             final LinearIndexCreator linearSmallBin =
-                    new LinearIndexCreator(inputFile, Math.max(200, LinearIndexCreator.DEFAULT_BIN_WIDTH / 4));
+                    new LinearIndexCreator(inputPath, Math.max(200, LinearIndexCreator.DEFAULT_BIN_WIDTH / 4));
             creators.put(IndexFactory.IndexType.LINEAR,linearSmallBin);
 
             // create a tree index with a small index size
             final IntervalIndexCreator treeSmallBin =
-                    new IntervalIndexCreator(inputFile, Math.max(20, IntervalIndexCreator.DEFAULT_FEATURE_COUNT / 8));
+                    new IntervalIndexCreator(inputPath, Math.max(20, IntervalIndexCreator.DEFAULT_FEATURE_COUNT / 8));
             creators.put(IndexFactory.IndexType.INTERVAL_TREE,treeSmallBin);
         }
 
@@ -123,6 +127,7 @@ public class DynamicIndexCreator extends TribbleIndexCreator {
     }
 
 
+    @Override
     public void addFeature(final Feature f, final long filePosition) {
         // protected static Map<Double,Index> createIndex(FileBasedFeatureIterator<Feature> iterator, Map<IndexType,IndexCreator> creators, IndexBalanceApproach iba) {
         // feed each feature to the indexes we've created
diff --git a/src/main/java/htsjdk/tribble/index/Index.java b/src/main/java/htsjdk/tribble/index/Index.java
index ca6cc60..51982c6 100644
--- a/src/main/java/htsjdk/tribble/index/Index.java
+++ b/src/main/java/htsjdk/tribble/index/Index.java
@@ -27,6 +27,7 @@ import htsjdk.tribble.util.LittleEndianOutputStream;
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.file.Path;
 import java.util.List;
 import java.util.Map;
 
@@ -72,17 +73,43 @@ public interface Index {
     /**
      * Writes the index into a file.
      *
+     * Default implementation delegates to {@link #write(Path)}
+     *
      * @param idxFile Where to write the index.
      * @throws IOException if the index is unable to write to the specified file
      */
-    public void write(final File idxFile) throws IOException;
+    public default void write(final File idxFile) throws IOException {
+        write(idxFile.toPath());
+    }
+
+    /**
+     * Writes the index into a path.
+     *
+     * @param indexPath Where to write the index.
+     * @throws IOException if the index is unable to write to the specified path.
+     */
+    public void write(final Path indexPath) throws IOException;
 
     /**
      * Write an appropriately named and located Index file based on the name and location of the featureFile.
-     * If featureFile is not a normal file, the index will silently not be written.
+     *
+     * Default implementation delegates to {@link #writeBasedOnFeaturePath(Path)}
+     *
      * @param featureFile
+     * @throws IOException if featureFile is not a normal file.
+     */
+    public default void writeBasedOnFeatureFile(File featureFile) throws IOException {
+        writeBasedOnFeaturePath(featureFile.toPath());
+    }
+
+    /**
+     * Write an appropriately named and located Index file based on the name and location of the featureFile.
+     * If featureFile is not a normal file, the index will silently not be written.
+     *
+     * @param featurePath
+     * @throws IOException if featureFile is not a normal file.
      */
-    public void writeBasedOnFeatureFile(File featureFile) throws IOException;
+    public void writeBasedOnFeaturePath(Path featurePath) throws IOException;
 
     /**
      * @return get the list of properties for this index.  Returns null if no properties.
diff --git a/src/main/java/htsjdk/tribble/index/IndexFactory.java b/src/main/java/htsjdk/tribble/index/IndexFactory.java
index 4e23e93..6007cf1 100644
--- a/src/main/java/htsjdk/tribble/index/IndexFactory.java
+++ b/src/main/java/htsjdk/tribble/index/IndexFactory.java
@@ -28,9 +28,7 @@ import htsjdk.samtools.SAMSequenceDictionary;
 import htsjdk.samtools.seekablestream.ISeekableStreamFactory;
 import htsjdk.samtools.seekablestream.SeekableStream;
 import htsjdk.samtools.seekablestream.SeekableStreamFactory;
-import htsjdk.samtools.util.BlockCompressedInputStream;
-import htsjdk.samtools.util.BlockCompressedStreamConstants;
-import htsjdk.samtools.util.LocationAware;
+import htsjdk.samtools.util.*;
 import htsjdk.tribble.*;
 import htsjdk.tribble.index.interval.IntervalIndexCreator;
 import htsjdk.tribble.index.interval.IntervalTreeIndex;
@@ -39,7 +37,7 @@ import htsjdk.tribble.index.linear.LinearIndexCreator;
 import htsjdk.tribble.index.tabix.TabixFormat;
 import htsjdk.tribble.index.tabix.TabixIndex;
 import htsjdk.tribble.index.tabix.TabixIndexCreator;
-import htsjdk.tribble.readers.PositionalBufferedStream;
+import htsjdk.tribble.readers.*;
 import htsjdk.tribble.util.LittleEndianInputStream;
 import htsjdk.tribble.util.ParsingUtils;
 import htsjdk.tribble.util.TabixUtils;
@@ -49,16 +47,20 @@ import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.io.EOFException;
 import java.io.InputStream;
 import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.nio.channels.SeekableByteChannel;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
+import java.util.function.Function;
 import java.util.zip.GZIPInputStream;
 
 /**
  * Factory class for creating indexes.  It is the responsibility of this class to determine and create the
- * correct index type from the input file or stream.  Only LinearIndex and IntervalTreeIndex are supported
+ * correct index type from the input file or stream.  LinearIndex, IntervalTreeIndex, and TabixIndex are supported
  * by this factory.
  */
 public class IndexFactory {
@@ -90,9 +92,7 @@ public class IndexFactory {
         public IndexCreator getIndexCreator() {
             try {
                 return indexCreatorClass.newInstance();
-            } catch ( final InstantiationException e ) {
-                throw new TribbleException("Couldn't make index creator in " + this, e);
-            } catch ( final IllegalAccessException e ) {
+            } catch ( final InstantiationException | IllegalAccessException e ) {
                 throw new TribbleException("Couldn't make index creator in " + this, e);
             }
         }
@@ -162,20 +162,39 @@ public class IndexFactory {
      * @param indexFile from which to load the index
      */
     public static Index loadIndex(final String indexFile) {
+        return loadIndex(indexFile, null);
+    }
+
+    /**
+     * Load in index from the specified file.   The type of index (LinearIndex or IntervalTreeIndex) is determined
+     * at run time by reading the type flag in the file.
+     *
+     * @param indexFile from which to load the index
+     * @param indexWrapper a wrapper to apply to the raw byte stream of the index file, only applied to uri's loaded as
+     *                     {@link java.nio.file.Path}
+     */
+    public static Index loadIndex(final String indexFile, Function<SeekableByteChannel, SeekableByteChannel> indexWrapper) {
         // Must be buffered, because getIndexType uses mark and reset
-        try (BufferedInputStream bufferedInputStream = new BufferedInputStream(indexFileInputStream(indexFile), Defaults.NON_ZERO_BUFFER_SIZE)){
+        try (BufferedInputStream bufferedInputStream = new BufferedInputStream(indexFileInputStream(indexFile, indexWrapper), Defaults.NON_ZERO_BUFFER_SIZE)) {
             final Class<Index> indexClass = IndexType.getIndexType(bufferedInputStream).getIndexType();
             final Constructor<Index> ctor = indexClass.getConstructor(InputStream.class);
             return ctor.newInstance(bufferedInputStream);
+        } catch (final TribbleException ex) {
+            throw ex;
         } catch (final IOException ex) {
             throw new TribbleException.UnableToReadIndexFile("Unable to read index file", indexFile, ex);
+        } catch (final InvocationTargetException ex) {
+            if (ex.getCause() instanceof EOFException) {
+                throw new TribbleException.CorruptedIndexFile("Index file is corrupted", indexFile, ex);
+            }
+            throw new RuntimeException(ex);
         } catch (final Exception ex) {
             throw new RuntimeException(ex);
         }
     }
 
-    private static InputStream indexFileInputStream(final String indexFile) throws IOException {
-        final InputStream inputStreamInitial = ParsingUtils.openInputStream(indexFile);
+    private static InputStream indexFileInputStream(final String indexFile, Function<SeekableByteChannel, SeekableByteChannel> indexWrapper) throws IOException {
+        final InputStream inputStreamInitial = ParsingUtils.openInputStream(indexFile, indexWrapper);
         if (indexFile.endsWith(".gz")) {
             return new GZIPInputStream(inputStreamInitial);
         }
@@ -364,8 +383,10 @@ public class IndexFactory {
             lastFeature = currentFeature;
         }
 
+        // Get the end position of the last feature before closing the iterator
+        long finalPosition = iterator.getPosition();
         iterator.close();
-        return creator.finalizeIndex(iterator.getPosition());
+        return creator.finalizeIndex(finalPosition);
     }
 
     private static String featToString(final Feature feature){
@@ -398,62 +419,53 @@ public class IndexFactory {
 
         /**
          *
-         * @param inputFile The file from which to read. Stream for reading is opened on construction.
+         * @param inputFile The file from which to read. Stream for reading is opened on construction. May not be null.
          * @param codec
          */
         public FeatureIterator(final File inputFile, final FeatureCodec<FEATURE_TYPE, SOURCE> codec) {
+            if (inputFile == null) {
+                throw new IllegalArgumentException("FeatureIterator input file cannot be null");
+            }
             this.codec = codec;
             this.inputFile = inputFile;
-            final FeatureCodecHeader header = readHeader();
-            source = (SOURCE) codec.makeIndexableSourceFromStream(initStream(inputFile, header.getHeaderEnd()));
-            readNextFeature();
-        }
-
-        /**
-         * Some codecs,  e.g. VCF files,  need the header to decode features.  This is a rather poor design,
-         * the internal header is set as a side-affect of reading it, but we have to live with it for now.
-         */
-        private FeatureCodecHeader readHeader() {
             try {
-                final SOURCE source = this.codec.makeSourceFromStream(initStream(inputFile, 0));
-                final FeatureCodecHeader header = this.codec.readHeader(source);
-                codec.close(source);
-                return header;
+                if (AbstractFeatureReader.hasBlockCompressedExtension(inputFile)) {
+                    final BlockCompressedInputStream bcs = initIndexableBlockCompressedStream(inputFile);
+                    source = (SOURCE) codec.makeIndexableSourceFromStream(bcs);
+                } else {
+                    final PositionalBufferedStream ps = initIndexablePositionalStream(inputFile);
+                    source = (SOURCE) codec.makeIndexableSourceFromStream(ps);
+                }
+                this.codec.readHeader(source);
+                readNextFeature();
             } catch (final IOException e) {
                 throw new TribbleException.InvalidHeader("Error reading header " + e.getMessage());
             }
         }
 
-        private PositionalBufferedStream initStream(final File inputFile, final long skip) {
+        private static PositionalBufferedStream initIndexablePositionalStream(final File inputFile) {
             try {
                 final FileInputStream fileStream = new FileInputStream(inputFile);
-                final InputStream is;
+                return new PositionalBufferedStream(fileStream);
+            } catch (final FileNotFoundException e) {
+                throw new TribbleException.FeatureFileDoesntExist("Unable to open the input file, most likely the file doesn't exist.", inputFile.getAbsolutePath());
+            }
+        }
 
-                // if this looks like a block compressed file and it in fact is, we will use it
-                // otherwise we will use the file as is
-                if (AbstractFeatureReader.hasBlockCompressedExtension(inputFile)) {
-                    // make a buffered stream to test that this is in fact a valid block compressed file
-                    final int bufferSize = Math.max(Defaults.BUFFER_SIZE, BlockCompressedStreamConstants.MAX_COMPRESSED_BLOCK_SIZE);
-                    final BufferedInputStream bufferedStream = new BufferedInputStream(fileStream, bufferSize);
+        private static BlockCompressedInputStream initIndexableBlockCompressedStream(final File inputFile) {
+            final int bufferSize = Math.max(Defaults.BUFFER_SIZE, BlockCompressedStreamConstants.MAX_COMPRESSED_BLOCK_SIZE);
 
-                    if (!BlockCompressedInputStream.isValidFile(bufferedStream)) {
-                        throw new TribbleException.MalformedFeatureFile("Input file is not in valid block compressed format.", inputFile.getAbsolutePath());
-                    }
+            // make a buffered stream to test that this is in fact a valid block compressed file
+            try (final BufferedInputStream bufferedStream = new BufferedInputStream(new FileInputStream(inputFile), bufferSize)){
 
-                    final ISeekableStreamFactory ssf = SeekableStreamFactory.getInstance();
-                    // if we got here, the file is valid, make a SeekableStream for the BlockCompressedInputStream to read from
-                    final SeekableStream seekableStream =
-                            ssf.getBufferedStream(ssf.getStreamFor(inputFile.getAbsolutePath()));
-                    is = new BlockCompressedInputStream(seekableStream);
+                if (!BlockCompressedInputStream.isValidFile(bufferedStream)) {
+                    throw new TribbleException.MalformedFeatureFile("Input file is not in valid block compressed format.",
+                            inputFile.getAbsolutePath());
                 }
-                else {
-                    is = fileStream;
-                }
-
-                final PositionalBufferedStream pbs = new PositionalBufferedStream(is);
 
-                if ( skip > 0 ) pbs.skip(skip);
-                return pbs;
+                final ISeekableStreamFactory ssf = SeekableStreamFactory.getInstance();
+                final SeekableStream seekableStream = ssf.getStreamFor(inputFile.getAbsolutePath());
+                return new BlockCompressedInputStream(seekableStream);
             } catch (final FileNotFoundException e) {
                 throw new TribbleException.FeatureFileDoesntExist("Unable to open the input file, most likely the file doesn't exist.", inputFile.getAbsolutePath());
             } catch (final IOException e) {
@@ -461,10 +473,12 @@ public class IndexFactory {
             }
         }
 
+        @Override
         public boolean hasNext() {
             return nextFeature != null;
         }
 
+        @Override
         public Feature next() {
             final Feature ret = nextFeature;
             readNextFeature();
@@ -474,6 +488,7 @@ public class IndexFactory {
         /**
          * @throws UnsupportedOperationException
          */
+        @Override
         public void remove() {
             throw new UnsupportedOperationException("We cannot remove");
         }
diff --git a/src/main/java/htsjdk/tribble/index/interval/Interval.java b/src/main/java/htsjdk/tribble/index/interval/Interval.java
index 9d47877..6c0e648 100644
--- a/src/main/java/htsjdk/tribble/index/interval/Interval.java
+++ b/src/main/java/htsjdk/tribble/index/interval/Interval.java
@@ -76,6 +76,7 @@ public class Interval implements Comparable {
     }
 
 
+    @Override
     public int compareTo(Object o) {
         Interval other = (Interval) o;
         if (this.start < other.start)
diff --git a/src/main/java/htsjdk/tribble/index/interval/IntervalIndexCreator.java b/src/main/java/htsjdk/tribble/index/interval/IntervalIndexCreator.java
index e826eda..58e2f87 100644
--- a/src/main/java/htsjdk/tribble/index/interval/IntervalIndexCreator.java
+++ b/src/main/java/htsjdk/tribble/index/interval/IntervalIndexCreator.java
@@ -25,6 +25,7 @@ import htsjdk.tribble.index.TribbleIndexCreator;
 import htsjdk.tribble.index.interval.IntervalTreeIndex.ChrIndex;
 
 import java.io.File;
+import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.LinkedList;
 
@@ -51,17 +52,26 @@ public class IntervalIndexCreator extends TribbleIndexCreator {
 
     private final ArrayList<MutableInterval> intervals = new ArrayList<MutableInterval>();
 
-    File inputFile;
+    Path inputPath;
 
-    public IntervalIndexCreator(final File inputFile, final int featuresPerInterval) {
-        this.inputFile = inputFile;
+    public IntervalIndexCreator(final Path inputPath, final int featuresPerInterval) {
+        this.inputPath = inputPath;
         this.featuresPerInterval = featuresPerInterval;
     }
 
+    public IntervalIndexCreator(final File inputFile, final int featuresPerInterval) {
+        this(inputFile.toPath(), featuresPerInterval);
+    }
+
     public IntervalIndexCreator(final File inputFile) {
-        this(inputFile, DEFAULT_FEATURE_COUNT);
+        this(inputFile.toPath());
+    }
+
+    public IntervalIndexCreator(final Path inputPath) {
+        this(inputPath, DEFAULT_FEATURE_COUNT);
     }
 
+    @Override
     public void addFeature(final Feature feature, final long filePosition) {
         // if we don't have a chrIndex yet, or if the last one was for the previous contig, create a new one
         if (chrList.isEmpty() || !chrList.getLast().getName().equals(feature.getContig())) {
@@ -105,8 +115,9 @@ public class IntervalIndexCreator extends TribbleIndexCreator {
      * @param finalFilePosition the final file position, for indexes that have to close out with the final position
      * @return a Tree Index
      */
+    @Override
     public Index finalizeIndex(final long finalFilePosition) {
-        final IntervalTreeIndex featureIndex = new IntervalTreeIndex(inputFile.getAbsolutePath());
+        final IntervalTreeIndex featureIndex = new IntervalTreeIndex(inputPath);
         // dump the remaining bins to the index
         addIntervalsToLastChr(finalFilePosition);
         featureIndex.setChrIndex(chrList);
diff --git a/src/main/java/htsjdk/tribble/index/interval/IntervalTreeIndex.java b/src/main/java/htsjdk/tribble/index/interval/IntervalTreeIndex.java
index 055888e..c4b2865 100644
--- a/src/main/java/htsjdk/tribble/index/interval/IntervalTreeIndex.java
+++ b/src/main/java/htsjdk/tribble/index/interval/IntervalTreeIndex.java
@@ -25,6 +25,7 @@ import htsjdk.tribble.util.LittleEndianOutputStream;
 
 import java.io.IOException;
 import java.io.InputStream;
+import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Comparator;
@@ -57,6 +58,15 @@ public class IntervalTreeIndex extends AbstractIndex {
      *
      * @param featureFile File which we are indexing
      */
+    public IntervalTreeIndex(final Path featureFile) {
+        super(featureFile);
+    }
+
+    /**
+     * Prepare to build an index.
+     *
+     * @param featureFile File which we are indexing
+     */
     public IntervalTreeIndex(final String featureFile) {
         super(featureFile);
     }
@@ -119,6 +129,7 @@ public class IntervalTreeIndex extends AbstractIndex {
             tree = new IntervalTree();
         }
 
+        @Override
         public String getName() {
             return name;
         }
@@ -127,11 +138,13 @@ public class IntervalTreeIndex extends AbstractIndex {
             tree.insert(iv);
         }
 
+        @Override
         public List<Block> getBlocks() {
             return null;
         }
 
 
+        @Override
         public List<Block> getBlocks(final int start, final int end) {
 
             // Get intervals and build blocks list
@@ -148,6 +161,7 @@ public class IntervalTreeIndex extends AbstractIndex {
 
             // Sort blocks by start position
             Arrays.sort(blocks, new Comparator<Block>() {
+                @Override
                 public int compare(final Block b1, final Block b2) {
                     // this is a little cryptic because the normal method (b1.getStartPosition() - b2.getStartPosition()) wraps in int space and we incorrectly sort the blocks in extreme cases
                     return b1.getStartPosition() - b2.getStartPosition() < 1 ? -1 : (b1.getStartPosition() - b2.getStartPosition() > 1 ? 1 : 0);
@@ -175,6 +189,7 @@ public class IntervalTreeIndex extends AbstractIndex {
             System.out.println(tree.toString());
         }
 
+        @Override
         public void write(final LittleEndianOutputStream dos) throws IOException {
 
             dos.writeString(name);
@@ -190,6 +205,7 @@ public class IntervalTreeIndex extends AbstractIndex {
 
         }
 
+        @Override
         public void read(final LittleEndianInputStream dis) throws IOException {
 
             tree = new IntervalTree();
diff --git a/src/main/java/htsjdk/tribble/index/linear/LinearIndex.java b/src/main/java/htsjdk/tribble/index/linear/LinearIndex.java
index 4f4d910..3d7905a 100644
--- a/src/main/java/htsjdk/tribble/index/linear/LinearIndex.java
+++ b/src/main/java/htsjdk/tribble/index/linear/LinearIndex.java
@@ -28,6 +28,7 @@ import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.PrintStream;
+import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -71,12 +72,21 @@ public class LinearIndex extends AbstractIndex {
      * @param indices
      * @param featureFile
      */
-    public LinearIndex(final List<ChrIndex> indices, final File featureFile) {
-        super(featureFile.getAbsolutePath());
+    public LinearIndex(final List<ChrIndex> indices, final Path featureFile) {
+        super(featureFile);
         for (final ChrIndex index : indices)
             chrIndices.put(index.getName(), index);
     }
 
+    /**
+     * Initialize using the specified {@code indices}
+     * @param indices
+     * @param featureFile
+     */
+    public LinearIndex(final List<ChrIndex> indices, final File featureFile) {
+        this(indices, featureFile.toPath());
+    }
+
     private LinearIndex(final LinearIndex parent, final List<ChrIndex> indices) {
         super(parent);
         for (final ChrIndex index : indices)
@@ -92,6 +102,14 @@ public class LinearIndex extends AbstractIndex {
     }
 
     /**
+     * Initialize with default parameters
+     * @param featurePath Path for which this is an index
+     */
+    public LinearIndex(final Path featurePath) {
+        super(featurePath);
+    }
+
+    /**
      * Load from file.
      * @param inputStream This method assumes that the input stream is already buffered as appropriate.
      */
@@ -101,6 +119,7 @@ public class LinearIndex extends AbstractIndex {
         read(dis);
     }
 
+    @Override
     public boolean isCurrentVersion() {
         if (!super.isCurrentVersion()) return false;
 
@@ -117,6 +136,7 @@ public class LinearIndex extends AbstractIndex {
         return INDEX_TYPE;
     }
 
+    @Override
     public List<String> getSequenceNames() {
         return (chrIndices == null ? Collections.EMPTY_LIST :
                 Collections.unmodifiableList(new ArrayList<String>(chrIndices.keySet())));
@@ -173,6 +193,7 @@ public class LinearIndex extends AbstractIndex {
             this.nFeatures = 0;
         }
 
+        @Override
         public String getName() {
             return name;
         }
@@ -186,10 +207,12 @@ public class LinearIndex extends AbstractIndex {
             return blocks.size();
         }
 
+        @Override
         public List<Block> getBlocks() {
             return blocks;
         }
 
+        @Override
         public List<Block> getBlocks(final int start, final int end) {
             if (blocks.isEmpty()) {
                 return Collections.emptyList();
@@ -231,6 +254,7 @@ public class LinearIndex extends AbstractIndex {
             this.nFeatures++;
         }
 
+        @Override
         public void write(final LittleEndianOutputStream dos) throws IOException {
 
             // Chr name, binSize,  # bins,  longest feature
@@ -253,6 +277,7 @@ public class LinearIndex extends AbstractIndex {
             dos.writeLong(pos + size);
         }
 
+        @Override
         public void read(final LittleEndianInputStream dis) throws IOException {
             name = dis.readString();
             binWidth = dis.readInt();
diff --git a/src/main/java/htsjdk/tribble/index/linear/LinearIndexCreator.java b/src/main/java/htsjdk/tribble/index/linear/LinearIndexCreator.java
index 1158fdf..9109705 100644
--- a/src/main/java/htsjdk/tribble/index/linear/LinearIndexCreator.java
+++ b/src/main/java/htsjdk/tribble/index/linear/LinearIndexCreator.java
@@ -29,6 +29,7 @@ import htsjdk.tribble.index.Index;
 import htsjdk.tribble.index.TribbleIndexCreator;
 
 import java.io.File;
+import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.LinkedList;
 
@@ -43,20 +44,28 @@ public class LinearIndexCreator  extends TribbleIndexCreator {
     private int binWidth = DEFAULT_BIN_WIDTH;
 
     // the input file
-    private final File inputFile;
+    private final Path inputFile;
 
     private final LinkedList<LinearIndex.ChrIndex> chrList = new LinkedList<LinearIndex.ChrIndex>();
     private int longestFeature= 0;
 
     private final ArrayList<Block> blocks = new ArrayList<Block>();
 
-    public LinearIndexCreator(final File inputFile, final int binSize) {
-        this.inputFile = inputFile;
+    public LinearIndexCreator(final Path inputPath, final int binSize) {
+        this.inputFile = inputPath;
         binWidth = binSize;
     }
 
+    public LinearIndexCreator(final File inputFile, final int binSize) {
+        this(inputFile.toPath(), binSize);
+    }
+
     public LinearIndexCreator(final File inputFile) {
-        this(inputFile, DEFAULT_BIN_WIDTH);
+        this(inputFile.toPath());
+    }
+
+    public LinearIndexCreator(final Path inputPath) {
+        this(inputPath, DEFAULT_BIN_WIDTH);
     }
 
     /**
@@ -64,6 +73,7 @@ public class LinearIndexCreator  extends TribbleIndexCreator {
      * @param feature the feature, from which we use the contig, start, and stop
      * @param filePosition the position of the file at the BEGINNING of the current feature
      */
+    @Override
     public void addFeature(final Feature feature, final long filePosition) {
         // fi we don't have a chrIndex yet, or if the last one was for the previous contig, create a new one
         if (chrList.isEmpty() || !chrList.getLast().getName().equals(feature.getContig())) {
@@ -97,6 +107,7 @@ public class LinearIndexCreator  extends TribbleIndexCreator {
      * @param finalFilePosition the final file position, for indexes that have to close out with the final position
      * @return an Index object
      */
+    @Override
     public Index finalizeIndex(final long finalFilePosition) {
         if (finalFilePosition == 0)
             throw new IllegalArgumentException("finalFilePosition != 0, -> " + finalFilePosition);
diff --git a/src/main/java/htsjdk/tribble/index/tabix/TabixFormat.java b/src/main/java/htsjdk/tribble/index/tabix/TabixFormat.java
index 0cf910f..e9fe292 100644
--- a/src/main/java/htsjdk/tribble/index/tabix/TabixFormat.java
+++ b/src/main/java/htsjdk/tribble/index/tabix/TabixFormat.java
@@ -55,7 +55,8 @@ public class TabixFormat implements Cloneable {
     public int endPositionColumn;
     /** Lines in the file being indexed that start with this character are ignored. */
     public char metaCharacter;
-    /** This is part of the index header, but does not appear to be used. */
+
+    /** TODO: This is written, and part of the index header, but does not appear to be used. */
     public int numHeaderLinesToSkip;
 
     public TabixFormat() {
diff --git a/src/main/java/htsjdk/tribble/index/tabix/TabixIndex.java b/src/main/java/htsjdk/tribble/index/tabix/TabixIndex.java
index 044cefe..d7cc31c 100644
--- a/src/main/java/htsjdk/tribble/index/tabix/TabixIndex.java
+++ b/src/main/java/htsjdk/tribble/index/tabix/TabixIndex.java
@@ -30,7 +30,10 @@ import htsjdk.samtools.LinearIndex;
 import htsjdk.samtools.util.BlockCompressedInputStream;
 import htsjdk.samtools.util.BlockCompressedOutputStream;
 import htsjdk.samtools.util.CloserUtil;
+import htsjdk.samtools.util.IOUtil;
+import htsjdk.samtools.util.Log;
 import htsjdk.samtools.util.StringUtil;
+import htsjdk.tribble.Tribble;
 import htsjdk.tribble.TribbleException;
 import htsjdk.tribble.index.Block;
 import htsjdk.tribble.index.Index;
@@ -44,6 +47,8 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
+import java.nio.file.Files;
+import java.nio.file.Path;
 import java.util.*;
 
 /**
@@ -95,6 +100,13 @@ public class TabixIndex implements Index {
         this(new BlockCompressedInputStream(tabixFile), true);
     }
 
+    /**
+     * Convenient ctor that opens the path, wraps with with BGZF reader, and closes after reading index.
+     */
+    public TabixIndex(final Path tabixPath) throws IOException {
+        this(new BlockCompressedInputStream(Files.newInputStream(tabixPath)), true);
+    }
+
     private TabixIndex(final InputStream inputStream, final boolean closeInputStream) throws IOException {
         final LittleEndianInputStream dis = new LittleEndianInputStream(inputStream);
         if (dis.readInt() != MAGIC_NUMBER) {
@@ -199,24 +211,27 @@ public class TabixIndex implements Index {
     /**
      * Writes the index with BGZF.
      *
-     * @param tabixFile Where to write the index.
+     * @param tabixPath Where to write the index.
      */
     @Override
-    public void write(final File tabixFile) throws IOException {
-        try(final LittleEndianOutputStream los = new LittleEndianOutputStream(new BlockCompressedOutputStream(tabixFile))) {
+    public void write(final Path tabixPath) throws IOException {
+        try(final LittleEndianOutputStream los = new LittleEndianOutputStream(new BlockCompressedOutputStream(Files.newOutputStream(tabixPath), null))) {
             write(los);
         }
     }
 
     /**
-     * Writes to a file with appropriate name and directory based on feature file.
+     * Writes to a path with appropriate name and directory based on feature path.
      *
-     * @param featureFile File being indexed.
+     * @param featurePath Path being indexed.
+     * @throws IOException if featureFile is not a normal file.
      */
     @Override
-    public void writeBasedOnFeatureFile(final File featureFile) throws IOException {
-        if (!featureFile.isFile()) return;
-        write(new File(featureFile.getAbsolutePath() + TabixUtils.STANDARD_INDEX_EXTENSION));
+    public void writeBasedOnFeaturePath(final Path featurePath) throws IOException {
+        if (!Files.isRegularFile(featurePath)) {
+            throw new IOException("Cannot write based on a non-regular file: " + featurePath.toUri());
+        }
+        write(Tribble.tabixIndexPath(featurePath));
     }
 
     /**
diff --git a/src/main/java/htsjdk/tribble/readers/AsciiLineReader.java b/src/main/java/htsjdk/tribble/readers/AsciiLineReader.java
index 8f06205..6240b0b 100644
--- a/src/main/java/htsjdk/tribble/readers/AsciiLineReader.java
+++ b/src/main/java/htsjdk/tribble/readers/AsciiLineReader.java
@@ -17,13 +17,12 @@
  */
 package htsjdk.tribble.readers;
 
+import htsjdk.samtools.util.BlockCompressedInputStream;
 import htsjdk.samtools.util.LocationAware;
+import htsjdk.samtools.util.Log;
 import htsjdk.tribble.TribbleException;
 
 import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileReader;
 import java.io.IOException;
 import java.io.InputStream;
 
@@ -36,17 +35,36 @@ import java.io.InputStream;
  * @author jrobinso
  */
 public class AsciiLineReader implements LineReader, LocationAware {
+    private static final Log log = Log.getInstance(AsciiLineReader.class);
+
     private static final int BUFFER_OVERFLOW_INCREASE_FACTOR = 2;
     private static final byte LINEFEED = (byte) ('\n' & 0xff);
     private static final byte CARRIAGE_RETURN = (byte) ('\r' & 0xff);
 
-    PositionalBufferedStream is;
-    char[] lineBuffer;
+    private PositionalBufferedStream is;
+    private char[] lineBuffer;
+    private int lineTerminatorLength = -1;
+
+    protected AsciiLineReader() {};
 
+    /**
+     * Note: This class implements LocationAware, which requires preservation of virtual file pointers on BGZF inputs.
+     * However, if the inputStream wrapped by this class is a BlockCompressedInputStream, it violates that contract by
+     * wrapping the stream and returning positional file offsets instead.
+     *
+     * @deprecated 8/8/2017 use {@link #from}
+     */
+    @Deprecated
     public AsciiLineReader(final InputStream is){
+        // NOTE: This will wrap the input stream in a PositionalBufferedStream even if its already a PositionalBufferedStream
         this(new PositionalBufferedStream(is));
     }
 
+    /**
+     * @deprecated 8/8/2017 use {@link #from}
+     * @param is the {@link PositionalBufferedStream} input stream to be wrapped
+     */
+    @Deprecated
     public AsciiLineReader(final PositionalBufferedStream is) {
         this.is = is;
         // Allocate this only once, even though it is essentially a local variable of
@@ -55,8 +73,31 @@ public class AsciiLineReader implements LineReader, LocationAware {
     }
 
     /**
+     * Create an AsciiLineReader of the appropriate type for a given InputStream.
+     * @param inputStream An InputStream-derived class that implements BlockCompressedInputStream or PositionalBufferedStream
+     * @return AsciiLineReader that wraps inputStream
+     */
+    public static AsciiLineReader from(final InputStream inputStream) {
+        if (inputStream instanceof BlockCompressedInputStream) {
+            // For block compressed inputs, we need to ensure that no buffering takes place above the input stream to
+            // ensure that the correct (virtual file pointer) positions returned from this stream are preserved for
+            // the indexer. We can't used AsciiLineReader in this case since it wraps the input stream with a
+            // PositionalBufferedInputStream.
+            return new BlockCompressedAsciiLineReader((BlockCompressedInputStream) inputStream);
+        } else if (inputStream instanceof PositionalBufferedStream) {
+            // if this is already a PositionalBufferedStream, don't let AsciiLineReader wrap it with another one...
+            return new AsciiLineReader((PositionalBufferedStream)inputStream);
+        } else {
+            log.warn("Creating an indexable source for an AsciiFeatureCodec using a stream that is " +
+                    "neither a PositionalBufferedStream nor a BlockCompressedInputStream");
+            return new AsciiLineReader(new PositionalBufferedStream(inputStream)); // wrap the stream in a PositionalBufferedStream
+        }
+    }
+
+    /**
      * @return The position of the InputStream
      */
+    @Override
     public long getPosition(){
         if(is == null){
             throw new TribbleException("getPosition() called but no default stream was provided to the class on creation");
@@ -64,16 +105,29 @@ public class AsciiLineReader implements LineReader, LocationAware {
         return is.getPosition();
     }
 
+    /** Returns the length of the line terminator read after the last read line.  Returns either:
+     * -1 if no line has been read
+     * 0  after the last line if the last line in the file had no CR or LF line ending
+     * 1  if the line ended with CR or LF
+     * 2  if the line ended with CR and LF
+     */
+    public int getLineTerminatorLength() {
+        return this.lineTerminatorLength;
+    }
+
     /**
      * Read a line of text.  A line is considered to be terminated by any one
      * of a line feed ('\n'), a carriage return ('\r'), or a carriage return
      * followed immediately by a linefeed.
      *
+     * @deprecated 8/8/2017 use {@link #from} to create a new AsciiLineReader and {@link #readLine()}
+     *
      * @param stream the stream to read the next line from
      * @return A String containing the contents of the line or null if the
      *         end of the stream has been reached
      */
-    public final String readLine(final PositionalBufferedStream stream) throws IOException{
+    @Deprecated
+    public String readLine(final PositionalBufferedStream stream) throws IOException{
         int linePosition = 0;
 
         while (true) {
@@ -82,6 +136,7 @@ public class AsciiLineReader implements LineReader, LocationAware {
             if (b == -1) {
                 // eof reached.  Return the last line, or null if this is a new line
                 if (linePosition > 0) {
+                    this.lineTerminatorLength = 0;
                     return new String(lineBuffer, 0, linePosition);
                 } else {
                     return null;
@@ -92,11 +147,15 @@ public class AsciiLineReader implements LineReader, LocationAware {
             if (c == LINEFEED || c == CARRIAGE_RETURN) {
                 if (c == CARRIAGE_RETURN && stream.peek() == LINEFEED) {
                     stream.read(); // <= skip the trailing \n in case of \r\n termination
+                    this.lineTerminatorLength = 2;
+                }
+                else {
+                    this.lineTerminatorLength = 1;
                 }
 
                 return new String(lineBuffer, 0, linePosition);
             } else {
-                // Expand line buffer size if neccessary.  Reserve at least 2 characters
+                // Expand line buffer size if necessary.  Reserve at least 2 characters
                 // for potential line-terminators in return string
 
                 if (linePosition > (lineBuffer.length - 3)) {
@@ -113,9 +172,10 @@ public class AsciiLineReader implements LineReader, LocationAware {
     /**
      * Same as {@link #readLine(PositionalBufferedStream)} but uses the stream provided in the constructor
      *
-     * @return
+     * @return The next string, or null when input is exhausted.
      */
-    public final String readLine() throws IOException{
+    @Override
+    public String readLine() throws IOException{
         if ( is == null ){
             throw new TribbleException("readLine() called without an explicit stream argument but no default stream was provided to the class on creation");
         }
@@ -128,58 +188,5 @@ public class AsciiLineReader implements LineReader, LocationAware {
         lineBuffer = null;
     }
 
-    public static void main(final String[] args) throws Exception {
-        final File testFile = new File(args[0]);
-        final int iterations = Integer.valueOf(args[1]);
-        final boolean includeBufferedReader = Boolean.valueOf(args[2]);
-        long t0, lineCount, dt;
-        double rate;
-
-        System.out.printf("Testing %s%n", args[0]);
-        for (int i = 0; i < iterations; i++) {
-            if ( includeBufferedReader ) {
-                final BufferedReader reader2 = new BufferedReader(new FileReader(testFile));
-                t0 = System.currentTimeMillis();
-                lineCount = 0;
-                while (reader2.readLine() != null) {
-                    lineCount++;
-                }
-                dt = System.currentTimeMillis() - t0;
-                rate = ((double) lineCount) / dt;
-                printStatus("BufferedReader", lineCount, rate, dt);
-                reader2.close();
-            }
-
-            if ( includeBufferedReader ) {
-                final LongLineBufferedReader longLineBufferedReader = new LongLineBufferedReader(new BufferedReader(new FileReader(testFile)));
-                t0 = System.currentTimeMillis();
-                lineCount = 0;
-                while (longLineBufferedReader.readLine() != null) {
-                    lineCount++;
-                }
-                dt = System.currentTimeMillis() - t0;
-                rate = ((double) lineCount) / dt;
-                printStatus("BufferedReader", lineCount, rate, dt);
-                longLineBufferedReader.close();
-            }
-            
-            final PositionalBufferedStream pbs = new PositionalBufferedStream(new FileInputStream(testFile));
-            final LineReader reader = new AsciiLineReader(pbs);
-            t0 = System.currentTimeMillis();
-            lineCount = 0;
-            while (reader.readLine() != null) {
-                lineCount++;
-            }
-            dt = System.currentTimeMillis() - t0;
-            rate = ((double) lineCount) / dt;
-            printStatus("PositionalBufferedStream", lineCount, rate, dt);
-            pbs.close();
-        }
-    }
-
-    private static final void printStatus(final String name, final long lineCount, final double rate, final long dt) {
-        System.out.printf("%30s: %d lines read.  Rate = %.2e lines per second.  DT = %d%n", name, lineCount, rate, dt);
-        System.out.flush();
-    }
 }
 
diff --git a/src/main/java/htsjdk/tribble/readers/BlockCompressedAsciiLineReader.java b/src/main/java/htsjdk/tribble/readers/BlockCompressedAsciiLineReader.java
new file mode 100644
index 0000000..b4f05a3
--- /dev/null
+++ b/src/main/java/htsjdk/tribble/readers/BlockCompressedAsciiLineReader.java
@@ -0,0 +1,44 @@
+package htsjdk.tribble.readers;
+
+import htsjdk.samtools.util.BlockCompressedInputStream;
+import htsjdk.samtools.util.CloserUtil;
+
+import java.io.IOException;
+
+/**
+ * An AsciiLineReader implementation that wraps a BlockCompressedInputStream and provides no additional buffering.
+ * Useful for cases where we need to preserve virtual file pointers in the underlying stream, such as during indexing.
+ */
+class BlockCompressedAsciiLineReader extends AsciiLineReader {
+
+    final private BlockCompressedInputStream bcs;
+
+    public BlockCompressedAsciiLineReader(final BlockCompressedInputStream inputBlockCompressedStream) {
+        bcs = inputBlockCompressedStream;
+    }
+
+    /**
+     * Read a single line of input, advance the underlying stream only enough to read the line.
+     */
+    @Override
+    public String readLine() throws IOException {
+        return bcs.readLine();
+    };
+
+    @Override
+    public String readLine(final PositionalBufferedStream stream) {
+        throw new UnsupportedOperationException("A BlockCompressedAsciiLineReader class cannot be used to read from a PositionalBufferedStream");
+    }
+
+    @Override
+    public void close() {
+        if (bcs != null) {
+            CloserUtil.close(bcs);
+        }
+    }
+
+    @Override
+    public long getPosition() {
+        return bcs.getPosition();
+    }
+}
diff --git a/src/main/java/htsjdk/tribble/readers/LineReader.java b/src/main/java/htsjdk/tribble/readers/LineReader.java
index 969b6b5..2782afc 100644
--- a/src/main/java/htsjdk/tribble/readers/LineReader.java
+++ b/src/main/java/htsjdk/tribble/readers/LineReader.java
@@ -39,5 +39,6 @@ public interface LineReader extends Closeable {
     public String readLine() throws IOException;
 
 
+    @Override
     public void close();
 }
diff --git a/src/main/java/htsjdk/tribble/readers/LineReaderUtil.java b/src/main/java/htsjdk/tribble/readers/LineReaderUtil.java
deleted file mode 100644
index 83a0545..0000000
--- a/src/main/java/htsjdk/tribble/readers/LineReaderUtil.java
+++ /dev/null
@@ -1,56 +0,0 @@
-package htsjdk.tribble.readers;
-
-import java.io.InputStream;
-import java.io.StringReader;
-
-/**
- * A collection of factories for generating {@link LineReader}s.
- *
- * @Deprecated use {@link SynchronousLineReader} directly.
- * @author mccowan
- */
- at Deprecated
-public class LineReaderUtil {
-    @Deprecated
-    public enum LineReaderOption {
-        ASYNCHRONOUS,   //Note: the asynchronous option has no effect - this class does not provide asynchronous reading anymore
-        SYNCHRONOUS
-    }
-
-    /**
-     * Creates a line reader from the given stream.
-     * @Deprecated use <code>new SynchronousLineReader(stream);</code>
-     */
-    @Deprecated
-    public static LineReader fromBufferedStream(final InputStream stream) {
-        return new SynchronousLineReader(stream);
-    }
-
-    /**
-     * Creates a line reader from the given string reader.
-     * @Deprecated use <code>new SynchronousLineReader(stringReader);</code>
-     */
-    @Deprecated
-    public static LineReader fromStringReader(final StringReader stringReader) {
-        return new SynchronousLineReader(stringReader);
-    }
-
-    /**
-     * Creates a line reader from the given string reader.
-     * @Deprecated Asynchronous mode is not going to be supported. Use <code>new SynchronousLineReader(stringReader);</code>
-     */
-    @Deprecated
-    public static LineReader fromStringReader(final StringReader stringReader, final Object ignored) {
-        return new SynchronousLineReader(stringReader);
-    }
-
-    /**
-     * Convenience factory for composing a LineReader from an InputStream.
-     * @Deprecated Asynchronous mode is not going to be supported. Use <code>new SynchronousLineReader(bufferedStream);</code>
-     */
-    @Deprecated
-    public static LineReader fromBufferedStream(final InputStream bufferedStream, final Object ignored) {
-        return new SynchronousLineReader(bufferedStream);
-    }
-
-}
diff --git a/src/main/java/htsjdk/tribble/readers/LongLineBufferedReader.java b/src/main/java/htsjdk/tribble/readers/LongLineBufferedReader.java
index 5ca8e8d..dbb6593 100644
--- a/src/main/java/htsjdk/tribble/readers/LongLineBufferedReader.java
+++ b/src/main/java/htsjdk/tribble/readers/LongLineBufferedReader.java
@@ -153,6 +153,7 @@ public class LongLineBufferedReader extends Reader {
      *         end of the stream has been reached
      * @throws IOException If an I/O error occurs
      */
+    @Override
     public int read() throws IOException {
         synchronized (lock) {
             ensureOpen();
@@ -250,6 +251,7 @@ public class LongLineBufferedReader extends Reader {
      *         stream has been reached
      * @throws IOException If an I/O error occurs
      */
+    @Override
     public int read(char cbuf[], int off, int len) throws IOException {
         synchronized (lock) {
             ensureOpen();
@@ -362,6 +364,7 @@ public class LongLineBufferedReader extends Reader {
      * @throws IllegalArgumentException If <code>n</code> is negative.
      * @throws IOException              If an I/O error occurs
      */
+    @Override
     public long skip(long n) throws IOException {
         if (n < 0L) {
             throw new IllegalArgumentException("skip value is negative");
@@ -401,6 +404,7 @@ public class LongLineBufferedReader extends Reader {
      *
      * @throws IOException If an I/O error occurs
      */
+    @Override
     public boolean ready() throws IOException {
         synchronized (lock) {
             ensureOpen();
@@ -429,6 +433,7 @@ public class LongLineBufferedReader extends Reader {
     /**
      * Tells whether this stream supports the mark() operation, which it does.
      */
+    @Override
     public boolean markSupported() {
         return true;
     }
@@ -448,6 +453,7 @@ public class LongLineBufferedReader extends Reader {
      * @throws IllegalArgumentException If readAheadLimit is < 0
      * @throws IOException              If an I/O error occurs
      */
+    @Override
     public void mark(int readAheadLimit) throws IOException {
         if (readAheadLimit < 0) {
             throw new IllegalArgumentException("Read-ahead limit < 0");
@@ -466,6 +472,7 @@ public class LongLineBufferedReader extends Reader {
      * @throws IOException If the stream has never been marked,
      *                     or if the mark has been invalidated
      */
+    @Override
     public void reset() throws IOException {
         synchronized (lock) {
             ensureOpen();
@@ -478,6 +485,7 @@ public class LongLineBufferedReader extends Reader {
         }
     }
 
+    @Override
     public void close() throws IOException {
         synchronized (lock) {
             if (in == null)
diff --git a/src/main/java/htsjdk/tribble/readers/PositionalBufferedStream.java b/src/main/java/htsjdk/tribble/readers/PositionalBufferedStream.java
index ac642df..22cd6c1 100644
--- a/src/main/java/htsjdk/tribble/readers/PositionalBufferedStream.java
+++ b/src/main/java/htsjdk/tribble/readers/PositionalBufferedStream.java
@@ -28,7 +28,11 @@ import java.io.InputStreamReader;
 
 /**
  * A wrapper around an {@code InputStream} which performs it's own buffering, and keeps track of the position.
- * 
+ *
+ * TODO: This class implements Positional, which in turn extends LocationAware, which requires preservation of
+ * virtual file pointers on BGZF inputs. However, if the inputStream wrapped by this class is a BlockCompressedInputStream,
+ * it violates that contract by wrapping the stream and returning positional file offsets instead.
+ *
  * @author depristo
  */
 public final class PositionalBufferedStream extends InputStream implements Positional {
@@ -48,6 +52,7 @@ public final class PositionalBufferedStream extends InputStream implements Posit
         nextChar = nChars = 0;
     }
 
+    @Override
     public final long getPosition() {
         return position;
     }
@@ -129,6 +134,7 @@ public final class PositionalBufferedStream extends InputStream implements Posit
         return nChars;
     }
 
+    @Override
     public final long skip(final long nBytes) throws IOException {
         long remainingToSkip = nBytes;
 
@@ -156,6 +162,7 @@ public final class PositionalBufferedStream extends InputStream implements Posit
         return actuallySkipped;
     }
 
+    @Override
     public final void close() {
         try {
             is.close();
diff --git a/src/main/java/htsjdk/tribble/readers/TabixIteratorLineReader.java b/src/main/java/htsjdk/tribble/readers/TabixIteratorLineReader.java
index 49b6f0c..2a04725 100644
--- a/src/main/java/htsjdk/tribble/readers/TabixIteratorLineReader.java
+++ b/src/main/java/htsjdk/tribble/readers/TabixIteratorLineReader.java
@@ -40,6 +40,7 @@ public class TabixIteratorLineReader implements LineReader {
         this.iterator = iterator;
     }
 
+    @Override
     public String readLine() {
         try {
             return iterator != null ? iterator.next() : null;
@@ -48,6 +49,7 @@ public class TabixIteratorLineReader implements LineReader {
         }
     }
 
+    @Override
     public void close() {
         // Ignore -
     }
diff --git a/src/main/java/htsjdk/tribble/readers/TabixReader.java b/src/main/java/htsjdk/tribble/readers/TabixReader.java
index 8867d07..93a2ac3 100644
--- a/src/main/java/htsjdk/tribble/readers/TabixReader.java
+++ b/src/main/java/htsjdk/tribble/readers/TabixReader.java
@@ -34,25 +34,29 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
+import java.nio.channels.SeekableByteChannel;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Set;
+import java.util.function.Function;
 
 /**
  * @author Heng Li <hengli at broadinstitute.org>
  */
 public class TabixReader {
-    private String mFn;
-    private String mIdxFn;
-    private BlockCompressedInputStream mFp;
+    private final String mFilePath;
+    private final String mIndexPath;
+    private final Function<SeekableByteChannel, SeekableByteChannel> mIndexWrapper;
+    private final BlockCompressedInputStream mFp;
 
     private int mPreset;
     private int mSc;
     private int mBc;
     private int mEc;
     private int mMeta;
+    
     //private int mSkip; (not used)
     private String[] mSeq;
 
@@ -61,6 +65,8 @@ public class TabixReader {
     private static int MAX_BIN = 37450;
     //private static int TAD_MIN_CHUNK_GAP = 32768; (not used)
     private static int TAD_LIDX_SHIFT = 14;
+    /** default buffer size for <code>readLine()</code> */
+    private static final int DEFAULT_BUFFER_SIZE = 1000;
 
     protected static class TPair64 implements Comparable<TPair64> {
         long u, v;
@@ -75,6 +81,7 @@ public class TabixReader {
             v = p.v;
         }
 
+        @Override
         public int compareTo(final TPair64 p) {
             return u == p.u ? 0 : ((u < p.u) ^ (u < 0) ^ (p.u < 0)) ? -1 : 1; // unsigned 64-bit comparison
         }
@@ -96,40 +103,64 @@ public class TabixReader {
     }
 
     /**
-     * @param fn File name of the data file
+     * @param filePath path to the data file/uri
+     */
+    public TabixReader(final String filePath) throws IOException {
+        this(filePath, null, SeekableStreamFactory.getInstance().getBufferedStream(SeekableStreamFactory.getInstance().getStreamFor(filePath)));
+    }
+
+    /**
+     * @param filePath path to the of the data file/uri
+     * @param indexPath Full path to the index file. Auto-generated if null
      */
-    public TabixReader(final String fn) throws IOException {
-        this(fn, null, SeekableStreamFactory.getInstance().getBufferedStream(SeekableStreamFactory.getInstance().getStreamFor(fn)));
+    public TabixReader(final String filePath, final String indexPath) throws IOException {
+        this(filePath, indexPath, SeekableStreamFactory.getInstance().getBufferedStream(SeekableStreamFactory.getInstance().getStreamFor(filePath)));
     }
 
     /**
-     * @param fn File name of the data file
-     * @param idxFn Full path to the index file. Auto-generated if null
+     * @param filePath path to the data file/uri
+     * @param indexPath Full path to the index file. Auto-generated if null
+     * @param wrapper a wrapper to apply to the raw byte stream of the data file if is a uri representing a {@link java.nio.file.Path}
+     * @param indexWrapper a wrapper to apply to the raw byte stream of the index file if it is a uri representing a {@link java.nio.file.Path}
      */
-    public TabixReader(final String fn, final String idxFn) throws IOException {
-        this(fn, idxFn, SeekableStreamFactory.getInstance().getBufferedStream(SeekableStreamFactory.getInstance().getStreamFor(fn)));
+    public TabixReader(final String filePath, final String indexPath,
+                       final Function<SeekableByteChannel, SeekableByteChannel> wrapper,
+                       final Function<SeekableByteChannel, SeekableByteChannel> indexWrapper) throws IOException {
+        this(filePath, indexPath, SeekableStreamFactory.getInstance().getBufferedStream(SeekableStreamFactory.getInstance().getStreamFor(filePath, wrapper)), indexWrapper);
     }
 
+
     /**
-     * @param fn File name of the data file  (used for error messages only)
+     * @param filePath Path to the data file  (used for error messages only)
      * @param stream Seekable stream from which the data is read
      */
-    public TabixReader(final String fn, SeekableStream stream) throws IOException {
-        this(fn, null, stream);
+    public TabixReader(final String filePath, SeekableStream stream) throws IOException {
+        this(filePath, null, stream);
     }
 
     /**
-     * @param fn File name of the data file  (used for error messages only)
-     * @param idxFn Full path to the index file. Auto-generated if null
+     * @param filePath Path to the data file  (used for error messages only)
+     * @param indexPath Full path to the index file. Auto-generated if null
      * @param stream Seekable stream from which the data is read
      */
-    public TabixReader(final String fn, final String idxFn, SeekableStream stream) throws IOException {
-        mFn = fn;
+    public TabixReader(final String filePath, final String indexPath, SeekableStream stream) throws IOException {
+        this(filePath, indexPath, stream, null);
+    }
+
+    /**
+     * @param filePath Path to the data file (used for error messages only)
+     * @param indexPath Full path to the index file. Auto-generated if null
+     * @param indexWrapper a wrapper to apply to the raw byte stream of the index file if it is a uri representing a {@link java.nio.file.Path}
+     * @param stream Seekable stream from which the data is read
+     */
+    public TabixReader(final String filePath, final String indexPath, SeekableStream stream, Function<SeekableByteChannel, SeekableByteChannel> indexWrapper) throws IOException {
+        mFilePath = filePath;
         mFp = new BlockCompressedInputStream(stream);
-        if(idxFn == null){
-            mIdxFn = ParsingUtils.appendToPath(fn, TabixUtils.STANDARD_INDEX_EXTENSION);
+        mIndexWrapper = indexWrapper;
+        if(indexPath == null){
+            mIndexPath = ParsingUtils.appendToPath(filePath, TabixUtils.STANDARD_INDEX_EXTENSION);
         } else {
-            mIdxFn = idxFn;
+            mIndexPath = indexPath;
         }
         readIndex();
     }
@@ -137,7 +168,7 @@ public class TabixReader {
     /** return the source (filename/URL) of that reader */
     public String getSource()
         {
-        return this.mFn;
+        return this.mFilePath;
         }
 
     private static int reg2bins(final int beg, final int _end, final int[] list) {
@@ -161,13 +192,25 @@ public class TabixReader {
     }
 
     public static long readLong(final InputStream is) throws IOException {
-        byte[] buf = new byte[8];
+        final byte[] buf = new byte[8];
         is.read(buf);
         return ByteBuffer.wrap(buf).order(ByteOrder.LITTLE_ENDIAN).getLong();
     }
 
     public static String readLine(final InputStream is) throws IOException {
-        StringBuffer buf = new StringBuffer();
+        return readLine(is, DEFAULT_BUFFER_SIZE);
+    }
+
+    /** 
+     * reads a line with a defined buffer-size 
+     * 
+     * @param is the input stream
+     * @param bufferCapacity the buffer size, must be greater than 0
+     * @return the line or null if there is no more input
+     * @throws IOException
+     */
+    private static String readLine(final InputStream is, final int bufferCapacity) throws IOException {
+        final StringBuffer buf = new StringBuffer(bufferCapacity);
         int c;
         while ((c = is.read()) >= 0 && c != '\n')
             buf.append((char) c);
@@ -175,19 +218,21 @@ public class TabixReader {
         return buf.toString();
     }
 
+    
+    
     /**
      * Read the Tabix index from a file
      *
      * @param fp File pointer
      */
-    private void readIndex(SeekableStream fp) throws IOException {
+    private void readIndex(final SeekableStream fp) throws IOException {
         if (fp == null) return;
-        BlockCompressedInputStream is = new BlockCompressedInputStream(fp);
+        final  BlockCompressedInputStream is = new BlockCompressedInputStream(fp);
         byte[] buf = new byte[4];
 
         is.read(buf, 0, 4); // read "TBI\1"
         mSeq = new String[readInt(is)]; // # sequences
-        mChr2tid = new HashMap<String, Integer>();
+        mChr2tid = new HashMap<String, Integer>( this.mSeq.length );
         mPreset = readInt(is);
         mSc = readInt(is);
         mBc = readInt(is);
@@ -202,9 +247,9 @@ public class TabixReader {
             if (buf[i] == 0) {
                 byte[] b = new byte[i - j];
                 System.arraycopy(buf, j, b, 0, b.length);
-                String s = new String(b);
-                mChr2tid.put(s, k);
-                mSeq[k++] = s;
+                final String contig = new String(b);
+                mChr2tid.put(contig, k);
+                mSeq[k++] = contig;
                 j = i + 1;
             }
         }
@@ -238,20 +283,20 @@ public class TabixReader {
      * Read the Tabix index from the default file.
      */
     private void readIndex() throws IOException {
-        ISeekableStreamFactory ssf = SeekableStreamFactory.getInstance();
-        readIndex(ssf.getBufferedStream(ssf.getStreamFor(mIdxFn), 128000));
+        final ISeekableStreamFactory ssf = SeekableStreamFactory.getInstance();
+        readIndex(ssf.getBufferedStream(ssf.getStreamFor(mIndexPath, mIndexWrapper), 128000));
     }
 
     /**
      * Read one line from the data file.
      */
     public String readLine() throws IOException {
-        return readLine(mFp);
+        return readLine(mFp, DEFAULT_BUFFER_SIZE);
     }
 
     /** return chromosome ID or -1 if it is unknown */
     public int chr2tid(final String chr) {
-        Integer tid=this.mChr2tid.get(chr);
+       final Integer tid = this.mChr2tid.get(chr);
        return tid==null?-1:tid;
     }
 
@@ -386,11 +431,10 @@ public class TabixReader {
                     ++i;
                 }
                 String s;
-                if ((s = readLine(mFp)) != null) {
+                if ((s = readLine(mFp, DEFAULT_BUFFER_SIZE)) != null) {
                     TIntv intv;
-                    char[] str = s.toCharArray();
                     curr_off = mFp.getFilePointer();
-                    if (str.length == 0 || str[0] == mMeta) continue;
+                    if (s.isEmpty() || s.charAt(0) == mMeta) continue;
                     intv = getIntv(s);
                     if (intv.tid != tid || intv.beg >= end) break; // no need to proceed
                     else if (intv.end > beg && intv.beg < end) return s; // overlap; return
@@ -488,26 +532,6 @@ public class TabixReader {
        return query(tid, start, end);
    }
 
-    public static void main(String[] args) {
-        if (args.length < 1) {
-            System.out.println("Usage: java -cp .:sam.jar TabixReader <in.gz> [region]");
-            System.exit(1);
-        }
-        try {
-            TabixReader tr = new TabixReader(args[0]);
-            String s;
-            if (args.length == 1) { // no region is specified; print the whole file
-                while ((s = tr.readLine()) != null)
-                    System.out.println(s);
-            } else { // a region is specified; random access
-                TabixReader.Iterator iter = tr.query(args[1]); // get the iterator
-                while ((s = iter.next()) != null)
-                    System.out.println(s);
-            }
-        } catch (IOException e) {
-        }
-    }
-
     // ADDED BY JTR
     public void close() {
         if(mFp != null) {
diff --git a/src/main/java/htsjdk/tribble/util/HTTPHelper.java b/src/main/java/htsjdk/tribble/util/HTTPHelper.java
index 90e6228..cdd6b27 100644
--- a/src/main/java/htsjdk/tribble/util/HTTPHelper.java
+++ b/src/main/java/htsjdk/tribble/util/HTTPHelper.java
@@ -57,6 +57,7 @@ public class HTTPHelper implements URLHelper {
         proxy = p;
     }
 
+    @Override
     public URL getUrl() {
         return url;
     }
@@ -65,6 +66,7 @@ public class HTTPHelper implements URLHelper {
      * @return content length of the resource
      * @throws IOException
      */
+    @Override
     public long getContentLength() throws IOException {
 
         HttpURLConnection con = null;
@@ -84,6 +86,7 @@ public class HTTPHelper implements URLHelper {
     }
 
 
+    @Override
     public InputStream openInputStream() throws IOException {
 
         HttpURLConnection connection = openConnection();
@@ -98,7 +101,11 @@ public class HTTPHelper implements URLHelper {
      * @param end   end of range ni bytes
      * @return
      * @throws IOException
+     *
+     * @deprecated  since 12/10/14  Will be removed in a future release, as is somewhat fragile
+     * and not used.
      */
+    @Override
     @Deprecated
     public InputStream openInputStreamForRange(long start, long end) throws IOException {
 
@@ -118,6 +125,7 @@ public class HTTPHelper implements URLHelper {
         return connection;
     }
 
+    @Override
     public boolean exists() throws IOException {
         HttpURLConnection con = null;
         try {
diff --git a/src/main/java/htsjdk/tribble/util/LittleEndianOutputStream.java b/src/main/java/htsjdk/tribble/util/LittleEndianOutputStream.java
index 9bec071..eab2f87 100644
--- a/src/main/java/htsjdk/tribble/util/LittleEndianOutputStream.java
+++ b/src/main/java/htsjdk/tribble/util/LittleEndianOutputStream.java
@@ -25,11 +25,13 @@ public final class LittleEndianOutputStream extends FilterOutputStream {
         super(out);
     }
 
+    @Override
     public void write(int b) throws IOException {
         out.write(b);
         written++;
     }
 
+    @Override
     public void write(byte[] data, int offset, int length)
             throws IOException {
         out.write(data, offset, length);
diff --git a/src/main/java/htsjdk/tribble/util/ParsingUtils.java b/src/main/java/htsjdk/tribble/util/ParsingUtils.java
index 2a88ed7..70c3a3d 100644
--- a/src/main/java/htsjdk/tribble/util/ParsingUtils.java
+++ b/src/main/java/htsjdk/tribble/util/ParsingUtils.java
@@ -23,6 +23,8 @@
  */
 package htsjdk.tribble.util;
 
+import htsjdk.samtools.seekablestream.SeekablePathStream;
+import htsjdk.samtools.util.IOUtil;
 import java.awt.Color;
 import java.io.File;
 import java.io.FileInputStream;
@@ -31,6 +33,8 @@ import java.io.InputStream;
 import java.lang.reflect.Constructor;
 import java.net.MalformedURLException;
 import java.net.URL;
+import java.nio.channels.SeekableByteChannel;
+import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -40,6 +44,7 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.WeakHashMap;
+import java.util.function.Function;
 
 /**
  * @author jrobinso
@@ -76,18 +81,37 @@ public class ParsingUtils {
     }
 
 
+    /**
+     * @return an input stream from the given path
+     * @throws IOException
+     */
     public static InputStream openInputStream(String path)
             throws IOException {
+        return openInputStream(path, null);
+    }
 
-        InputStream inputStream;
+    /**
+     * open an input stream from the given path and wrap the raw byte stream with a wrapper if given
+     *
+     * the wrapper will only be applied to paths that are not http, https, ftp, or file, i.e. any {@link java.nio.file.Path}
+     * using a custom filesystem plugin
+     * @param path a uri like string
+     * @param wrapper to wrap the input stream in, may be used to implement caching or prefetching, etc
+     * @return
+     * @throws IOException
+     */
+    public static InputStream openInputStream(String path, Function<SeekableByteChannel, SeekableByteChannel> wrapper)
+            throws IOException {
 
+        final InputStream inputStream;
         if (path.startsWith("http:") || path.startsWith("https:") || path.startsWith("ftp:")) {
             inputStream = getURLHelper(new URL(path)).openInputStream();
+        } else if (IOUtil.hasScheme(path)) {
+            inputStream = new SeekablePathStream(IOUtil.getPath(path), wrapper);
         } else {
             File file = new File(path);
             inputStream = new FileInputStream(file);
         }
-
         return inputStream;
     }
 
@@ -400,6 +424,8 @@ public class ParsingUtils {
             }
             URLHelper helper = getURLHelper(url);
             return helper.exists();
+        } else if (IOUtil.hasScheme(resource)) {
+            return Files.exists(IOUtil.getPath(resource));
         } else {
             return (new File(resource)).exists();
         }
diff --git a/src/main/java/htsjdk/tribble/util/TabixUtils.java b/src/main/java/htsjdk/tribble/util/TabixUtils.java
index aa365cd..5ae9f8a 100644
--- a/src/main/java/htsjdk/tribble/util/TabixUtils.java
+++ b/src/main/java/htsjdk/tribble/util/TabixUtils.java
@@ -55,6 +55,7 @@ public class TabixUtils {
             v = p.v;
         }
 
+        @Override
         public int compareTo(final TPair64 p) {
             return u == p.u ? 0 : ((u < p.u) ^ (u < 0) ^ (p.u < 0)) ? -1 : 1; // unsigned 64-bit comparison
         }
diff --git a/src/main/java/htsjdk/variant/bcf2/BCF2Codec.java b/src/main/java/htsjdk/variant/bcf2/BCF2Codec.java
index a945764..4926c80 100644
--- a/src/main/java/htsjdk/variant/bcf2/BCF2Codec.java
+++ b/src/main/java/htsjdk/variant/bcf2/BCF2Codec.java
@@ -25,6 +25,7 @@
 
 package htsjdk.variant.bcf2;
 
+import htsjdk.samtools.util.IOUtil;
 import htsjdk.tribble.BinaryFeatureCodec;
 import htsjdk.tribble.Feature;
 import htsjdk.tribble.FeatureCodecHeader;
@@ -44,10 +45,8 @@ import htsjdk.variant.vcf.VCFContigHeaderLine;
 import htsjdk.variant.vcf.VCFHeader;
 import htsjdk.variant.vcf.VCFHeaderLineType;
 
-import java.io.ByteArrayInputStream;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
+import java.io.*;
+import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -207,21 +206,11 @@ public final class BCF2Codec extends BinaryFeatureCodec<VariantContext> {
 
     @Override
     public boolean canDecode( final String path ) {
-        FileInputStream fis = null;
-        try {
-            fis = new FileInputStream(path);
+        try (InputStream fis = Files.newInputStream(IOUtil.getPath(path)) ){
             final BCFVersion version = BCFVersion.readBCFVersion(fis);
             return version != null && version.getMajorVersion() == ALLOWED_MAJOR_VERSION;
-        } catch ( FileNotFoundException e ) {
-            return false;
-        } catch ( IOException e ) {
+        } catch ( final IOException e ) {
             return false;
-        } finally {
-            try {
-                if ( fis != null ) fis.close();
-            } catch ( IOException e ) {
-                // do nothing
-            }
         }
     }
 
diff --git a/src/main/java/htsjdk/variant/variantcontext/Allele.java b/src/main/java/htsjdk/variant/variantcontext/Allele.java
index 44fc6aa..71aa201 100644
--- a/src/main/java/htsjdk/variant/variantcontext/Allele.java
+++ b/src/main/java/htsjdk/variant/variantcontext/Allele.java
@@ -523,6 +523,7 @@ public class Allele implements Comparable<Allele>, Serializable {
             return null;    // couldn't find anything
     }
 
+    @Override
     public int compareTo(final Allele other) {
         if ( isReference() && other.isNonReference() )
             return -1;
diff --git a/src/main/java/htsjdk/variant/variantcontext/FastGenotype.java b/src/main/java/htsjdk/variant/variantcontext/FastGenotype.java
index 665e672..495ba41 100644
--- a/src/main/java/htsjdk/variant/variantcontext/FastGenotype.java
+++ b/src/main/java/htsjdk/variant/variantcontext/FastGenotype.java
@@ -154,6 +154,7 @@ public final class FastGenotype extends Genotype {
     //
     // ---------------------------------------------------------------------------------------------------------
 
+    @Override
     public Map<String, Object> getExtendedAttributes() {
         return extendedAttributes;
     }
diff --git a/src/main/java/htsjdk/variant/variantcontext/GenotypeLikelihoods.java b/src/main/java/htsjdk/variant/variantcontext/GenotypeLikelihoods.java
index ee3e08d..605f298 100644
--- a/src/main/java/htsjdk/variant/variantcontext/GenotypeLikelihoods.java
+++ b/src/main/java/htsjdk/variant/variantcontext/GenotypeLikelihoods.java
@@ -183,6 +183,10 @@ public class GenotypeLikelihoods {
      * If you <strong>know</strong> you're biallelic, use <code>getGQLog10FromLikelihoods</code> directly.
      * @param genotype - actually a genotype type (no call, hom ref, het, hom var)
      * @return an unsafe quantity that could be negative. In the bi-allelic case, the GQ resulting from best minus next best (if the type is the best).
+     *
+     * @deprecated since 2/5/13 use
+     * {@link GenotypeLikelihoods#getLog10GQ(Genotype, VariantContext)} or
+     * {@link GenotypeLikelihoods#getLog10GQ(Genotype, List)}
      */
     @Deprecated
     public double getLog10GQ(GenotypeType genotype){
@@ -554,6 +558,8 @@ public class GenotypeLikelihoods {
      *
      * @param PLindex   the PL index
      * @return the allele index pair
+     *
+     * @deprecated since 2/5/13
      */
     @Deprecated
     public static GenotypeLikelihoodsAllelePair getAllelePairUsingDeprecatedOrdering(final int PLindex) {
diff --git a/src/main/java/htsjdk/variant/variantcontext/JEXLMap.java b/src/main/java/htsjdk/variant/variantcontext/JEXLMap.java
index 33ec595..c4664b0 100644
--- a/src/main/java/htsjdk/variant/variantcontext/JEXLMap.java
+++ b/src/main/java/htsjdk/variant/variantcontext/JEXLMap.java
@@ -78,6 +78,7 @@ class JEXLMap implements Map<JexlVCMatchExp, Boolean> {
      * @throws IllegalArgumentException when {@code key} is {@code null} or
      *                                  when any of the JexlVCMatchExp (i.e. keys) contains invalid Jexl expressions.
      */
+    @Override
     public Boolean get(Object key) {
         if (key == null) {
             throw new IllegalArgumentException("Query key is null");
@@ -101,8 +102,10 @@ class JEXLMap implements Map<JexlVCMatchExp, Boolean> {
      * @param o the key
      * @return true if we have a value for that key
      */
+    @Override
     public boolean containsKey(Object o) { return jexl.containsKey(o); }
 
+    @Override
     public Set<JexlVCMatchExp> keySet() {
         return jexl.keySet();
     }
@@ -119,6 +122,7 @@ class JEXLMap implements Map<JexlVCMatchExp, Boolean> {
      *
      * @throws IllegalArgumentException when any of the JexlVCMatchExp (i.e. keys) contains invalid Jexl expressions.
      */
+    @Override
     public Collection<Boolean> values() {
         for (final JexlVCMatchExp exp : jexl.keySet()) {
             jexl.computeIfAbsent(exp, k -> evaluateExpression(exp));
@@ -129,16 +133,20 @@ class JEXLMap implements Map<JexlVCMatchExp, Boolean> {
     /**
      * @return the number of keys, i.e. {@link JexlVCMatchExp}'s held by this mapping.
      */
+    @Override
     public int size() {
         return jexl.size();
     }
 
+    @Override
     public boolean isEmpty() { return this.jexl.isEmpty(); }
 
+    @Override
     public Boolean put(JexlVCMatchExp jexlVCMatchExp, Boolean aBoolean) {
         return jexl.put(jexlVCMatchExp, aBoolean);
     }
 
+    @Override
     public void putAll(Map<? extends JexlVCMatchExp, ? extends Boolean> map) {
         jexl.putAll(map);
     }
@@ -207,21 +215,25 @@ class JEXLMap implements Map<JexlVCMatchExp, Boolean> {
 
     // this doesn't make much sense to implement, boolean doesn't offer too much variety to deal
     // with evaluating every key in the internal map.
+    @Override
     public boolean containsValue(Object o) {
         throw new UnsupportedOperationException("containsValue() not supported on a JEXLMap");
     }
 
     // this doesn't make much sense
+    @Override
     public Boolean remove(Object o) {
         throw new UnsupportedOperationException("remove() not supported on a JEXLMap");
     }
 
 
+    @Override
     public Set<Entry<JexlVCMatchExp, Boolean>> entrySet() {
         throw new UnsupportedOperationException("entrySet() not supported on a JEXLMap");
     }
 
     // nope
+    @Override
     public void clear() {
         throw new UnsupportedOperationException("clear() not supported on a JEXLMap");
     }
diff --git a/src/main/java/htsjdk/variant/variantcontext/VariantContext.java b/src/main/java/htsjdk/variant/variantcontext/VariantContext.java
index 55825fb..6def89e 100644
--- a/src/main/java/htsjdk/variant/variantcontext/VariantContext.java
+++ b/src/main/java/htsjdk/variant/variantcontext/VariantContext.java
@@ -1663,6 +1663,7 @@ public class VariantContext implements Feature, Serializable {
      * underlying vcf file, VariantContexts representing the same biological event may have different start positions depending on the
      * specifics of the vcf file they are derived from
      */
+    @Override
     public int getStart() {
         return (int)start;
     }
@@ -1673,6 +1674,7 @@ public class VariantContext implements Feature, Serializable {
      * For VariantContexts with a single alternate allele, if that allele is an insertion, the end position will be on the reference base
      * before the insertion event.  If the single alt allele is a deletion, the end will be on the final deleted reference base.
      */
+    @Override
     public int getEnd() {
         return (int)stop;
     }
diff --git a/src/main/java/htsjdk/variant/variantcontext/VariantContextUtils.java b/src/main/java/htsjdk/variant/variantcontext/VariantContextUtils.java
index face55b..6988b4c 100644
--- a/src/main/java/htsjdk/variant/variantcontext/VariantContextUtils.java
+++ b/src/main/java/htsjdk/variant/variantcontext/VariantContextUtils.java
@@ -52,15 +52,12 @@ public class VariantContextUtils {
 
     /** Use a {@link Lazy} {@link JexlEngine} instance to avoid class-loading issues. (Applications that access this class are otherwise
      * forced to build a {@link JexlEngine} instance, which depends on some apache logging libraries that mightn't be packaged.) */
-    final public static Lazy<JexlEngine> engine = new Lazy<JexlEngine>(new Lazy.LazyInitializer<JexlEngine>() {
-        @Override
-        public JexlEngine make() {
-            final JexlEngine jexl = new JexlEngine();
-            jexl.setSilent(false); // will throw errors now for selects that don't evaluate properly
-            jexl.setLenient(false);
-            jexl.setDebug(false);
-            return jexl;
-        }
+    final public static Lazy<JexlEngine> engine = new Lazy<>(() -> {
+        final JexlEngine jexl = new JexlEngine();
+        jexl.setSilent(false); // will throw errors now for selects that don't evaluate properly
+        jexl.setLenient(false);
+        jexl.setDebug(false);
+        return jexl;
     });
     private final static boolean ASSUME_MISSING_FIELDS_ARE_STRINGS = false;
     
diff --git a/src/main/java/htsjdk/variant/variantcontext/VariantJEXLContext.java b/src/main/java/htsjdk/variant/variantcontext/VariantJEXLContext.java
index 34cde33..0125863 100644
--- a/src/main/java/htsjdk/variant/variantcontext/VariantJEXLContext.java
+++ b/src/main/java/htsjdk/variant/variantcontext/VariantJEXLContext.java
@@ -76,6 +76,7 @@ class VariantJEXLContext implements JexlContext {
         this.vc = vc;
     }
 
+    @Override
     public Object get(String name) {
         Object result = null;
         if ( attributes.containsKey(name) ) { // dynamic resolution of name -> value via map
@@ -89,6 +90,7 @@ class VariantJEXLContext implements JexlContext {
         return result;
     }
 
+    @Override
     public boolean has(String name) {
         return get(name) != null;
     }
@@ -96,6 +98,7 @@ class VariantJEXLContext implements JexlContext {
     /**
      * @throws UnsupportedOperationException
      */
+    @Override
     public void	set(String name, Object value) {
         throw new UnsupportedOperationException("remove() not supported on a VariantJEXLContext");
     }
diff --git a/src/main/java/htsjdk/variant/variantcontext/filter/FilteringIterator.java b/src/main/java/htsjdk/variant/variantcontext/filter/FilteringIterator.java
index 04609a8..44362d6 100644
--- a/src/main/java/htsjdk/variant/variantcontext/filter/FilteringIterator.java
+++ b/src/main/java/htsjdk/variant/variantcontext/filter/FilteringIterator.java
@@ -36,7 +36,7 @@ import java.util.Iterator;
  *
  * @author Yossi Farjoun
  *
- * use {@link FilteringVariantContextIterator} instead
+ * @deprecated  since 2/29/16 use {@link FilteringVariantContextIterator} instead
  */
 
 @Deprecated
diff --git a/src/main/java/htsjdk/variant/variantcontext/writer/AsyncVariantContextWriter.java b/src/main/java/htsjdk/variant/variantcontext/writer/AsyncVariantContextWriter.java
index 4604316..66b3220 100644
--- a/src/main/java/htsjdk/variant/variantcontext/writer/AsyncVariantContextWriter.java
+++ b/src/main/java/htsjdk/variant/variantcontext/writer/AsyncVariantContextWriter.java
@@ -39,10 +39,12 @@ public class AsyncVariantContextWriter extends AbstractAsyncWriter<VariantContex
 
     @Override protected final String getThreadNamePrefix() { return "VariantContextWriterThread-"; }
 
+    @Override
     public void add(final VariantContext vc) {
         write(vc);
     }
 
+    @Override
     public void writeHeader(final VCFHeader header) {
         this.underlyingWriter.writeHeader(header);
     }
@@ -51,4 +53,10 @@ public class AsyncVariantContextWriter extends AbstractAsyncWriter<VariantContex
     public boolean checkError() {
         return false;
     }
+
+    @Override
+    public void setHeader(final VCFHeader header) {
+        this.underlyingWriter.setHeader(header);
+    }
+
 }
diff --git a/src/main/java/htsjdk/variant/variantcontext/writer/BCF2FieldWriter.java b/src/main/java/htsjdk/variant/variantcontext/writer/BCF2FieldWriter.java
index f9dd458..9582e00 100644
--- a/src/main/java/htsjdk/variant/variantcontext/writer/BCF2FieldWriter.java
+++ b/src/main/java/htsjdk/variant/variantcontext/writer/BCF2FieldWriter.java
@@ -231,6 +231,7 @@ public abstract class BCF2FieldWriter {
             super(header, fieldEncoder);
         }
 
+        @Override
         public void addGenotype(final BCF2Encoder encoder, final VariantContext vc, final Genotype g) throws IOException {
             final String fieldValue = g.getFilters();
             getFieldEncoder().encodeValue(encoder, fieldValue, encodingType, nValuesPerGenotype);
diff --git a/src/main/java/htsjdk/variant/variantcontext/writer/BCF2Writer.java b/src/main/java/htsjdk/variant/variantcontext/writer/BCF2Writer.java
index 8c16aac..cf578d8 100644
--- a/src/main/java/htsjdk/variant/variantcontext/writer/BCF2Writer.java
+++ b/src/main/java/htsjdk/variant/variantcontext/writer/BCF2Writer.java
@@ -122,6 +122,10 @@ class BCF2Writer extends IndexingVariantContextWriter {
     private VCFHeader lastVCFHeaderOfUnparsedGenotypes = null;
     private boolean canPassOnUnparsedGenotypeDataForLastVCFHeader = false;
 
+    // is the header or body written to the output stream?
+    private boolean outputHasBeenWritten;
+
+
     public BCF2Writer(final File location, final OutputStream output, final SAMSequenceDictionary refDict,
                       final boolean enableOnTheFlyIndexing, final boolean doNotWriteGenotypes) {
         super(writerName(location, output), location, output, refDict, enableOnTheFlyIndexing);
@@ -145,39 +149,13 @@ class BCF2Writer extends IndexingVariantContextWriter {
 
     @Override
     public void writeHeader(VCFHeader header) {
-        // make sure the header is sorted correctly
-        header = new VCFHeader(header.getMetaDataInSortedOrder(), header.getGenotypeSamples());
-
-        // create the config offsets map
-        if ( header.getContigLines().isEmpty() ) {
-            if ( ALLOW_MISSING_CONTIG_LINES ) {
-                if ( GeneralUtils.DEBUG_MODE_ENABLED ) {
-                    System.err.println("No contig dictionary found in header, falling back to reference sequence dictionary");
-                }
-                createContigDictionary(VCFUtils.makeContigHeaderLines(getRefDict(), null));
-            } else {
-                throw new IllegalStateException("Cannot write BCF2 file with missing contig lines");
-            }
-        } else {
-            createContigDictionary(header.getContigLines());
-        }
-
-        // set up the map from dictionary string values -> offset
-        final ArrayList<String> dict = BCF2Utils.makeDictionary(header);
-        for ( int i = 0; i < dict.size(); i++ ) {
-            stringDictionaryMap.put(dict.get(i), i);
-        }
-
-        sampleNames = header.getGenotypeSamples().toArray(new String[header.getNGenotypeSamples()]);
-
-        // setup the field encodings
-        fieldManager.setup(header, encoder, stringDictionaryMap);
+        setHeader(header);
 
         try {
             // write out the header into a byte stream, get its length, and write everything to the file
             final ByteArrayOutputStream capture = new ByteArrayOutputStream();
             final OutputStreamWriter writer = new OutputStreamWriter(capture);
-            this.header = VCFWriter.writeHeader(header, writer, doNotWriteGenotypes, VCFWriter.getVersionLine(), "BCF2 stream");
+            this.header = VCFWriter.writeHeader(this.header, writer, VCFWriter.getVersionLine(), "BCF2 stream");
             writer.append('\0'); // the header is null terminated by a byte
             writer.close();
 
@@ -185,6 +163,7 @@ class BCF2Writer extends IndexingVariantContextWriter {
             new BCFVersion(MAJOR_VERSION, MINOR_VERSION).write(outputStream);
             BCF2Type.INT32.write(headerBytes.length, outputStream);
             outputStream.write(headerBytes);
+            outputHasBeenWritten = true;
         } catch (IOException e) {
             throw new RuntimeIOException("BCF2 stream: Got IOException while trying to write BCF2 header", e);
         }
@@ -204,6 +183,7 @@ class BCF2Writer extends IndexingVariantContextWriter {
 
             // write the two blocks to disk
             writeBlock(infoBlock, genotypesBlock);
+            outputHasBeenWritten = true;
         }
         catch ( IOException e ) {
             throw new RuntimeIOException("Error writing record to BCF2 file: " + vc.toString(), e);
@@ -221,6 +201,39 @@ class BCF2Writer extends IndexingVariantContextWriter {
         super.close();
     }
 
+    @Override
+    public void setHeader(final VCFHeader header) {
+        if (outputHasBeenWritten) {
+            throw new IllegalStateException("The header cannot be modified after the header or variants have been written to the output stream.");
+        }
+        // make sure the header is sorted correctly
+        this.header = doNotWriteGenotypes ? new VCFHeader(header.getMetaDataInSortedOrder()) : new VCFHeader(
+                header.getMetaDataInSortedOrder(), header.getGenotypeSamples());
+        // create the config offsets map
+        if ( this.header.getContigLines().isEmpty() ) {
+            if ( ALLOW_MISSING_CONTIG_LINES ) {
+                if ( GeneralUtils.DEBUG_MODE_ENABLED ) {
+                    System.err.println("No contig dictionary found in header, falling back to reference sequence dictionary");
+                }
+                createContigDictionary(VCFUtils.makeContigHeaderLines(getRefDict(), null));
+            } else {
+                throw new IllegalStateException("Cannot write BCF2 file with missing contig lines");
+            }
+        } else {
+            createContigDictionary(this.header.getContigLines());
+        }
+        // set up the map from dictionary string values -> offset
+        final ArrayList<String> dict = BCF2Utils.makeDictionary(this.header);
+        for ( int i = 0; i < dict.size(); i++ ) {
+            stringDictionaryMap.put(dict.get(i), i);
+        }
+
+        sampleNames = this.header.getGenotypeSamples().toArray(new String[this.header.getNGenotypeSamples()]);
+        // setup the field encodings
+        fieldManager.setup(this.header, encoder, stringDictionaryMap);
+
+    }
+
     // --------------------------------------------------------------------------------
     //
     // implicit block
diff --git a/src/main/java/htsjdk/variant/variantcontext/writer/IndexingVariantContextWriter.java b/src/main/java/htsjdk/variant/variantcontext/writer/IndexingVariantContextWriter.java
index 6a77f6b..fa3f6ba 100644
--- a/src/main/java/htsjdk/variant/variantcontext/writer/IndexingVariantContextWriter.java
+++ b/src/main/java/htsjdk/variant/variantcontext/writer/IndexingVariantContextWriter.java
@@ -60,6 +60,8 @@ abstract class IndexingVariantContextWriter implements VariantContextWriter {
         this.refDict = refDict;
     }
 
+    static String DEFAULT_READER_NAME = "Reader Name";
+
     /**
      * Create a VariantContextWriter with an associated index using the default index creator
      *
@@ -126,11 +128,13 @@ abstract class IndexingVariantContextWriter implements VariantContextWriter {
         return name;
     }
 
+    @Override
     public abstract void writeHeader(VCFHeader header);
 
     /**
      * attempt to close the VCF file
      */
+    @Override
     public void close() {
         try {
             // close the underlying output stream
@@ -161,6 +165,7 @@ abstract class IndexingVariantContextWriter implements VariantContextWriter {
      *
      * @param vc      the Variant Context object
      */
+    @Override
     public void add(final VariantContext vc) {
         // if we are doing on the fly indexing, add the record ***before*** we write any bytes
         if ( indexer != null )
@@ -175,6 +180,6 @@ abstract class IndexingVariantContextWriter implements VariantContextWriter {
      * @return
      */
     protected static final String writerName(final File location, final OutputStream stream) {
-        return location == null ? stream.toString() : location.getAbsolutePath();
+        return location == null ? stream == null ? DEFAULT_READER_NAME : stream.toString() : location.getAbsolutePath();
     }
 }
diff --git a/src/main/java/htsjdk/variant/variantcontext/writer/SortingVariantContextWriter.java b/src/main/java/htsjdk/variant/variantcontext/writer/SortingVariantContextWriter.java
index 2185482..754ddf9 100644
--- a/src/main/java/htsjdk/variant/variantcontext/writer/SortingVariantContextWriter.java
+++ b/src/main/java/htsjdk/variant/variantcontext/writer/SortingVariantContextWriter.java
@@ -29,7 +29,11 @@ import htsjdk.variant.variantcontext.VariantContext;
 
 /**
  * this class writes VCF files, allowing records to be passed in unsorted (up to a certain genomic distance away)
+ *
+ * @deprecated 9/2017, this class is completely untested and unsupported, there is no replacement at this time
+ * if you use this class please file an issue on github or it will be removed at some point in the future
  */
+ at Deprecated
 public class SortingVariantContextWriter extends SortingVariantContextWriterBase {
 
     // the maximum START distance between records that we'll cache
@@ -51,6 +55,7 @@ public class SortingVariantContextWriter extends SortingVariantContextWriterBase
         this(innerWriter, maxCachingStartDistance, false); // by default, don't own inner
     }
 
+    @Override
     protected void noteCurrentRecord(VariantContext vc) {
         super.noteCurrentRecord(vc); // first, check for errors
 
diff --git a/src/main/java/htsjdk/variant/variantcontext/writer/SortingVariantContextWriterBase.java b/src/main/java/htsjdk/variant/variantcontext/writer/SortingVariantContextWriterBase.java
index 690a781..292f83e 100644
--- a/src/main/java/htsjdk/variant/variantcontext/writer/SortingVariantContextWriterBase.java
+++ b/src/main/java/htsjdk/variant/variantcontext/writer/SortingVariantContextWriterBase.java
@@ -38,7 +38,11 @@ import java.util.concurrent.PriorityBlockingQueue;
 /**
  * This class writes VCF files, allowing records to be passed in unsorted.
  * It also enforces that it is never passed records of the same chromosome with any other chromosome in between them.
+ *
+ * @deprecated 9/2017, this class is completely untested and unsupported, there is no replacement at this time
+ * if you use this class please file an issue on github or it will be removed at some point in the future
  */
+ at Deprecated
 abstract class SortingVariantContextWriterBase implements VariantContextWriter {
 
     // The VCFWriter to which to actually write the sorted VCF records
@@ -132,6 +136,11 @@ abstract class SortingVariantContextWriterBase implements VariantContextWriter {
         emitSafeRecords();
     }
 
+    @Override
+    public void setHeader(final VCFHeader header) {
+        innerWriter.setHeader(header);
+    }
+
     /**
      * Gets a string representation of this object.
      * @return a string representation of this object
@@ -186,6 +195,7 @@ abstract class SortingVariantContextWriterBase implements VariantContextWriter {
     private static class VariantContextComparator implements Comparator<VCFRecord>, Serializable {
         private static final long serialVersionUID = 1L;
 
+        @Override
         public int compare(VCFRecord r1, VCFRecord r2) {
             return r1.vc.getStart() - r2.vc.getStart();
         }
@@ -198,4 +208,4 @@ abstract class SortingVariantContextWriterBase implements VariantContextWriter {
             this.vc = vc;
         }
     }
-}
\ No newline at end of file
+}
diff --git a/src/main/java/htsjdk/variant/variantcontext/writer/VCFWriter.java b/src/main/java/htsjdk/variant/variantcontext/writer/VCFWriter.java
index 71aef13..442567b 100644
--- a/src/main/java/htsjdk/variant/variantcontext/writer/VCFWriter.java
+++ b/src/main/java/htsjdk/variant/variantcontext/writer/VCFWriter.java
@@ -66,6 +66,9 @@ class VCFWriter extends IndexingVariantContextWriter {
     // should we always output a complete format record, even if we could drop trailing fields?
     private final boolean writeFullFormatField;
 
+    // is the header or body written to the output stream?
+    private boolean outputHasBeenWritten;
+
     /*
      * The VCF writer uses an internal Writer, based by the ByteArrayOutputStream lineBuffer,
      * to temp. buffer the header and per-site output before flushing the per line output
@@ -128,13 +131,14 @@ class VCFWriter extends IndexingVariantContextWriter {
 
     @Override
     public void writeHeader(final VCFHeader header) {
+
         // note we need to update the mHeader object after this call because they header
         // may have genotypes trimmed out of it, if doNotWriteGenotypes is true
+        setHeader(header);
         try {
-            this.mHeader = writeHeader(header, writer, doNotWriteGenotypes, getVersionLine(), getStreamName());
-            this.vcfEncoder = new VCFEncoder(this.mHeader, this.allowMissingFieldsInHeader, this.writeFullFormatField);
+            writeHeader(this.mHeader, writer, getVersionLine(), getStreamName());
             writeAndResetBuffer();
-
+            outputHasBeenWritten = true;
         } catch ( IOException e ) {
             throw new RuntimeIOException("Couldn't write file " + getStreamName(), e);
         }
@@ -146,11 +150,9 @@ class VCFWriter extends IndexingVariantContextWriter {
 
     public static VCFHeader writeHeader(VCFHeader header,
                                         final Writer writer,
-                                        final boolean doNotWriteGenotypes,
                                         final String versionLine,
                                         final String streamNameForError) {
-        header = doNotWriteGenotypes ? new VCFHeader(header.getMetaDataInSortedOrder()) : header;
-        
+
         try {
             // the file format field needs to be written first
             writer.write(versionLine + "\n");
@@ -223,9 +225,18 @@ class VCFWriter extends IndexingVariantContextWriter {
             write("\n");
 
             writeAndResetBuffer();
-
+            outputHasBeenWritten = true;
         } catch (IOException e) {
             throw new RuntimeIOException("Unable to write the VCF object to " + getStreamName(), e);
         }
     }
+
+    @Override
+    public void setHeader(final VCFHeader header) {
+        if (outputHasBeenWritten) {
+            throw new IllegalStateException("The header cannot be modified after the header or variants have been written to the output stream.");
+        }
+        this.mHeader = doNotWriteGenotypes ? new VCFHeader(header.getMetaDataInSortedOrder()) : header;
+        this.vcfEncoder = new VCFEncoder(this.mHeader, this.allowMissingFieldsInHeader, this.writeFullFormatField);
+    }
 }
diff --git a/src/main/java/htsjdk/variant/variantcontext/writer/VariantContextWriter.java b/src/main/java/htsjdk/variant/variantcontext/writer/VariantContextWriter.java
index 187ff17..d5b1def 100644
--- a/src/main/java/htsjdk/variant/variantcontext/writer/VariantContextWriter.java
+++ b/src/main/java/htsjdk/variant/variantcontext/writer/VariantContextWriter.java
@@ -35,11 +35,18 @@ import htsjdk.variant.vcf.VCFHeader;
  */
 public interface VariantContextWriter extends Closeable {
 
+    /**
+     * Writes the header
+     *
+     * @param header header
+     * @throws IllegalStateException if header is already written
+     */
     public void writeHeader(VCFHeader header);
 
     /**
      * attempt to close the VCF file
      */
+    @Override
     public void close();
 
     /**
@@ -48,4 +55,15 @@ public interface VariantContextWriter extends Closeable {
     public boolean checkError();
     
     public void add(VariantContext vc);
+
+    /**
+     * Sets the VCF header so that data blocks can be written without writing the header
+     *
+     * Exactly one of writeHeader() or setHeader() should be called when using a writer
+     *
+     * @param header VCF header
+     * @throws IllegalStateException if header or body is already written
+
+     */
+    void setHeader(VCFHeader header);
 }
\ No newline at end of file
diff --git a/src/main/java/htsjdk/variant/variantcontext/writer/VariantContextWriterBuilder.java b/src/main/java/htsjdk/variant/variantcontext/writer/VariantContextWriterBuilder.java
index 56c8b8b..ddc0d50 100644
--- a/src/main/java/htsjdk/variant/variantcontext/writer/VariantContextWriterBuilder.java
+++ b/src/main/java/htsjdk/variant/variantcontext/writer/VariantContextWriterBuilder.java
@@ -53,7 +53,6 @@ import java.util.EnumSet;
  * 
  * <p>
  * Provides methods for creating <code>VariantContextWriter</code>s using the Builder pattern.
- * Replaces <code>VariantContextWriterFactory</code>.
  * </p>
  * <p>
  * The caller must choose an output file or an output stream for the <code>VariantContextWriter</code> to write to.
@@ -407,8 +406,9 @@ public class VariantContextWriterBuilder {
                 typeToBuild = OutputType.BCF_STREAM;
         }
 
+        // If we are writing to a file, or a special file type (ex. pipe) where the stream is not yet open.
         OutputStream outStreamFromFile = this.outStream;
-        if (FILE_TYPES.contains(this.outType)) {
+        if (FILE_TYPES.contains(this.outType) || (STREAM_TYPES.contains(this.outType) && this.outStream == null)) {
             try {
                 outStreamFromFile = IOUtil.maybeBufferOutputStream(new FileOutputStream(outFile), bufferSize);
             } catch (final FileNotFoundException e) {
@@ -446,7 +446,7 @@ public class VariantContextWriterBuilder {
                 if (options.contains(Options.INDEX_ON_THE_FLY))
                     throw new IllegalArgumentException("VCF index creation not supported for stream output.");
 
-                writer = createVCFWriter(null, outStream);
+                writer = createVCFWriter(null, outStreamFromFile);
                 break;
             case BCF_STREAM:
                 if (options.contains(Options.INDEX_ON_THE_FLY))
diff --git a/src/main/java/htsjdk/variant/variantcontext/writer/VariantContextWriterFactory.java b/src/main/java/htsjdk/variant/variantcontext/writer/VariantContextWriterFactory.java
deleted file mode 100644
index e1e0026..0000000
--- a/src/main/java/htsjdk/variant/variantcontext/writer/VariantContextWriterFactory.java
+++ /dev/null
@@ -1,282 +0,0 @@
-/*
-* Copyright (c) 2012 The Broad Institute
-* 
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-* 
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-* 
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package htsjdk.variant.variantcontext.writer;
-
-import htsjdk.samtools.Defaults;
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.util.BlockCompressedOutputStream;
-import htsjdk.samtools.util.IOUtil;
-import htsjdk.samtools.util.RuntimeIOException;
-import htsjdk.tribble.AbstractFeatureReader;
-import htsjdk.tribble.index.IndexCreator;
-import htsjdk.tribble.index.tabix.TabixFormat;
-import htsjdk.tribble.index.tabix.TabixIndexCreator;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.OutputStream;
-import java.util.EnumSet;
-
-/**
- * Factory methods to create VariantContext writers
- *
- * @author depristo
- * @since 5/12
- *
- * @deprecated Replaced by {@link VariantContextWriterBuilder}
- */
- at Deprecated
-public class VariantContextWriterFactory {
-
-    public static final EnumSet<Options> DEFAULT_OPTIONS = EnumSet.of(Options.INDEX_ON_THE_FLY);
-    public static final EnumSet<Options> NO_OPTIONS = EnumSet.noneOf(Options.class);
-
-    static {
-        if (Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE) {
-            DEFAULT_OPTIONS.add(Options.USE_ASYNC_IO);
-        }
-    }
-
-    private VariantContextWriterFactory() {}
-
-    public static VariantContextWriter create(final File location, final SAMSequenceDictionary refDict) {
-        return create(location, openOutputStream(location), refDict, DEFAULT_OPTIONS);
-    }
-
-    public static VariantContextWriter create(final File location, final SAMSequenceDictionary refDict, final EnumSet<Options> options) {
-        return create(location, openOutputStream(location), refDict, options);
-    }
-
-    /**
-     * @param output If buffered writing is desired, caller must provide some kind of buffered OutputStream.
-     */
-    public static VariantContextWriter create(final File location,
-                                              final OutputStream output,
-                                              final SAMSequenceDictionary refDict) {
-        return create(location, output, refDict, DEFAULT_OPTIONS);
-    }
-
-    /**
-     * @param output If buffered writing is desired, caller must provide some kind of buffered OutputStream.
-     */
-    public static VariantContextWriter create(final OutputStream output,
-                                              final SAMSequenceDictionary refDict,
-                                              final EnumSet<Options> options) {
-        return create(null, output, refDict, options);
-    }
-
-    /**
-     * @param location Note that this parameter is used to producing intelligent log messages, and for naming the index,
-     *                 but does not control where the file is written
-     * @param output This is where the BCF is actually written. If buffered writing is desired, caller must provide
-     *               some kind of buffered OutputStream.
-     */
-    public static VariantContextWriter createBcf2(final File location,
-                                                  final OutputStream output,
-                                                  final SAMSequenceDictionary refDict,
-                                                  final EnumSet<Options> options) {
-        return maybeWrapWithAsyncWriter(new BCF2Writer(location, output, refDict,
-                options.contains(Options.INDEX_ON_THE_FLY),
-                options.contains(Options.DO_NOT_WRITE_GENOTYPES)), options);
-    }
-
-    /**
-     * @param location Note that this parameter is used to producing intelligent log messages, and for naming the index,
-     *                 but does not control where the file is written
-     * @param output This is where the BCF is actually written.  If buffered writing is desired, caller must provide
-     *               some kind of buffered OutputStream.
-     */
-    public static VariantContextWriter createBcf2(final File location,
-                                                  final OutputStream output,
-                                                  final SAMSequenceDictionary refDict,
-                                                  final IndexCreator indexCreator,
-                                                  final EnumSet<Options> options) {
-        return maybeWrapWithAsyncWriter(new BCF2Writer(location, output, refDict, indexCreator,
-                options.contains(Options.INDEX_ON_THE_FLY),
-                options.contains(Options.DO_NOT_WRITE_GENOTYPES)), options);
-    }
-
-    /**
-     * @param location Note that this parameter is used to producing intelligent log messages, and for naming the index,
-     *                 but does not control where the file is written
-     * @param output This is where the VCF is actually written. If buffered writing is desired, caller must provide
-     *               some kind of buffered OutputStream.
-     */
-    public static VariantContextWriter createVcf(final File location,
-                                                 final OutputStream output,
-                                                 final SAMSequenceDictionary refDict,
-                                                 final EnumSet<Options> options) {
-        return maybeWrapWithAsyncWriter(new VCFWriter(location, output, refDict,
-                options.contains(Options.INDEX_ON_THE_FLY),
-                options.contains(Options.DO_NOT_WRITE_GENOTYPES),
-                options.contains(Options.ALLOW_MISSING_FIELDS_IN_HEADER),
-                options.contains(Options.WRITE_FULL_FORMAT_FIELD)), options);
-    }
-
-    /**
-     * @param location Note that this parameter is used to producing intelligent log messages, and for naming the index,
-     *                 but does not control where the file is written
-     * @param output This is where the VCF is actually written.  If buffered writing is desired, caller must provide
-     *               some kind of buffered OutputStream.
-     */
-    public static VariantContextWriter createVcf(final File location,
-                                                 final OutputStream output,
-                                                 final SAMSequenceDictionary refDict,
-                                                 final IndexCreator indexCreator,
-                                                 final EnumSet<Options> options) {
-        return maybeWrapWithAsyncWriter(new VCFWriter(location, output, refDict, indexCreator,
-                options.contains(Options.INDEX_ON_THE_FLY),
-                options.contains(Options.DO_NOT_WRITE_GENOTYPES),
-                options.contains(Options.ALLOW_MISSING_FIELDS_IN_HEADER),
-                options.contains(Options.WRITE_FULL_FORMAT_FIELD)), options);
-    }
-
-    /**
-     * @param location Note that this parameter is used to producing intelligent log messages,
-     *                 but does not control where the file is written
-     * @param output This is where the VCF is actually written.  If buffered writing is desired, caller must provide
-     *               some kind of buffered OutputStream.
-     */
-    public static VariantContextWriter createBlockCompressedVcf(final File location,
-                                                                final OutputStream output,
-                                                                final SAMSequenceDictionary refDict,
-                                                                final EnumSet<Options> options) {
-        final TabixIndexCreator indexCreator;
-        if (options.contains(Options.INDEX_ON_THE_FLY)) {
-            indexCreator = new TabixIndexCreator(refDict, TabixFormat.VCF);
-        } else {
-            indexCreator = null;
-        }
-        return maybeWrapWithAsyncWriter(new VCFWriter(location, BlockCompressedOutputStream.maybeBgzfWrapOutputStream(location, output),
-                refDict, indexCreator,
-                options.contains(Options.INDEX_ON_THE_FLY),
-                options.contains(Options.DO_NOT_WRITE_GENOTYPES),
-                options.contains(Options.ALLOW_MISSING_FIELDS_IN_HEADER),
-                options.contains(Options.WRITE_FULL_FORMAT_FIELD)), options);
-    }
-
-    /**
-     * @param location Note that this parameter is used to producing intelligent log messages,
-     *                 but does not control where the file is written
-     * @param output This is where the VCF is actually written. If buffered writing is desired, caller must provide
-     *               some kind of buffered OutputStream.
-     */
-    public static VariantContextWriter createBlockCompressedVcf(final File location,
-                                                                final OutputStream output,
-                                                                final SAMSequenceDictionary refDict,
-                                                                final IndexCreator indexCreator,
-                                                                final EnumSet<Options> options) {
-        return maybeWrapWithAsyncWriter(new VCFWriter(location, BlockCompressedOutputStream.maybeBgzfWrapOutputStream(location, output),
-                refDict, indexCreator,
-                options.contains(Options.INDEX_ON_THE_FLY),
-                options.contains(Options.DO_NOT_WRITE_GENOTYPES),
-                options.contains(Options.ALLOW_MISSING_FIELDS_IN_HEADER),
-                options.contains(Options.WRITE_FULL_FORMAT_FIELD)), options);
-    }
-
-    public static VariantContextWriter create(final File location,
-        final OutputStream output,
-        final SAMSequenceDictionary refDict,
-        final EnumSet<Options> options) {
-
-        if (isBCFOutput(location, options)) {
-            return createBcf2(location, output, refDict, options);
-        } else if (isCompressedVcf(location)) {
-            return createBlockCompressedVcf(location, output, refDict, options);
-        } else {
-            return createVcf(location, output, refDict, options);
-        }
-    }
-
-    /**
-     * @param output If buffered writing is desired, caller must provide some kind of buffered OutputStream.
-     */
-    public static VariantContextWriter create(final File location,
-                                              final OutputStream output,
-                                              final SAMSequenceDictionary refDict,
-                                              final IndexCreator indexCreator,
-                                              final EnumSet<Options> options) {
-
-        if (isBCFOutput(location, options)) {
-            return createBcf2(location, output, refDict, indexCreator, options);
-        } else if (isCompressedVcf(location)) {
-            return createBlockCompressedVcf(location, output, refDict, indexCreator, options);
-        } else {
-            return createVcf(location, output, refDict, indexCreator, options);
-        }
-    }
-
-    private static VariantContextWriter maybeWrapWithAsyncWriter(final VariantContextWriter writer,
-                                                                 final EnumSet<Options> options) {
-        if (options.contains(Options.USE_ASYNC_IO)) {
-            return new AsyncVariantContextWriter(writer, AsyncVariantContextWriter.DEFAULT_QUEUE_SIZE);
-        }
-        else return writer;
-    }
-
-    /**
-     * Should we output a BCF file based solely on the name of the file at location?
-     *
-     * @param location
-     * @return
-     */
-    public static boolean isBCFOutput(final File location) {
-        return isBCFOutput(location, EnumSet.noneOf(Options.class));
-    }
-
-    public static boolean isBCFOutput(final File location, final EnumSet<Options> options) {
-        return options.contains(Options.FORCE_BCF) || (location != null && location.getName().contains(".bcf"));
-    }
-
-    public static boolean isCompressedVcf(final File location) {
-        if (location == null)
-            return false;
-
-        return AbstractFeatureReader.hasBlockCompressedExtension(location);
-    }
-
-    public static VariantContextWriter sortOnTheFly(final VariantContextWriter innerWriter, final int maxCachingStartDistance) {
-        return sortOnTheFly(innerWriter, maxCachingStartDistance, false);
-    }
-
-    public static VariantContextWriter sortOnTheFly(final VariantContextWriter innerWriter, final int maxCachingStartDistance, final boolean takeOwnershipOfInner) {
-        return new SortingVariantContextWriter(innerWriter, maxCachingStartDistance, takeOwnershipOfInner);
-    }
-
-    /**
-     * Returns a output stream writing to location, or throws an exception if this fails
-     * @param location
-     * @return
-     */
-    protected static OutputStream openOutputStream(final File location) {
-        try {
-            return IOUtil.maybeBufferOutputStream(new FileOutputStream(location));
-        } catch (final FileNotFoundException e) {
-            throw new RuntimeIOException(location + ": Unable to create VCF writer", e);
-        }
-    }
-}
diff --git a/src/main/java/htsjdk/variant/vcf/AbstractVCFCodec.java b/src/main/java/htsjdk/variant/vcf/AbstractVCFCodec.java
index 16857b4..8a55a19 100644
--- a/src/main/java/htsjdk/variant/vcf/AbstractVCFCodec.java
+++ b/src/main/java/htsjdk/variant/vcf/AbstractVCFCodec.java
@@ -26,6 +26,7 @@
 package htsjdk.variant.vcf;
 
 import htsjdk.samtools.util.BlockCompressedInputStream;
+import htsjdk.samtools.util.IOUtil;
 import htsjdk.tribble.AsciiFeatureCodec;
 import htsjdk.tribble.Feature;
 import htsjdk.tribble.NameAwareCodec;
@@ -45,6 +46,8 @@ import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -254,6 +257,7 @@ public abstract class AbstractVCFCodec extends AsciiFeatureCodec<VariantContext>
      * @param line the line
      * @return a VariantContext
      */
+    @Override
     public VariantContext decode(String line) {
         return decodeLine(line, true);
     }
@@ -364,6 +368,7 @@ public abstract class AbstractVCFCodec extends AsciiFeatureCodec<VariantContext>
      * get the name of this codec
      * @return our set name
      */
+    @Override
     public String getName() {
         return name;
     }
@@ -372,6 +377,7 @@ public abstract class AbstractVCFCodec extends AsciiFeatureCodec<VariantContext>
      * set the name of this codec
      * @param name new name
      */
+    @Override
     public void setName(String name) {
         this.name = name;
     }
@@ -616,10 +622,11 @@ public abstract class AbstractVCFCodec extends AsciiFeatureCodec<VariantContext>
 
     public static boolean canDecodeFile(final String potentialInput, final String MAGIC_HEADER_LINE) {
         try {
+            Path path = IOUtil.getPath(potentialInput);
             //isVCFStream closes the stream that's passed in
-            return isVCFStream(new FileInputStream(potentialInput), MAGIC_HEADER_LINE) ||
-                    isVCFStream(new GZIPInputStream(new FileInputStream(potentialInput)), MAGIC_HEADER_LINE) ||
-                    isVCFStream(new BlockCompressedInputStream(new FileInputStream(potentialInput)), MAGIC_HEADER_LINE);
+            return isVCFStream(Files.newInputStream(path), MAGIC_HEADER_LINE) ||
+                    isVCFStream(new GZIPInputStream(Files.newInputStream(path)), MAGIC_HEADER_LINE) ||
+                    isVCFStream(new BlockCompressedInputStream(Files.newInputStream(path)), MAGIC_HEADER_LINE);
         } catch ( FileNotFoundException e ) {
             return false;
         } catch ( IOException e ) {
diff --git a/src/main/java/htsjdk/variant/vcf/VCF3Codec.java b/src/main/java/htsjdk/variant/vcf/VCF3Codec.java
index 5f4f48e..e9ca3ab 100644
--- a/src/main/java/htsjdk/variant/vcf/VCF3Codec.java
+++ b/src/main/java/htsjdk/variant/vcf/VCF3Codec.java
@@ -56,6 +56,7 @@ public class VCF3Codec extends AbstractVCFCodec {
      * @param reader the line reader to take header lines from
      * @return the number of header lines
      */
+    @Override
     public Object readActualHeader(final LineIterator reader) {
         final List<String> headerStrings = new ArrayList<String>();
 
@@ -97,6 +98,7 @@ public class VCF3Codec extends AbstractVCFCodec {
      * @param filterString the string to parse
      * @return a set of the filters applied
      */
+    @Override
     protected List<String> parseFilters(String filterString) {
 
         // null for unfiltered
diff --git a/src/main/java/htsjdk/variant/vcf/VCFCodec.java b/src/main/java/htsjdk/variant/vcf/VCFCodec.java
index 89d6881..6e5d3b7 100644
--- a/src/main/java/htsjdk/variant/vcf/VCFCodec.java
+++ b/src/main/java/htsjdk/variant/vcf/VCFCodec.java
@@ -125,6 +125,7 @@ public class VCFCodec extends AbstractVCFCodec {
      * @param filterString the string to parse
      * @return a set of the filters applied or null if filters were not applied to the record (e.g. as per the missing value in a VCF)
      */
+    @Override
     protected List<String> parseFilters(final String filterString) {
         // null for unfiltered
         if ( filterString.equals(VCFConstants.UNFILTERED) )
diff --git a/src/main/java/htsjdk/variant/vcf/VCFCompoundHeaderLine.java b/src/main/java/htsjdk/variant/vcf/VCFCompoundHeaderLine.java
index 48e0cdf..4d8c344 100644
--- a/src/main/java/htsjdk/variant/vcf/VCFCompoundHeaderLine.java
+++ b/src/main/java/htsjdk/variant/vcf/VCFCompoundHeaderLine.java
@@ -57,6 +57,7 @@ public abstract class VCFCompoundHeaderLine extends VCFHeaderLine implements VCF
     private VCFHeaderLineType type;
 
     // access methods
+    @Override
     public String getID() { return name; }
     public String getDescription() { return description; }
     public VCFHeaderLineType getType() { return type; }
@@ -221,6 +222,7 @@ public abstract class VCFCompoundHeaderLine extends VCFHeaderLine implements VCF
      * make a string representation of this header line
      * @return a string representation
      */
+    @Override
     protected String toStringEncoding() {
         Map<String, Object> map = new LinkedHashMap<String, Object>();
         map.put("ID", name);
diff --git a/src/main/java/htsjdk/variant/vcf/VCFEncoder.java b/src/main/java/htsjdk/variant/vcf/VCFEncoder.java
index a909066..0605b73 100644
--- a/src/main/java/htsjdk/variant/vcf/VCFEncoder.java
+++ b/src/main/java/htsjdk/variant/vcf/VCFEncoder.java
@@ -22,361 +22,362 @@ import java.util.TreeMap;
  */
 public class VCFEncoder {
 
-	/**
-	 * The encoding used for VCF files: ISO-8859-1
-	 */
-	public static final Charset VCF_CHARSET = Charset.forName("ISO-8859-1");
-	private static final String QUAL_FORMAT_STRING = "%.2f";
-	private static final String QUAL_FORMAT_EXTENSION_TO_TRIM = ".00";
-
-	private final IntGenotypeFieldAccessors GENOTYPE_FIELD_ACCESSORS = new IntGenotypeFieldAccessors();
-
-	private VCFHeader header;
-
-	private boolean allowMissingFieldsInHeader = false;
-
-	private boolean outputTrailingFormatFields = false;
-
-	/**
-	 * Prepare a VCFEncoder that will encode records appropriate to the given VCF header, optionally
-	 * allowing missing fields in the header.
-	 */
-	public VCFEncoder(final VCFHeader header, final boolean allowMissingFieldsInHeader, final boolean outputTrailingFormatFields) {
-		if (header == null) throw new NullPointerException("The VCF header must not be null.");
-		this.header = header;
-		this.allowMissingFieldsInHeader = allowMissingFieldsInHeader;
-		this.outputTrailingFormatFields = outputTrailingFormatFields;
-	}
-
-	/**
-	 * Please see the notes in the default constructor
-	 */
-	@Deprecated
-	public void setVCFHeader(final VCFHeader header) {
-		this.header = header;
-	}
-
-	/**
-	 * Please see the notes in the default constructor
-	 */
-	@Deprecated
-	public void setAllowMissingFieldsInHeader(final boolean allow) {
-		this.allowMissingFieldsInHeader = allow;
-	}
-
-	public String encode(final VariantContext context) {
-		if (this.header == null) {
-			throw new NullPointerException("The header field must be set on the VCFEncoder before encoding records.");
-		}
-
-		final StringBuilder stringBuilder = new StringBuilder();
-
-		// CHROM
-		stringBuilder.append(context.getContig()).append(VCFConstants.FIELD_SEPARATOR)
-				// POS
-				.append(String.valueOf(context.getStart())).append(VCFConstants.FIELD_SEPARATOR)
-				// ID
-				.append(context.getID()).append(VCFConstants.FIELD_SEPARATOR)
-				// REF
-				.append(context.getReference().getDisplayString()).append(VCFConstants.FIELD_SEPARATOR);
-
-		// ALT
-		if ( context.isVariant() ) {
-			Allele altAllele = context.getAlternateAllele(0);
-			String alt = altAllele.getDisplayString();
-			stringBuilder.append(alt);
-
-			for (int i = 1; i < context.getAlternateAlleles().size(); i++) {
-				altAllele = context.getAlternateAllele(i);
-				alt = altAllele.getDisplayString();
-				stringBuilder.append(',');
-				stringBuilder.append(alt);
-			}
-		} else {
-			stringBuilder.append(VCFConstants.EMPTY_ALTERNATE_ALLELE_FIELD);
-		}
-
-		stringBuilder.append(VCFConstants.FIELD_SEPARATOR);
-
-		// QUAL
-		if ( ! context.hasLog10PError()) stringBuilder.append(VCFConstants.MISSING_VALUE_v4);
-		else stringBuilder.append(formatQualValue(context.getPhredScaledQual()));
-		stringBuilder.append(VCFConstants.FIELD_SEPARATOR)
-				// FILTER
-				.append(getFilterString(context)).append(VCFConstants.FIELD_SEPARATOR);
-
-		// INFO
-		final Map<String, String> infoFields = new TreeMap<String, String>();
-		for (final Map.Entry<String, Object> field : context.getAttributes().entrySet() ) {
-			if ( ! this.header.hasInfoLine(field.getKey())) fieldIsMissingFromHeaderError(context, field.getKey(), "INFO");
-
-			final String outputValue = formatVCFField(field.getValue());
-			if (outputValue != null) infoFields.put(field.getKey(), outputValue);
-		}
-		writeInfoString(infoFields, stringBuilder);
-
-		// FORMAT
-		final GenotypesContext gc = context.getGenotypes();
-		if (gc.isLazyWithData() && ((LazyGenotypesContext) gc).getUnparsedGenotypeData() instanceof String) {
-			stringBuilder.append(VCFConstants.FIELD_SEPARATOR);
-			stringBuilder.append(((LazyGenotypesContext) gc).getUnparsedGenotypeData().toString());
-		} else {
-			final List<String> genotypeAttributeKeys = context.calcVCFGenotypeKeys(this.header);
-			if ( ! genotypeAttributeKeys.isEmpty()) {
-				for (final String format : genotypeAttributeKeys)
-					if ( ! this.header.hasFormatLine(format))
-						fieldIsMissingFromHeaderError(context, format, "FORMAT");
-
-				final String genotypeFormatString = ParsingUtils.join(VCFConstants.GENOTYPE_FIELD_SEPARATOR, genotypeAttributeKeys);
-
-				stringBuilder.append(VCFConstants.FIELD_SEPARATOR);
-				stringBuilder.append(genotypeFormatString);
-
-				final Map<Allele, String> alleleStrings = buildAlleleStrings(context);
-				addGenotypeData(context, alleleStrings, genotypeAttributeKeys, stringBuilder);
-			}
-		}
-
-		return stringBuilder.toString();
-	}
-
-	VCFHeader getVCFHeader() {
-		return this.header;
-	}
-
-	boolean getAllowMissingFieldsInHeader() {
-		return this.allowMissingFieldsInHeader;
-	}
-
-	private String getFilterString(final VariantContext vc) {
-		if (vc.isFiltered()) {
-			for (final String filter : vc.getFilters()) {
-				if ( ! this.header.hasFilterLine(filter)) fieldIsMissingFromHeaderError(vc, filter, "FILTER");
-			}
-
-			return ParsingUtils.join(";", ParsingUtils.sortList(vc.getFilters()));
-		}
-		else if (vc.filtersWereApplied()) return VCFConstants.PASSES_FILTERS_v4;
-		else return VCFConstants.UNFILTERED;
-	}
-
-	private String formatQualValue(final double qual) {
-		String s = String.format(QUAL_FORMAT_STRING, qual);
-		if ( s.endsWith(QUAL_FORMAT_EXTENSION_TO_TRIM) )
-			s = s.substring(0, s.length() - QUAL_FORMAT_EXTENSION_TO_TRIM.length());
-		return s;
-	}
-
-	private void fieldIsMissingFromHeaderError(final VariantContext vc, final String id, final String field) {
-		if ( ! allowMissingFieldsInHeader)
-			throw new IllegalStateException("Key " + id + " found in VariantContext field " + field
-					+ " at " + vc.getContig() + ":" + vc.getStart()
-					+ " but this key isn't defined in the VCFHeader.  We require all VCFs to have"
-					+ " complete VCF headers by default.");
-	}
-
-	String formatVCFField(final Object val) {
-		final String result;
-		if ( val == null )
-			result = VCFConstants.MISSING_VALUE_v4;
-		else if ( val instanceof Double )
-			result = formatVCFDouble((Double) val);
-		else if ( val instanceof Boolean )
-			result = (Boolean)val ? "" : null; // empty string for true, null for false
-		else if ( val instanceof List ) {
-			result = formatVCFField(((List)val).toArray());
-		} else if ( val.getClass().isArray() ) {
-			final int length = Array.getLength(val);
-			if ( length == 0 )
-				return formatVCFField(null);
-			final StringBuilder sb = new StringBuilder(formatVCFField(Array.get(val, 0)));
-			for ( int i = 1; i < length; i++) {
-				sb.append(',');
-				sb.append(formatVCFField(Array.get(val, i)));
-			}
-			result = sb.toString();
-		} else
-			result = val.toString();
-
-		return result;
-	}
-
-	/**
-	 * Takes a double value and pretty prints it to a String for display
-	 *
-	 * Large doubles => gets %.2f style formatting
-	 * Doubles < 1 / 10 but > 1/100 => get %.3f style formatting
-	 * Double < 1/100 => %.3e formatting
-	 * @param d
-	 * @return
-	 */
-	public static String formatVCFDouble(final double d) {
-		final String format;
-		if ( d < 1 ) {
-			if ( d < 0.01 ) {
-				if ( Math.abs(d) >= 1e-20 )
-					format = "%.3e";
-				else {
-					// return a zero format
-					return "0.00";
-				}
-			} else {
-				format = "%.3f";
-			}
-		} else {
-			format = "%.2f";
-		}
-
-		return String.format(format, d);
-	}
-
-	static int countOccurrences(final char c, final String s) {
-		int count = 0;
-		for (int i = 0; i < s.length(); i++) {
-			count += s.charAt(i) == c ? 1 : 0;
-		}
-		return count;
-	}
-
-	static boolean isMissingValue(final String s) {
-		// we need to deal with the case that it's a list of missing values
-		return (countOccurrences(VCFConstants.MISSING_VALUE_v4.charAt(0), s) + countOccurrences(',', s) == s.length());
-	}
-
-	/*
-	 * Add the genotype data
-	 */
-	public void addGenotypeData(final VariantContext vc, final Map<Allele, String> alleleMap, final List<String> genotypeFormatKeys, final StringBuilder builder) {
-		final int ploidy = vc.getMaxPloidy(2);
-
-		for (final String sample : this.header.getGenotypeSamples()) {
-			builder.append(VCFConstants.FIELD_SEPARATOR);
-
-			Genotype g = vc.getGenotype(sample);
-			if (g == null) g = GenotypeBuilder.createMissing(sample, ploidy);
-
-			final List<String> attrs = new ArrayList<String>(genotypeFormatKeys.size());
-			for (final String field : genotypeFormatKeys) {
-				if (field.equals(VCFConstants.GENOTYPE_KEY)) {
-					if ( ! g.isAvailable()) {
-						throw new IllegalStateException("GTs cannot be missing for some samples if they are available for others in the record");
-					}
-
-					writeAllele(g.getAllele(0), alleleMap, builder);
-					for (int i = 1; i < g.getPloidy(); i++) {
-						builder.append(g.isPhased() ? VCFConstants.PHASED : VCFConstants.UNPHASED);
-						writeAllele(g.getAllele(i), alleleMap, builder);
-					}
-					continue;
-
-				} else {
-					final String outputValue;
-					if ( field.equals(VCFConstants.GENOTYPE_FILTER_KEY ) ) {
-						outputValue = g.isFiltered() ? g.getFilters() : VCFConstants.PASSES_FILTERS_v4;
-					} else {
-						final IntGenotypeFieldAccessors.Accessor accessor = GENOTYPE_FIELD_ACCESSORS.getAccessor(field);
-						if ( accessor != null ) {
-							final int[] intValues = accessor.getValues(g);
-							if ( intValues == null )
-								outputValue = VCFConstants.MISSING_VALUE_v4;
-							else if ( intValues.length == 1 ) // fast path
-								outputValue = Integer.toString(intValues[0]);
-							else {
-								final StringBuilder sb = new StringBuilder();
-								sb.append(intValues[0]);
-								for ( int i = 1; i < intValues.length; i++) {
-									sb.append(',');
-									sb.append(intValues[i]);
-								}
-								outputValue = sb.toString();
-							}
-						} else {
-							Object val = g.hasExtendedAttribute(field) ? g.getExtendedAttribute(field) : VCFConstants.MISSING_VALUE_v4;
-
-							final VCFFormatHeaderLine metaData = this.header.getFormatHeaderLine(field);
-							if ( metaData != null ) {
-								final int numInFormatField = metaData.getCount(vc);
-								if ( numInFormatField > 1 && val.equals(VCFConstants.MISSING_VALUE_v4) ) {
-									// If we have a missing field but multiple values are expected, we need to construct a new string with all fields.
-									// For example, if Number=2, the string has to be ".,."
-									final StringBuilder sb = new StringBuilder(VCFConstants.MISSING_VALUE_v4);
-									for ( int i = 1; i < numInFormatField; i++ ) {
-										sb.append(',');
-										sb.append(VCFConstants.MISSING_VALUE_v4);
-									}
-									val = sb.toString();
-								}
-							}
-
-							// assume that if key is absent, then the given string encoding suffices
-							outputValue = formatVCFField(val);
-						}
-					}
-
-					if ( outputValue != null )
-						attrs.add(outputValue);
-				}
-			}
-
-			// strip off trailing missing values
-			if (!outputTrailingFormatFields) {
-				for (int i = attrs.size() - 1; i >= 0; i--) {
-					if (isMissingValue(attrs.get(i))) attrs.remove(i);
-					else break;
-				}
-			}
-
-			for (int i = 0; i < attrs.size(); i++) {
-				if ( i > 0 || genotypeFormatKeys.contains(VCFConstants.GENOTYPE_KEY)) {
-					builder.append(VCFConstants.GENOTYPE_FIELD_SEPARATOR);
-				}
-				builder.append(attrs.get(i));
-			}
-		}
-	}
-
-	/*
-	 * Create the info string; assumes that no values are null
-	 */
-	private void writeInfoString(final Map<String, String> infoFields, final StringBuilder builder) {
-		if ( infoFields.isEmpty() ) {
-			builder.append(VCFConstants.EMPTY_INFO_FIELD);
-			return;
-		}
-
-		boolean isFirst = true;
-		for (final Map.Entry<String, String> entry : infoFields.entrySet()) {
-			if (isFirst) isFirst = false;
-			else builder.append(VCFConstants.INFO_FIELD_SEPARATOR);
-
-			builder.append(entry.getKey());
-
-			if ( ! entry.getValue().equals("")) {
-				final VCFInfoHeaderLine metaData = this.header.getInfoHeaderLine(entry.getKey());
-				if ( metaData == null || metaData.getCountType() != VCFHeaderLineCount.INTEGER || metaData.getCount() != 0 ) {
-					builder.append('=');
-					builder.append(entry.getValue());
-				}
-			}
-		}
-	}
-
-	public Map<Allele, String> buildAlleleStrings(final VariantContext vc) {
-		final Map<Allele, String> alleleMap = new HashMap<Allele, String>(vc.getAlleles().size()+1);
-		alleleMap.put(Allele.NO_CALL, VCFConstants.EMPTY_ALLELE); // convenience for lookup
-
-		final List<Allele> alleles = vc.getAlleles();
-		for ( int i = 0; i < alleles.size(); i++ ) {
-			alleleMap.put(alleles.get(i), String.valueOf(i));
-		}
-
-		return alleleMap;
-	}
-
-	private void writeAllele(final Allele allele, final Map<Allele, String> alleleMap, final StringBuilder builder) {
-		final String encoding = alleleMap.get(allele);
-		if ( encoding == null )
-			throw new RuntimeException("Allele " + allele + " is not an allele in the variant context");
-		builder.append(encoding);
-	}
+    /**
+     * The encoding used for VCF files: ISO-8859-1
+     */
+    public static final Charset VCF_CHARSET = Charset.forName("ISO-8859-1");
+    private static final String QUAL_FORMAT_STRING = "%.2f";
+    private static final String QUAL_FORMAT_EXTENSION_TO_TRIM = ".00";
+
+    private final IntGenotypeFieldAccessors GENOTYPE_FIELD_ACCESSORS = new IntGenotypeFieldAccessors();
+
+    private VCFHeader header;
+
+    private boolean allowMissingFieldsInHeader = false;
+
+    private boolean outputTrailingFormatFields = false;
+
+    /**
+     * Prepare a VCFEncoder that will encode records appropriate to the given VCF header, optionally
+     * allowing missing fields in the header.
+     */
+    public VCFEncoder(final VCFHeader header, final boolean allowMissingFieldsInHeader, final boolean outputTrailingFormatFields) {
+        if (header == null) throw new NullPointerException("The VCF header must not be null.");
+        this.header = header;
+        this.allowMissingFieldsInHeader = allowMissingFieldsInHeader;
+        this.outputTrailingFormatFields = outputTrailingFormatFields;
+    }
+
+    /**
+     * @deprecated since 10/24/13 use the constructor
+     */
+    @Deprecated
+    public void setVCFHeader(final VCFHeader header) {
+        this.header = header;
+    }
+
+    /**
+     * @deprecated since 10/24/13 use the constructor
+     */
+    @Deprecated
+    public void setAllowMissingFieldsInHeader(final boolean allow) {
+        this.allowMissingFieldsInHeader = allow;
+    }
+
+    public String encode(final VariantContext context) {
+        if (this.header == null) {
+            throw new NullPointerException("The header field must be set on the VCFEncoder before encoding records.");
+        }
+
+        final StringBuilder stringBuilder = new StringBuilder();
+
+        // CHROM
+        stringBuilder.append(context.getContig()).append(VCFConstants.FIELD_SEPARATOR)
+                // POS
+                .append(String.valueOf(context.getStart())).append(VCFConstants.FIELD_SEPARATOR)
+                // ID
+                .append(context.getID()).append(VCFConstants.FIELD_SEPARATOR)
+                // REF
+                .append(context.getReference().getDisplayString()).append(VCFConstants.FIELD_SEPARATOR);
+
+        // ALT
+        if (context.isVariant()) {
+            Allele altAllele = context.getAlternateAllele(0);
+            String alt = altAllele.getDisplayString();
+            stringBuilder.append(alt);
+
+            for (int i = 1; i < context.getAlternateAlleles().size(); i++) {
+                altAllele = context.getAlternateAllele(i);
+                alt = altAllele.getDisplayString();
+                stringBuilder.append(',');
+                stringBuilder.append(alt);
+            }
+        } else {
+            stringBuilder.append(VCFConstants.EMPTY_ALTERNATE_ALLELE_FIELD);
+        }
+
+        stringBuilder.append(VCFConstants.FIELD_SEPARATOR);
+
+        // QUAL
+        if (!context.hasLog10PError()) stringBuilder.append(VCFConstants.MISSING_VALUE_v4);
+        else stringBuilder.append(formatQualValue(context.getPhredScaledQual()));
+        stringBuilder.append(VCFConstants.FIELD_SEPARATOR)
+                // FILTER
+                .append(getFilterString(context)).append(VCFConstants.FIELD_SEPARATOR);
+
+        // INFO
+        final Map<String, String> infoFields = new TreeMap<>();
+        for (final Map.Entry<String, Object> field : context.getAttributes().entrySet()) {
+            if (!this.header.hasInfoLine(field.getKey()))
+                fieldIsMissingFromHeaderError(context, field.getKey(), "INFO");
+
+            final String outputValue = formatVCFField(field.getValue());
+            if (outputValue != null) infoFields.put(field.getKey(), outputValue);
+        }
+        writeInfoString(infoFields, stringBuilder);
+
+        // FORMAT
+        final GenotypesContext gc = context.getGenotypes();
+        if (gc.isLazyWithData() && ((LazyGenotypesContext) gc).getUnparsedGenotypeData() instanceof String) {
+            stringBuilder.append(VCFConstants.FIELD_SEPARATOR);
+            stringBuilder.append(((LazyGenotypesContext) gc).getUnparsedGenotypeData().toString());
+        } else {
+            final List<String> genotypeAttributeKeys = context.calcVCFGenotypeKeys(this.header);
+            if (!genotypeAttributeKeys.isEmpty()) {
+                for (final String format : genotypeAttributeKeys)
+                    if (!this.header.hasFormatLine(format))
+                        fieldIsMissingFromHeaderError(context, format, "FORMAT");
+
+                final String genotypeFormatString = ParsingUtils.join(VCFConstants.GENOTYPE_FIELD_SEPARATOR, genotypeAttributeKeys);
+
+                stringBuilder.append(VCFConstants.FIELD_SEPARATOR);
+                stringBuilder.append(genotypeFormatString);
+
+                final Map<Allele, String> alleleStrings = buildAlleleStrings(context);
+                addGenotypeData(context, alleleStrings, genotypeAttributeKeys, stringBuilder);
+            }
+        }
+
+        return stringBuilder.toString();
+    }
+
+    VCFHeader getVCFHeader() {
+        return this.header;
+    }
+
+    boolean getAllowMissingFieldsInHeader() {
+        return this.allowMissingFieldsInHeader;
+    }
+
+    private String getFilterString(final VariantContext vc) {
+        if (vc.isFiltered()) {
+            for (final String filter : vc.getFilters()) {
+                if (!this.header.hasFilterLine(filter)) fieldIsMissingFromHeaderError(vc, filter, "FILTER");
+            }
+
+            return ParsingUtils.join(";", ParsingUtils.sortList(vc.getFilters()));
+        } else if (vc.filtersWereApplied()) return VCFConstants.PASSES_FILTERS_v4;
+        else return VCFConstants.UNFILTERED;
+    }
+
+    private String formatQualValue(final double qual) {
+        String s = String.format(QUAL_FORMAT_STRING, qual);
+        if (s.endsWith(QUAL_FORMAT_EXTENSION_TO_TRIM))
+            s = s.substring(0, s.length() - QUAL_FORMAT_EXTENSION_TO_TRIM.length());
+        return s;
+    }
+
+    private void fieldIsMissingFromHeaderError(final VariantContext vc, final String id, final String field) {
+        if (!allowMissingFieldsInHeader)
+            throw new IllegalStateException("Key " + id + " found in VariantContext field " + field
+                    + " at " + vc.getContig() + ":" + vc.getStart()
+                    + " but this key isn't defined in the VCFHeader.  We require all VCFs to have"
+                    + " complete VCF headers by default.");
+    }
+
+    String formatVCFField(final Object val) {
+        final String result;
+        if (val == null)
+            result = VCFConstants.MISSING_VALUE_v4;
+        else if (val instanceof Double)
+            result = formatVCFDouble((Double) val);
+        else if (val instanceof Boolean)
+            result = (Boolean) val ? "" : null; // empty string for true, null for false
+        else if (val instanceof List) {
+            result = formatVCFField(((List) val).toArray());
+        } else if (val.getClass().isArray()) {
+            final int length = Array.getLength(val);
+            if (length == 0)
+                return formatVCFField(null);
+            final StringBuilder sb = new StringBuilder(formatVCFField(Array.get(val, 0)));
+            for (int i = 1; i < length; i++) {
+                sb.append(',');
+                sb.append(formatVCFField(Array.get(val, i)));
+            }
+            result = sb.toString();
+        } else
+            result = val.toString();
+
+        return result;
+    }
+
+    /**
+     * Takes a double value and pretty prints it to a String for display
+     * <p>
+     * Large doubles => gets %.2f style formatting
+     * Doubles < 1 / 10 but > 1/100 => get %.3f style formatting
+     * Double < 1/100 => %.3e formatting
+     *
+     * @param d
+     * @return
+     */
+    public static String formatVCFDouble(final double d) {
+        final String format;
+        if (d < 1) {
+            if (d < 0.01) {
+                if (Math.abs(d) >= 1e-20)
+                    format = "%.3e";
+                else {
+                    // return a zero format
+                    return "0.00";
+                }
+            } else {
+                format = "%.3f";
+            }
+        } else {
+            format = "%.2f";
+        }
+
+        return String.format(format, d);
+    }
+
+    static int countOccurrences(final char c, final String s) {
+        int count = 0;
+        for (int i = 0; i < s.length(); i++) {
+            count += s.charAt(i) == c ? 1 : 0;
+        }
+        return count;
+    }
+
+    static boolean isMissingValue(final String s) {
+        // we need to deal with the case that it's a list of missing values
+        return (countOccurrences(VCFConstants.MISSING_VALUE_v4.charAt(0), s) + countOccurrences(',', s) == s.length());
+    }
+
+    /*
+     * Add the genotype data
+     */
+    public void addGenotypeData(final VariantContext vc, final Map<Allele, String> alleleMap, final List<String> genotypeFormatKeys, final StringBuilder builder) {
+        final int ploidy = vc.getMaxPloidy(2);
+
+        for (final String sample : this.header.getGenotypeSamples()) {
+            builder.append(VCFConstants.FIELD_SEPARATOR);
+
+            Genotype g = vc.getGenotype(sample);
+            if (g == null) g = GenotypeBuilder.createMissing(sample, ploidy);
+
+            final List<String> attrs = new ArrayList<String>(genotypeFormatKeys.size());
+            for (final String field : genotypeFormatKeys) {
+                if (field.equals(VCFConstants.GENOTYPE_KEY)) {
+                    if (!g.isAvailable()) {
+                        throw new IllegalStateException("GTs cannot be missing for some samples if they are available for others in the record");
+                    }
+
+                    writeAllele(g.getAllele(0), alleleMap, builder);
+                    for (int i = 1; i < g.getPloidy(); i++) {
+                        builder.append(g.isPhased() ? VCFConstants.PHASED : VCFConstants.UNPHASED);
+                        writeAllele(g.getAllele(i), alleleMap, builder);
+                    }
+                    continue;
+
+                } else {
+                    final String outputValue;
+                    if (field.equals(VCFConstants.GENOTYPE_FILTER_KEY)) {
+                        outputValue = g.isFiltered() ? g.getFilters() : VCFConstants.PASSES_FILTERS_v4;
+                    } else {
+                        final IntGenotypeFieldAccessors.Accessor accessor = GENOTYPE_FIELD_ACCESSORS.getAccessor(field);
+                        if (accessor != null) {
+                            final int[] intValues = accessor.getValues(g);
+                            if (intValues == null)
+                                outputValue = VCFConstants.MISSING_VALUE_v4;
+                            else if (intValues.length == 1) // fast path
+                                outputValue = Integer.toString(intValues[0]);
+                            else {
+                                final StringBuilder sb = new StringBuilder();
+                                sb.append(intValues[0]);
+                                for (int i = 1; i < intValues.length; i++) {
+                                    sb.append(',');
+                                    sb.append(intValues[i]);
+                                }
+                                outputValue = sb.toString();
+                            }
+                        } else {
+                            Object val = g.hasExtendedAttribute(field) ? g.getExtendedAttribute(field) : VCFConstants.MISSING_VALUE_v4;
+
+                            final VCFFormatHeaderLine metaData = this.header.getFormatHeaderLine(field);
+                            if (metaData != null) {
+                                final int numInFormatField = metaData.getCount(vc);
+                                if (numInFormatField > 1 && val.equals(VCFConstants.MISSING_VALUE_v4)) {
+                                    // If we have a missing field but multiple values are expected, we need to construct a new string with all fields.
+                                    // For example, if Number=2, the string has to be ".,."
+                                    final StringBuilder sb = new StringBuilder(VCFConstants.MISSING_VALUE_v4);
+                                    for (int i = 1; i < numInFormatField; i++) {
+                                        sb.append(',');
+                                        sb.append(VCFConstants.MISSING_VALUE_v4);
+                                    }
+                                    val = sb.toString();
+                                }
+                            }
+
+                            // assume that if key is absent, then the given string encoding suffices
+                            outputValue = formatVCFField(val);
+                        }
+                    }
+
+                    if (outputValue != null)
+                        attrs.add(outputValue);
+                }
+            }
+
+            // strip off trailing missing values
+            if (!outputTrailingFormatFields) {
+                for (int i = attrs.size() - 1; i >= 0; i--) {
+                    if (isMissingValue(attrs.get(i))) attrs.remove(i);
+                    else break;
+                }
+            }
+
+            for (int i = 0; i < attrs.size(); i++) {
+                if (i > 0 || genotypeFormatKeys.contains(VCFConstants.GENOTYPE_KEY)) {
+                    builder.append(VCFConstants.GENOTYPE_FIELD_SEPARATOR);
+                }
+                builder.append(attrs.get(i));
+            }
+        }
+    }
+
+    /*
+     * Create the info string; assumes that no values are null
+     */
+    private void writeInfoString(final Map<String, String> infoFields, final StringBuilder builder) {
+        if (infoFields.isEmpty()) {
+            builder.append(VCFConstants.EMPTY_INFO_FIELD);
+            return;
+        }
+
+        boolean isFirst = true;
+        for (final Map.Entry<String, String> entry : infoFields.entrySet()) {
+            if (isFirst) isFirst = false;
+            else builder.append(VCFConstants.INFO_FIELD_SEPARATOR);
+
+            builder.append(entry.getKey());
+
+            if (!entry.getValue().equals("")) {
+                final VCFInfoHeaderLine metaData = this.header.getInfoHeaderLine(entry.getKey());
+                if (metaData == null || metaData.getCountType() != VCFHeaderLineCount.INTEGER || metaData.getCount() != 0) {
+                    builder.append('=');
+                    builder.append(entry.getValue());
+                }
+            }
+        }
+    }
+
+    public Map<Allele, String> buildAlleleStrings(final VariantContext vc) {
+        final Map<Allele, String> alleleMap = new HashMap<Allele, String>(vc.getAlleles().size() + 1);
+        alleleMap.put(Allele.NO_CALL, VCFConstants.EMPTY_ALLELE); // convenience for lookup
+
+        final List<Allele> alleles = vc.getAlleles();
+        for (int i = 0; i < alleles.size(); i++) {
+            alleleMap.put(alleles.get(i), String.valueOf(i));
+        }
+
+        return alleleMap;
+    }
+
+    private void writeAllele(final Allele allele, final Map<Allele, String> alleleMap, final StringBuilder builder) {
+        final String encoding = alleleMap.get(allele);
+        if (encoding == null)
+            throw new RuntimeException("Allele " + allele + " is not an allele in the variant context");
+        builder.append(encoding);
+    }
 }
diff --git a/src/main/java/htsjdk/variant/vcf/VCFFileReader.java b/src/main/java/htsjdk/variant/vcf/VCFFileReader.java
index 9024f34..d13387c 100644
--- a/src/main/java/htsjdk/variant/vcf/VCFFileReader.java
+++ b/src/main/java/htsjdk/variant/vcf/VCFFileReader.java
@@ -129,14 +129,22 @@ public class VCFFileReader implements Closeable, Iterable<VariantContext> {
 	}
 
     /** Returns an iterator over all records in this VCF/BCF file. */
-	public CloseableIterator<VariantContext> iterator() {
+	@Override
+    public CloseableIterator<VariantContext> iterator() {
 		try { return reader.iterator(); }
         catch (final IOException ioe) {
 			throw new TribbleException("Could not create an iterator from a feature reader.", ioe);
 		}
 	}
 
-    /** Queries for records within the region specified. */
+    /**
+     * Queries for records overlapping the region specified.
+     * Note that this method requires VCF files with an associated index.  If no index exists a TribbleException will be thrown.
+     * @param chrom the chomosome to query
+     * @param start query interval start
+     * @param end query interval end
+     * @return non-null iterator over VariantContexts
+     */
     public CloseableIterator<VariantContext> query(final String chrom, final int start, final int end) {
         try { return reader.query(chrom, start, end); }
         catch (final IOException ioe) {
@@ -144,7 +152,8 @@ public class VCFFileReader implements Closeable, Iterable<VariantContext> {
         }
     }
 
-	public void close() {
+	@Override
+    public void close() {
 		try { this.reader.close(); }
         catch (final IOException ioe) {
 			throw new TribbleException("Could not close a variant context feature reader.", ioe);
diff --git a/src/main/java/htsjdk/variant/vcf/VCFHeader.java b/src/main/java/htsjdk/variant/vcf/VCFHeader.java
index 30dce37..00ed26e 100644
--- a/src/main/java/htsjdk/variant/vcf/VCFHeader.java
+++ b/src/main/java/htsjdk/variant/vcf/VCFHeader.java
@@ -68,7 +68,7 @@ public class VCFHeader implements Serializable {
     private final Map<String, VCFFormatHeaderLine> mFormatMetaData = new LinkedHashMap<String, VCFFormatHeaderLine>();
     private final Map<String, VCFFilterHeaderLine> mFilterMetaData = new LinkedHashMap<String, VCFFilterHeaderLine>();
     private final Map<String, VCFHeaderLine> mOtherMetaData = new LinkedHashMap<String, VCFHeaderLine>();
-    private final List<VCFContigHeaderLine> contigMetaData = new ArrayList<VCFContigHeaderLine>();
+    private final Map<String, VCFContigHeaderLine> contigMetaData = new LinkedHashMap<>();
 
     // the list of auxillary tags
     private final List<String> mGenotypeSampleNames = new ArrayList<String>();
@@ -188,7 +188,8 @@ public class VCFHeader implements Serializable {
      * @return all of the VCF header lines of the ##contig form in order, or an empty list if none were present
      */
     public List<VCFContigHeaderLine> getContigLines() {
-        return Collections.unmodifiableList(contigMetaData);
+        // this must preserve input order
+        return Collections.unmodifiableList(new ArrayList<>(contigMetaData.values()));
     }
 
     /**
@@ -223,10 +224,8 @@ public class VCFHeader implements Serializable {
         }
         mMetaData.removeAll(toRemove);
         for (final SAMSequenceRecord record : dictionary.getSequences()) {
-            contigMetaData.add(new VCFContigHeaderLine(record, record.getAssembly()));
+            addMetaDataLine(new VCFContigHeaderLine(record, record.getAssembly()));
         }
-
-        this.mMetaData.addAll(contigMetaData);
     }
 
     public VariantContextComparator getVCFRecordComparator() {
@@ -321,18 +320,15 @@ public class VCFHeader implements Serializable {
      * @return true if line was added to the list of contig lines, otherwise false
      */
     private boolean addContigMetaDataLineLookupEntry(final VCFContigHeaderLine line) {
-        for (VCFContigHeaderLine vcfContigHeaderLine : contigMetaData) {
-            // if we are trying to add a contig for the same ID
-            if (vcfContigHeaderLine.getID().equals(line.getID())) {
-                if ( GeneralUtils.DEBUG_MODE_ENABLED ) {
-                    System.err.println("Found duplicate VCF contig header lines for " + line.getID() + "; keeping the first only" );
-                }
-                // do not add this contig if it exists
-                return false;
+        // if we are trying to add a contig for the same ID
+        if (contigMetaData.containsKey(line.getID())) {
+            if ( GeneralUtils.DEBUG_MODE_ENABLED ) {
+                System.err.println("Found duplicate VCF contig header lines for " + line.getID() + "; keeping the first only" );
             }
+            // do not add this contig if it exists
+            return false;
         }
-
-        contigMetaData.add(line);
+        contigMetaData.put(line.getID(), line);
         return true;
     }
 
diff --git a/src/main/java/htsjdk/variant/vcf/VCFHeaderLine.java b/src/main/java/htsjdk/variant/vcf/VCFHeaderLine.java
index c4c1e3b..ce12c42 100644
--- a/src/main/java/htsjdk/variant/vcf/VCFHeaderLine.java
+++ b/src/main/java/htsjdk/variant/vcf/VCFHeaderLine.java
@@ -127,6 +127,7 @@ public class VCFHeaderLine implements Comparable, Serializable {
         return result;
     }
 
+    @Override
     public int compareTo(Object other) {
         return toString().compareTo(other.toString());
     }
diff --git a/src/main/java/htsjdk/variant/vcf/VCFHeaderLineTranslator.java b/src/main/java/htsjdk/variant/vcf/VCFHeaderLineTranslator.java
index 071d815..3ac72b2 100644
--- a/src/main/java/htsjdk/variant/vcf/VCFHeaderLineTranslator.java
+++ b/src/main/java/htsjdk/variant/vcf/VCFHeaderLineTranslator.java
@@ -67,6 +67,7 @@ class VCF4Parser implements VCFLineParser {
      * @param valueLine the line
      * @return a mapping of the tags parsed out
      */
+    @Override
     public Map<String, String> parseLine(String valueLine, List<String> expectedTagOrder) {
         // our return map
         Map<String, String> ret = new LinkedHashMap<String, String>();
@@ -145,6 +146,7 @@ class VCF4Parser implements VCFLineParser {
 
 class VCF3Parser implements VCFLineParser {
 
+    @Override
     public Map<String, String> parseLine(String valueLine, List<String> expectedTagOrder) {
         // our return map
         Map<String, String> ret = new LinkedHashMap<String, String>();
diff --git a/src/main/java/htsjdk/variant/vcf/VCFSimpleHeaderLine.java b/src/main/java/htsjdk/variant/vcf/VCFSimpleHeaderLine.java
index a5da687..1c36f9e 100644
--- a/src/main/java/htsjdk/variant/vcf/VCFSimpleHeaderLine.java
+++ b/src/main/java/htsjdk/variant/vcf/VCFSimpleHeaderLine.java
@@ -92,6 +92,7 @@ public class VCFSimpleHeaderLine extends VCFHeaderLine implements VCFIDHeaderLin
         this.genericFields.putAll(genericFields);
     }
 
+    @Override
     protected String toStringEncoding() {
         Map<String, Object> map = new LinkedHashMap<String, Object>();
         map.put("ID", name);
@@ -121,6 +122,7 @@ public class VCFSimpleHeaderLine extends VCFHeaderLine implements VCFIDHeaderLin
         return result;
     }
 
+    @Override
     public String getID() {
         return name;
     }
diff --git a/src/main/java/htsjdk/variant/vcf/VCFUtils.java b/src/main/java/htsjdk/variant/vcf/VCFUtils.java
index c8eceea..fb6035e 100644
--- a/src/main/java/htsjdk/variant/vcf/VCFUtils.java
+++ b/src/main/java/htsjdk/variant/vcf/VCFUtils.java
@@ -32,26 +32,23 @@ import htsjdk.variant.utils.GeneralUtils;
 import java.io.File;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.TreeMap;
 
 public class VCFUtils {
 
     public static Set<VCFHeaderLine> smartMergeHeaders(final Collection<VCFHeader> headers, final boolean emitWarnings) throws IllegalStateException {
         // We need to maintain the order of the VCFHeaderLines, otherwise they will be scrambled in the returned Set.
         // This will cause problems for VCFHeader.getSequenceDictionary and anything else that implicitly relies on the line ordering.
-        final TreeMap<String, VCFHeaderLine> map = new TreeMap<String, VCFHeaderLine>(); // from KEY.NAME -> line
+        final LinkedHashMap<String, VCFHeaderLine> map = new LinkedHashMap<>(); // from KEY.NAME -> line
         final HeaderConflictWarner conflictWarner = new HeaderConflictWarner(emitWarnings);
 
         // todo -- needs to remove all version headers from sources and add its own VCF version line
         for ( final VCFHeader source : headers ) {
-            //System.out.printf("Merging in header %s%n", source);
             for ( final VCFHeaderLine line : source.getMetaDataInSortedOrder()) {
 
                 String key = line.getKey();
@@ -102,12 +99,11 @@ public class VCFUtils {
                     }
                 } else {
                     map.put(key, line);
-                    //System.out.printf("Adding header line %s%n", line);
                 }
             }
         }
         // returning a LinkedHashSet so that ordering will be preserved. Ensures the contig lines do not get scrambled.
-        return new LinkedHashSet<VCFHeaderLine>(map.values());
+        return new LinkedHashSet<>(map.values());
     }
 
     /**
@@ -187,6 +183,8 @@ public class VCFUtils {
             assembly = "hg18";
         else if (refPath.contains("hg19"))
             assembly = "hg19";
+        else if (refPath.contains("hg38"))
+            assembly = "hg38";
         return assembly;
     }
 
diff --git a/src/test/java/htsjdk/HtsjdkTest.java b/src/test/java/htsjdk/HtsjdkTest.java
new file mode 100644
index 0000000..4da626b
--- /dev/null
+++ b/src/test/java/htsjdk/HtsjdkTest.java
@@ -0,0 +1,10 @@
+package htsjdk;
+
+import org.scalatest.testng.TestNGSuite;
+
+/**
+ * Base class for all Java tests in HTSJDK.
+ */
+public class HtsjdkTest extends TestNGSuite {
+
+}
diff --git a/src/test/java/htsjdk/cram/io/ExternalCompressionTest.java b/src/test/java/htsjdk/cram/io/ExternalCompressionTest.java
index 09f6e49..60a6519 100644
--- a/src/test/java/htsjdk/cram/io/ExternalCompressionTest.java
+++ b/src/test/java/htsjdk/cram/io/ExternalCompressionTest.java
@@ -1,16 +1,15 @@
-package htsjdk.samtools.cram.io;
+package htsjdk.cram.io;
 
-import org.apache.commons.compress.utils.IOUtils;
+import htsjdk.HtsjdkTest;
+import htsjdk.samtools.cram.io.ExternalCompression;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.nio.file.Files;
 
-public class ExternalCompressionTest {
+public class ExternalCompressionTest extends HtsjdkTest {
     public static final File BZIP2_FILE = new File("src/test/resources/htsjdk/samtools/cram/io/bzip2-test.bz2");
     public static final byte [] TEST_BYTES = "This is a simple string to test BZip2".getBytes();
 
diff --git a/src/test/java/htsjdk/samtools/AbstractBAMFileIndexTest.java b/src/test/java/htsjdk/samtools/AbstractBAMFileIndexTest.java
index 74c2dd7..cf451b8 100644
--- a/src/test/java/htsjdk/samtools/AbstractBAMFileIndexTest.java
+++ b/src/test/java/htsjdk/samtools/AbstractBAMFileIndexTest.java
@@ -1,11 +1,12 @@
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.seekablestream.SeekableStream;
 import org.testng.annotations.Test;
 
 import java.io.IOException;
 
-public class AbstractBAMFileIndexTest {
+public class AbstractBAMFileIndexTest extends HtsjdkTest {
 
     /**
      * @see <a href="https://github.com/samtools/htsjdk/issues/73">https://github.com/samtools/htsjdk/issues/73</a>
@@ -59,4 +60,4 @@ public class AbstractBAMFileIndexTest {
         buffer.readInteger();
         buffer.readBytes(new byte[10000]);
     }
-}
\ No newline at end of file
+}
diff --git a/src/test/java/htsjdk/samtools/BAMCigarOverflowTest.java b/src/test/java/htsjdk/samtools/BAMCigarOverflowTest.java
index dd630f9..8f91c64 100644
--- a/src/test/java/htsjdk/samtools/BAMCigarOverflowTest.java
+++ b/src/test/java/htsjdk/samtools/BAMCigarOverflowTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.CloserUtil;
 import org.testng.annotations.Test;
 import static org.testng.Assert.assertEquals;
@@ -10,7 +11,7 @@ import java.io.File;
  * Test the fix of a bug reported by s-andrews in which the use of an arithmetic rather than a logical right shift in BinaryCigarCodec.binaryCigarToCigarElement()
  * causes an overflow in the CIGAR when reading a BAM file for a read that spans a very large intron.
  */
-public class BAMCigarOverflowTest {
+public class BAMCigarOverflowTest extends HtsjdkTest {
     private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools");
 
     @Test
diff --git a/src/test/java/htsjdk/samtools/BAMFileIndexTest.java b/src/test/java/htsjdk/samtools/BAMFileIndexTest.java
index 170bc47..0271ade 100755
--- a/src/test/java/htsjdk/samtools/BAMFileIndexTest.java
+++ b/src/test/java/htsjdk/samtools/BAMFileIndexTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.CloseableIterator;
 import htsjdk.samtools.util.CloserUtil;
 import htsjdk.samtools.util.StopWatch;
@@ -46,7 +47,7 @@ import static org.testng.Assert.*;
 /**
  * Test BAM file indexing.
  */
-public class BAMFileIndexTest {
+public class BAMFileIndexTest extends HtsjdkTest {
     private final File BAM_FILE = new File("src/test/resources/htsjdk/samtools/BAMFileIndexTest/index_test.bam");
     private final boolean mVerbose = false;
 
@@ -78,8 +79,7 @@ public class BAMFileIndexTest {
     }
 
     @Test(groups = {"slow"})
-    public void testRandomQueries()
-            throws Exception {
+    public void testRandomQueries() throws Exception {
         runRandomTest(BAM_FILE, 1000, new Random());
     }
 
@@ -181,6 +181,21 @@ public class BAMFileIndexTest {
         CloserUtil.close(reader);
     }
 
+    @DataProvider(name = "queryIntervalsData")
+    public Object[][] queryIntervalsData(){
+        return new Object[][] {
+                {true, 1},
+                {false, 2}
+        };
+    }
+    @Test(dataProvider = "queryIntervalsData")
+    public void testQueryIntervals(final boolean contained, final int expected) {
+        final SamReader reader = SamReaderFactory.makeDefault().enable().open(BAM_FILE);
+
+        final CloseableIterator<SAMRecord> it = reader.query("chr1", 202661637, 202661812, contained);
+        Assert.assertEquals(countElements(it), expected);
+    }
+
     @Test
     public void testQueryMate() {
         final SamReader reader = SamReaderFactory.makeDefault().open(BAM_FILE);
diff --git a/src/test/java/htsjdk/samtools/BAMFileSpanTest.java b/src/test/java/htsjdk/samtools/BAMFileSpanTest.java
new file mode 100644
index 0000000..06d1bc9
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/BAMFileSpanTest.java
@@ -0,0 +1,72 @@
+package htsjdk.samtools;
+
+import java.util.Arrays;
+
+import htsjdk.HtsjdkTest;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+public class BAMFileSpanTest extends HtsjdkTest {
+  @Test(dataProvider = "testRemoveContentsBeforeProvider")
+  public void testRemoveContentsBefore(BAMFileSpan originalSpan, BAMFileSpan cutoff,
+      BAMFileSpan expectedSpan) {
+    // only start value in cutoff is used
+    Assert.assertEquals(
+        ((BAMFileSpan) originalSpan.removeContentsBefore(cutoff)).getChunks(),
+        expectedSpan.getChunks());
+  }
+
+  @DataProvider(name = "testRemoveContentsBeforeProvider")
+  private Object[][] testRemoveContentsBeforeProvider() {
+    return new Object[][] {
+        { span(chunk(6,10), chunk(11,15)), null, span(chunk(6,10), chunk(11,15)) },
+        { span(chunk(6,10), chunk(11,15)), span(), span(chunk(6,10), chunk(11,15)) },
+        { span(chunk(6,10), chunk(11,15)), span(chunk(6,0)), span(chunk(6,10), chunk(11,15)) },
+        { span(chunk(6,10), chunk(11,15)), span(chunk(7,0)), span(chunk(7,10), chunk(11,15)) },
+        { span(chunk(6,10), chunk(11,15)), span(chunk(9,0)), span(chunk(9,10), chunk(11,15)) },
+        { span(chunk(6,10), chunk(11,15)), span(chunk(10,0)), span(chunk(11,15)) },
+        { span(chunk(6,10), chunk(11,15)), span(chunk(11,0)), span(chunk(11,15)) },
+        { span(chunk(6,10), chunk(11,15)), span(chunk(12,0)), span(chunk(12,15)) },
+        { span(chunk(6,10), chunk(11,15)), span(chunk(15,0)), span() },
+        { span(chunk(6,10), chunk(11,15)), span(chunk(16,0)), span() },
+        { span(chunk(6,10), chunk(11,15)), span(chunk(6,10), chunk(7,16)), span(chunk(6, 10), chunk(11,15)) },
+        { span(chunk(6,10), chunk(11,15)), span(chunk(16,17), chunk(18,19)), span() },
+    };
+  }
+
+  @Test(dataProvider = "testRemoveContentsAfterProvider")
+  public void testRemoveContentsAfter(BAMFileSpan originalSpan, BAMFileSpan cutoff,
+      BAMFileSpan expectedSpan) {
+    // only end value in cutoff is used
+    Assert.assertEquals(
+        ((BAMFileSpan) originalSpan.removeContentsAfter(cutoff)).getChunks(),
+        expectedSpan.getChunks());
+  }
+
+  @DataProvider(name = "testRemoveContentsAfterProvider")
+  private Object[][] testRemoveContentsAfterProvider() {
+    return new Object[][] {
+        { span(chunk(6,10), chunk(11,15)), null, span(chunk(6,10), chunk(11,15)) },
+        { span(chunk(6,10), chunk(11,15)), span(), span(chunk(6,10), chunk(11,15)) },
+        { span(chunk(6,10), chunk(11,15)), span(chunk(0,6)), span() },
+        { span(chunk(6,10), chunk(11,15)), span(chunk(0,7)), span(chunk(6,7)) },
+        { span(chunk(6,10), chunk(11,15)), span(chunk(0,9)), span(chunk(6,9)) },
+        { span(chunk(6,10), chunk(11,15)), span(chunk(0,10)), span(chunk(6,10)) },
+        { span(chunk(6,10), chunk(11,15)), span(chunk(0,11)), span(chunk(6,10)) },
+        { span(chunk(6,10), chunk(11,15)), span(chunk(0,12)), span(chunk(6,10), chunk(11,12)) },
+        { span(chunk(6,10), chunk(11,15)), span(chunk(0,15)), span(chunk(6,10), chunk(11,15)) },
+        { span(chunk(6,10), chunk(11,15)), span(chunk(0,16)), span(chunk(6,10), chunk(11,15)) },
+        { span(chunk(6,10), chunk(11,15)), span(chunk(0,6), chunk(7,10)), span(chunk(6, 10)) },
+        { span(chunk(6,10), chunk(11,15)), span(chunk(0,6), chunk(7,16)), span(chunk(6, 10), chunk(11,15)) },
+    };
+  }
+
+  private BAMFileSpan span(Chunk... chunks) {
+    return new BAMFileSpan(Arrays.asList(chunks));
+  }
+
+  private Chunk chunk(long start, long end) {
+    return new Chunk(start, end);
+  }
+}
diff --git a/src/test/java/htsjdk/samtools/BAMFileWriterTest.java b/src/test/java/htsjdk/samtools/BAMFileWriterTest.java
index a8944d0..3bb46e6 100644
--- a/src/test/java/htsjdk/samtools/BAMFileWriterTest.java
+++ b/src/test/java/htsjdk/samtools/BAMFileWriterTest.java
@@ -23,19 +23,24 @@
  */
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.CloseableIterator;
 import htsjdk.samtools.util.CloserUtil;
+import htsjdk.samtools.util.SequenceUtil;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
 import java.io.File;
+import java.io.IOException;
 
 /**
  * Test that BAM writing doesn't blow up.  For presorted writing, the resulting BAM file is read and contents are
  * compared with the original SAM file.
  */
-public class BAMFileWriterTest {
+public class BAMFileWriterTest extends HtsjdkTest {
 
     private SAMRecordSetBuilder getRecordSetBuilder(final boolean sortForMe, final SAMFileHeader.SortOrder sortOrder) {
         final SAMRecordSetBuilder ret = new SAMRecordSetBuilder(sortForMe, sortOrder);
@@ -189,4 +194,39 @@ public class BAMFileWriterTest {
         testHelper(getRecordSetBuilder(true, SAMFileHeader.SortOrder.coordinate), SAMFileHeader.SortOrder.queryname, true);
         Assert.fail("Exception should be thrown");
     }
+
+
+    /**
+     * A test to check that BAM changes read bases according with {@link SequenceUtil#toBamReadBasesInPlace}.
+     */
+    @Test
+    public void testBAMReadBases() throws IOException {
+        final SAMFileHeader header = new SAMFileHeader();
+        header.addSequence(new SAMSequenceRecord("1", SequenceUtil.getIUPACCodesString().length()));
+        header.addReadGroup(new SAMReadGroupRecord("rg1"));
+
+        final SAMRecord originalSAMRecord = new SAMRecord(header);
+        originalSAMRecord.setReadName("test");
+        originalSAMRecord.setReferenceIndex(0);
+        originalSAMRecord.setAlignmentStart(1);
+        originalSAMRecord.setReadBases(SequenceUtil.getIUPACCodesString().getBytes());
+        originalSAMRecord.setCigarString(originalSAMRecord.getReadLength() + "M");
+        originalSAMRecord.setBaseQualities(SAMRecord.NULL_QUALS);
+
+        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        try (final BAMFileWriter writer = new BAMFileWriter(baos, null)) {
+            writer.setHeader(header);
+            writer.addAlignment(originalSAMRecord);
+        }
+
+
+        final BAMFileReader reader = new BAMFileReader(new ByteArrayInputStream(baos.toByteArray()), null, true, false, ValidationStringency.SILENT, new DefaultSAMRecordFactory());
+        final CloseableIterator<SAMRecord> iterator = reader.getIterator();
+        iterator.hasNext();
+        final SAMRecord recordFromBAM = iterator.next();
+
+        Assert.assertNotEquals(recordFromBAM.getReadBases(), originalSAMRecord.getReadBases());
+        Assert.assertEquals(recordFromBAM.getReadBases(), SequenceUtil.toBamReadBasesInPlace(originalSAMRecord.getReadBases()));
+    }
+
 }
diff --git a/src/test/java/htsjdk/samtools/BAMIndexWriterTest.java b/src/test/java/htsjdk/samtools/BAMIndexWriterTest.java
index 09f9236..da68126 100644
--- a/src/test/java/htsjdk/samtools/BAMIndexWriterTest.java
+++ b/src/test/java/htsjdk/samtools/BAMIndexWriterTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.CloserUtil;
 import htsjdk.samtools.util.IOUtil;
 import org.testng.annotations.DataProvider;
@@ -38,7 +39,7 @@ import static org.testng.Assert.assertTrue;
 /**
  * Test BAM file index creation
  */
-public class BAMIndexWriterTest {
+public class BAMIndexWriterTest extends HtsjdkTest {
     // Two input files for basic test
     private final String BAM_FILE_LOCATION = "src/test/resources/htsjdk/samtools/BAMFileIndexTest/index_test.bam";
     private final String BAI_FILE_LOCATION = "src/test/resources/htsjdk/samtools/BAMFileIndexTest/index_test.bam.bai";
@@ -57,7 +58,7 @@ public class BAMIndexWriterTest {
         final File javaBaiFile = File.createTempFile("javaBai.", "java.bai");
         final File javaBaiTxtFile = new File(javaBaiFile.getAbsolutePath() + ".txt");
         final SamReader bam = SamReaderFactory.makeDefault().enable(SamReaderFactory.Option.INCLUDE_SOURCE_IN_RECORDS).open(BAM_FILE);
-        BAMIndexer.createIndex(bam, javaBaiFile);
+        BAMIndexer.createIndex(bam, javaBaiFile.toPath());
         verbose("Wrote binary Java BAM Index file " + javaBaiFile);
 
         // now, turn the bai file into text
@@ -76,7 +77,7 @@ public class BAMIndexWriterTest {
         // Compare java-generated bai file with c-generated and sorted bai file
         final File javaBaiFile = File.createTempFile("javaBai.", ".bai");
         final SamReader bam = SamReaderFactory.makeDefault().enable(SamReaderFactory.Option.INCLUDE_SOURCE_IN_RECORDS).open(BAM_FILE);
-        BAMIndexer.createIndex(bam, javaBaiFile);
+        BAMIndexer.createIndex(bam, javaBaiFile.toPath());
         verbose("Wrote binary java BAM Index file " + javaBaiFile);
 
         final File cRegeneratedBaiFile = File.createTempFile("cBai.", ".bai");
@@ -213,7 +214,7 @@ public class BAMIndexWriterTest {
     private File createIndexFile(File bamFile) throws IOException {
         final File bamIndexFile = File.createTempFile("Bai.", ".bai");
         final SamReader bam = SamReaderFactory.makeDefault().open(bamFile);
-        BAMIndexer.createIndex(bam, bamIndexFile);
+        BAMIndexer.createIndex(bam, bamIndexFile.toPath());
         verbose("Wrote BAM Index file " + bamIndexFile);
         bam.close();
         return bamIndexFile;
diff --git a/src/test/java/htsjdk/samtools/BAMIteratorTest.java b/src/test/java/htsjdk/samtools/BAMIteratorTest.java
index 5fa9e7d..6fa67cd 100644
--- a/src/test/java/htsjdk/samtools/BAMIteratorTest.java
+++ b/src/test/java/htsjdk/samtools/BAMIteratorTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.CloseableIterator;
 import htsjdk.samtools.util.CloserUtil;
 import org.testng.Assert;
@@ -34,7 +35,7 @@ import java.io.File;
 /**
  * @author alecw at broadinstitute.org
  */
-public class BAMIteratorTest {
+public class BAMIteratorTest extends HtsjdkTest {
     private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools");
 
     @Test(dataProvider = "dataProvider")
diff --git a/src/test/java/htsjdk/samtools/BAMQueryMultipleIntervalsIteratorFilterTest.java b/src/test/java/htsjdk/samtools/BAMQueryMultipleIntervalsIteratorFilterTest.java
index 7c0bb1f..d25e7ba 100644
--- a/src/test/java/htsjdk/samtools/BAMQueryMultipleIntervalsIteratorFilterTest.java
+++ b/src/test/java/htsjdk/samtools/BAMQueryMultipleIntervalsIteratorFilterTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
@@ -7,7 +8,7 @@ import org.testng.annotations.Test;
 import java.util.Arrays;
 import java.util.Random;
 
-public class BAMQueryMultipleIntervalsIteratorFilterTest {
+public class BAMQueryMultipleIntervalsIteratorFilterTest extends HtsjdkTest {
 
     private final byte[] BASES = {'A', 'C', 'G', 'T'};
     private final Random random = new Random();
diff --git a/src/test/java/htsjdk/samtools/BAMRemoteFileTest.java b/src/test/java/htsjdk/samtools/BAMRemoteFileTest.java
index 4b686cf..dccfddc 100644
--- a/src/test/java/htsjdk/samtools/BAMRemoteFileTest.java
+++ b/src/test/java/htsjdk/samtools/BAMRemoteFileTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.CloserUtil;
 import htsjdk.samtools.util.TestUtil;
 import org.testng.annotations.Test;
@@ -40,7 +41,7 @@ import static org.testng.Assert.*;
 /**
  * Test BAM file indexing.
  */
-public class BAMRemoteFileTest {
+public class BAMRemoteFileTest extends HtsjdkTest {
     private final File BAM_INDEX_FILE = new File("src/test/resources/htsjdk/samtools/BAMFileIndexTest/index_test.bam.bai");
     private final File BAM_FILE = new File("src/test/resources/htsjdk/samtools/BAMFileIndexTest/index_test.bam");
     private final String BAM_URL_STRING = TestUtil.BASE_URL_FOR_HTTP_TESTS + "index_test.bam";
diff --git a/src/test/java/htsjdk/samtools/BinTest.java b/src/test/java/htsjdk/samtools/BinTest.java
index 271a411..6009ed3 100644
--- a/src/test/java/htsjdk/samtools/BinTest.java
+++ b/src/test/java/htsjdk/samtools/BinTest.java
@@ -24,12 +24,13 @@
 
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
 import java.util.Collections;
 
-public class BinTest {
+public class BinTest extends HtsjdkTest {
     @Test
     public void testEmptyBin() {
         // Construct a new empty bin and ensure that the bin list is empty, not null.
diff --git a/src/test/java/htsjdk/samtools/BinningIndexBuilderTest.java b/src/test/java/htsjdk/samtools/BinningIndexBuilderTest.java
new file mode 100644
index 0000000..72d3f8a
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/BinningIndexBuilderTest.java
@@ -0,0 +1,105 @@
+package htsjdk.samtools;
+
+import htsjdk.HtsjdkTest;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+
+public class BinningIndexBuilderTest extends HtsjdkTest {
+
+    private final static int REFERNCE_SEQUENCE_INDEX = 19;
+
+    @DataProvider(name="BinningFeatures")
+    public Object[][] getBinningFeatures() {
+        return new Object[][]{
+                { // single feature in first bin at offset 0
+                    Collections.singletonList(
+                        new MockIndexableFeature(REFERNCE_SEQUENCE_INDEX, 1, 10, 0, 25)),
+                    Collections.singletonList( new Chunk(0, 25)),
+                    new long[] { 0L }
+                },
+                { // single feature in first bin at non-zero offset
+                    Collections.singletonList(
+                            new MockIndexableFeature(REFERNCE_SEQUENCE_INDEX, 1, 10, 100, 125)),
+                    Collections.singletonList( new Chunk(100, 125)),
+                    new long[] { 100L }
+                },
+                { // two features spanning two bins at non-zero offsets
+                    Arrays.asList(
+                            new MockIndexableFeature(REFERNCE_SEQUENCE_INDEX, 2, 13, 100, 125),
+                            new MockIndexableFeature(REFERNCE_SEQUENCE_INDEX, 22222, 22223, 22222, 22225)),
+                    Arrays.asList(new Chunk(100, 125), new Chunk(22222, 22225)),
+                    new long[] {100L, 22222L }
+                },
+                { // two features in first bin, one at offset 0, plus one feature in the second bin
+                  // https://github.com/samtools/htsjdk/issues/943
+                    Arrays.asList(
+                        new MockIndexableFeature(REFERNCE_SEQUENCE_INDEX, 1, 10, 0, 25),
+                        new MockIndexableFeature(REFERNCE_SEQUENCE_INDEX, 2, 13, 100, 125),
+                        new MockIndexableFeature(REFERNCE_SEQUENCE_INDEX, 22222, 22223, 22222, 22225)),
+                    Arrays.asList(new Chunk(0, 125), new Chunk(22222, 22225)),
+                    new long[] {0L, 22222L }
+                }
+        };
+    }
+
+    @Test(dataProvider = "BinningFeatures")
+    public void testFeatureAtOffsetZero(
+            final List<MockIndexableFeature> mockFeatures,
+            final List<Chunk> expectedChunks,
+            final long[] expectedBins)
+    {
+        // use a sequence length that spans at least two (16k) binning blocks
+        final BinningIndexBuilder bib = new BinningIndexBuilder(REFERNCE_SEQUENCE_INDEX, 40000);
+
+        mockFeatures.forEach(bib::processFeature);
+
+        final BinningIndexContent bic = bib.generateIndexContent();
+
+        Assert.assertEquals(expectedBins, bic.getLinearIndex().getIndexEntries());
+        Assert.assertEquals(expectedChunks, bic.getAllChunks());
+    }
+
+    private static class MockIndexableFeature implements BinningIndexBuilder.FeatureToBeIndexed {
+        private final int referenceIndex;
+        private final int startCoordinate;
+        private final int endCoordinate;
+        private final long startOffset;
+        private long endOffset;
+
+        private MockIndexableFeature(
+                final int referenceIndex,
+                final int startCoordinate,
+                final int endCoordinate,
+                final long startOffset,
+                final long endOffset) {
+            this.referenceIndex = referenceIndex;
+            this.startCoordinate = startCoordinate;
+            this.endCoordinate = endCoordinate;
+            this.startOffset = startOffset;
+            this.endOffset = endOffset;
+        }
+
+        @Override
+        public int getStart() {
+            return startCoordinate;
+        }
+
+        @Override
+        public int getEnd() {
+            return endCoordinate;
+        }
+
+        @Override
+        public Integer getIndexingBin() { return null; }
+
+        @Override
+        public Chunk getChunk() { return new Chunk(startOffset, endOffset); }
+    }
+
+}
diff --git a/src/test/java/htsjdk/samtools/CRAMBAIIndexerTest.java b/src/test/java/htsjdk/samtools/CRAMBAIIndexerTest.java
index 6f3b954..ce32e7a 100644
--- a/src/test/java/htsjdk/samtools/CRAMBAIIndexerTest.java
+++ b/src/test/java/htsjdk/samtools/CRAMBAIIndexerTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.cram.build.ContainerFactory;
 import htsjdk.samtools.cram.structure.Container;
 import htsjdk.samtools.cram.structure.CramCompressionRecord;
@@ -17,7 +18,7 @@ import java.util.List;
 /**
  * Created by vadim on 12/01/2016.
  */
-public class CRAMBAIIndexerTest {
+public class CRAMBAIIndexerTest extends HtsjdkTest {
 
     private static CramCompressionRecord createRecord(int recordIndex, int seqId, int start) {
         byte[] bases = "AAAAA".getBytes();
diff --git a/src/test/java/htsjdk/samtools/CRAMCRAIIndexerTest.java b/src/test/java/htsjdk/samtools/CRAMCRAIIndexerTest.java
index 11d2f3c..c5a9634 100644
--- a/src/test/java/htsjdk/samtools/CRAMCRAIIndexerTest.java
+++ b/src/test/java/htsjdk/samtools/CRAMCRAIIndexerTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.cram.CRAIEntry;
 import htsjdk.samtools.cram.build.CramContainerIterator;
 import htsjdk.samtools.cram.ref.ReferenceSource;
@@ -17,12 +18,12 @@ import java.util.List;
  * Companion to CRAMBAIIndexerTest, for testing CRAI indices created on cram
  * streams;
  */
-public class CRAMCRAIIndexerTest {
+public class CRAMCRAIIndexerTest extends HtsjdkTest {
 
     @Test
     public void testCRAIIndexerFromContainer() throws IOException {
         final File CRAMFile = new File("src/test/resources/htsjdk/samtools/cram/test2.cram");
-        final File refFile = new File("src/test/resources/htsjdk/samtools/cram/test2.fa");
+        final File refFile = new File("src/test/resources/htsjdk/samtools/cram/auxf.fa");
         ReferenceSource refSource = new ReferenceSource(refFile);
         CRAMFileReader reader = new CRAMFileReader(
                 CRAMFile,
@@ -55,7 +56,7 @@ public class CRAMCRAIIndexerTest {
     @Test
     public void testCRAIIndexerFromStream() throws IOException {
         final File CRAMFile = new File("src/test/resources/htsjdk/samtools/cram/test2.cram");
-        final File refFile = new File("src/test/resources/htsjdk/samtools/cram/test2.fa");
+        final File refFile = new File("src/test/resources/htsjdk/samtools/cram/auxf.fa");
         ReferenceSource refSource = new ReferenceSource(refFile);
 
         // get the header to use
@@ -180,4 +181,4 @@ public class CRAMCRAIIndexerTest {
         return count;
     }
 
-}
\ No newline at end of file
+}
diff --git a/src/test/java/htsjdk/samtools/CRAMComplianceTest.java b/src/test/java/htsjdk/samtools/CRAMComplianceTest.java
index 81cd2f9..cede96f 100644
--- a/src/test/java/htsjdk/samtools/CRAMComplianceTest.java
+++ b/src/test/java/htsjdk/samtools/CRAMComplianceTest.java
@@ -1,60 +1,122 @@
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
+import com.google.common.jimfs.Configuration;
+import com.google.common.jimfs.Jimfs;
+import htsjdk.samtools.cram.build.CramIO;
 import htsjdk.samtools.cram.common.CramVersions;
 import htsjdk.samtools.cram.ref.ReferenceSource;
 import htsjdk.samtools.seekablestream.SeekableStream;
 import htsjdk.samtools.util.Log;
+
+import htsjdk.samtools.util.SequenceUtil;
+import java.nio.file.*;
 import org.testng.Assert;
 import org.testng.annotations.BeforeTest;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
+import java.io.*;
+
+import java.security.NoSuchAlgorithmException;
 import java.util.ArrayList;
 import java.util.List;
 
 /**
  * Created by vadim on 28/04/2015.
  */
-public class CRAMComplianceTest {
+public class CRAMComplianceTest extends HtsjdkTest {
 
-    @DataProvider(name = "test1")
-    public Object[][] createData1() {
-        return new Object[][]{
-                {"auxf#values"},
+    @FunctionalInterface
+    public interface TriConsumer<T1, T2, T3> {
+        abstract void accept(T1 arg1, T2 arg2, T3 arg3);
+    }
+
+    // The files in this provider expose a defect in CRAM conversion of one kind or another
+    // so the tests are executed using partial verification
+    @DataProvider(name = "partialVerification")
+    public Object[][] getPartialVerificationData() {
+        return new Object[][] {
+                {"auxf#values"},    // unsigned attributes: https://github.com/samtools/htsjdk/issues/499
+                {"c1#noseq"},       // unsigned attributes: https://github.com/samtools/htsjdk/issues/499
+                {"c1#unknown"},     // unsigned attributes: https://github.com/samtools/htsjdk/issues/499
+                {"ce#5b"},          // reads with no read bases: https://github.com/samtools/htsjdk/issues/509
+                {"ce#tag_depadded"},// reads with no read bases: https://github.com/samtools/htsjdk/issues/509
+                {"ce#tag_padded"},  // reads with no read bases: https://github.com/samtools/htsjdk/issues/509
+                {"ce#unmap"},       // unmapped reads with non-zero MAPQ value that is not restored
+                                    // https://github.com/samtools/htsjdk/issues/714
+                {"xx#triplet"},     // the version 2.1 variant of this file has a bad insertSize, which is
+                                    // probably residual detritus from https://github.com/samtools/htsjdk/issues/364
+                {"xx#minimal"},     // cigar string "5H0M5H" is restored as "10H"
+                                    // https://github.com/samtools/htsjdk/issues/713
+        };
+    }
+
+    @Test(dataProvider = "partialVerification")
+    public void partialVerificationTest(String name) throws IOException {
+        // do compliance test with partial validation to work around known limitations
+        doComplianceTest(name, this::assertSameRecordsPartial);
+    }
+
+    // Files that can be subjected to full SAMRecord equality after conversion
+    @DataProvider(name = "fullVerification")
+    public Object[][] getFullVerificationData() {
+        return new Object[][] {
                 {"c1#bounds"},
                 {"c1#clip"},
-                {"c1#noseq"},
                 {"c1#pad1"},
                 {"c1#pad2"},
                 {"c1#pad3"},
-                {"c1#unknown"},
                 {"ce#1"},
                 {"ce#2"},
-                {"ce#5b"},
                 {"ce#5"},
                 {"ce#large_seq"},
                 {"ce#supp"},
-                {"ce#tag_depadded"},
-                {"ce#tag_padded"},
                 {"ce#unmap1"},
                 {"ce#unmap2"},
-                {"ce#unmap"},
                 {"xx#blank"},
                 {"xx#large_aux2"},
                 {"xx#large_aux"},
-                {"xx#minimal"},
                 {"xx#pair"},
                 {"xx#rg"},
-                {"xx#triplet"},
                 {"xx#unsorted"},
         };
     }
 
+    @Test(dataProvider = "fullVerification")
+    public void fullVerificationTest(String name) throws IOException {
+        doComplianceTest(name, (version, expected, actual) -> Assert.assertEquals(expected, actual));
+    }
+
+    // Files that can be subjected to full verification only after read base normalization, because either
+    // the reference or the reads contain ambiguity codes that are normalized by SequenceUtil.toBamReadBasesInPlace
+    // during the round-trip process.
+    @DataProvider(name = "ambiguityCodeVerification")
+    public Object[][] getAmbiguityCodeVerificationData() {
+        return new Object[][]{
+                {"amb#amb"}
+        };
+    }
+
+    @Test(dataProvider = "ambiguityCodeVerification")
+    public void ambiguityCodeVerificationTest(String name) throws IOException {
+        doComplianceTest(name,
+                (version, expected, actual) ->
+                {
+                    if (expected.getReadString().equals(actual.getReadString())) {
+                        Assert.assertEquals(expected, actual);
+                    } else {
+                        // tolerate BAM and CRAM conversion of read bases to upper case IUPAC codes by
+                        // creating a deep copy of the expected reads and normalizing (upper case IUPAC)
+                        // the bases; then proceeding with the full compare with the actual
+                        SAMRecord expectedNormalized = actual.deepCopy();
+                        final byte[] expectedBases = expectedNormalized.getReadBases();
+                        SequenceUtil.toBamReadBasesInPlace(expectedBases);
+                        Assert.assertEquals(actual, expectedNormalized);
+                    }
+                }
+                );
+    }
 
     @BeforeTest
     public void beforeTest() {
@@ -75,62 +137,60 @@ public class CRAMComplianceTest {
         }
     }
 
-    @Test(dataProvider = "test1")
-    public void test(String name) throws IOException {
+    private void doComplianceTest(
+            final String name,
+            final TriConsumer<Integer, SAMRecord, SAMRecord> assertFunction) throws IOException {
         TestCase t = new TestCase(new File("src/test/resources/htsjdk/samtools/cram/"), name);
 
-        ReferenceSource source = new ReferenceSource(t.refFile);
-        SamReader reader = SamReaderFactory.make().validationStringency(ValidationStringency.SILENT).open(t.bamFile);
-
-        final SAMRecordIterator samRecordIterator = reader.iterator();
-        List<SAMRecord> samRecords = new ArrayList<SAMRecord>();
-        while (samRecordIterator.hasNext())
-            samRecords.add(samRecordIterator.next());
-        SAMFileHeader samFileHeader = reader.getFileHeader();
-        reader.close();
+        // retrieve all records from the original file
+        List<SAMRecord> samRecords = getSAMRecordsFromFile(t.bamFile, t.refFile);
+        SAMFileHeader samFileHeader = getFileHeader(t.bamFile, t.refFile);
 
+        // write them to cram stream
         ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        ReferenceSource source = new ReferenceSource(t.refFile);
         CRAMFileWriter cramFileWriter = new CRAMFileWriter(baos, source, samFileHeader, name);
         for (SAMRecord samRecord : samRecords) {
             cramFileWriter.addAlignment(samRecord);
         }
         cramFileWriter.close();
 
-
-        CRAMFileReader cramFileReader = new CRAMFileReader(new ByteArrayInputStream(baos.toByteArray()), (SeekableStream)null, source, ValidationStringency.SILENT);
+        // read them back from the stream and compare to original sam via assertSameRecords
+        CRAMFileReader cramFileReader = new CRAMFileReader(new ByteArrayInputStream(baos.toByteArray()), (SeekableStream) null, source, ValidationStringency.SILENT);
         SAMRecordIterator cramFileReaderIterator = cramFileReader.getIterator();
         for (SAMRecord samRecord : samRecords) {
             Assert.assertTrue(cramFileReaderIterator.hasNext());
             SAMRecord restored = cramFileReaderIterator.next();
             Assert.assertNotNull(restored);
-            assertSameRecords(CramVersions.DEFAULT_CRAM_VERSION.major, samRecord, restored);
+            assertFunction.accept(CramVersions.DEFAULT_CRAM_VERSION.major, samRecord, restored);
         }
         Assert.assertFalse(cramFileReaderIterator.hasNext());
 
         //v2.1 test
-        cramFileReader = new CRAMFileReader(new FileInputStream(t.cramFile_21), (SeekableStream)null, source, ValidationStringency.SILENT);
+        cramFileReader = new CRAMFileReader(new FileInputStream(t.cramFile_21), (SeekableStream) null, source, ValidationStringency.SILENT);
         cramFileReaderIterator = cramFileReader.getIterator();
         for (SAMRecord samRecord : samRecords) {
             Assert.assertTrue(cramFileReaderIterator.hasNext());
             SAMRecord restored = cramFileReaderIterator.next();
             Assert.assertNotNull(restored);
-            assertSameRecords(CramVersions.CRAM_v2_1.major, samRecord, restored);
+            assertFunction.accept(CramVersions.CRAM_v2_1.major, samRecord, restored);
         }
         Assert.assertFalse(cramFileReaderIterator.hasNext());
 
         //v3.0 test
-        cramFileReader = new CRAMFileReader(new FileInputStream(t.cramFile_30), (SeekableStream)null, source, ValidationStringency.SILENT);
+        cramFileReader = new CRAMFileReader(new FileInputStream(t.cramFile_30), (SeekableStream) null, source, ValidationStringency.SILENT);
         cramFileReaderIterator = cramFileReader.getIterator();
         for (SAMRecord samRecord : samRecords) {
             Assert.assertTrue(cramFileReaderIterator.hasNext());
             SAMRecord restored = cramFileReaderIterator.next();
             Assert.assertNotNull(restored);
-            assertSameRecords(CramVersions.CRAM_v3.major, samRecord, restored);
+            assertFunction.accept(CramVersions.CRAM_v3.major, samRecord, restored);
         }
         Assert.assertFalse(cramFileReaderIterator.hasNext());
     }
 
-    private void assertSameRecords(int majorVersion, SAMRecord record1, SAMRecord record2) {
+    private void assertSameRecordsPartial(Integer majorVersion, SAMRecord record1, SAMRecord record2) {
+        // test a partial set of fields for equality, avoiding known CRAM conversion issues
         Assert.assertEquals(record2.getFlags(), record1.getFlags());
         Assert.assertEquals(record2.getReadName(), record1.getReadName());
         Assert.assertEquals(record2.getReferenceName(), record1.getReferenceName());
@@ -139,12 +199,204 @@ public class CRAMComplianceTest {
         /**
          * Known issue: CRAM v2.1 doesn't handle reads with missing bases correctly. This
          * causes '*' bases to arise when reading CRAM. Skipping the base comparison asserts.
+         * https://github.com/samtools/htsjdk/issues/509
          */
         if (record1.getReadBases() != SAMRecord.NULL_SEQUENCE || majorVersion >= CramVersions.CRAM_v3.major) {
-            Assert.assertEquals(record2.getReadBases(), record1.getReadBases());
+            // BAM and CRAM convert read bases to upper case IUPAC codes
+            final byte[] originalBases = record1.getReadBases();
+            SequenceUtil.toBamReadBasesInPlace(originalBases);
+            Assert.assertEquals(record2.getReadBases(), originalBases);
         }
 
         Assert.assertEquals(record2.getBaseQualities(), record1.getBaseQualities());
     }
 
+    @DataProvider(name = "CRAMSourceFiles")
+    public Object[][] getCRAMSources() {
+        final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools/cram");
+
+        return new Object[][] {
+                // Test cram file created with samtools using a *reference* that contains ambiguity codes
+                // 'R' and 'M', a single no call '.', and some lower case bases.
+                {new File(TEST_DATA_DIR, "samtoolsSliceMD5WithAmbiguityCodesTest.cram"),
+                        new File(TEST_DATA_DIR, "ambiguityCodes.fasta")},
+                {new File(TEST_DATA_DIR, "NA12878.20.21.1-100.100-SeqsPerSlice.0-unMapped.cram"),
+                        new File(TEST_DATA_DIR, "human_g1k_v37.20.21.1-100.fasta")},
+                {new File(TEST_DATA_DIR, "NA12878.20.21.1-100.100-SeqsPerSlice.1-unMapped.cram"),
+                        new File(TEST_DATA_DIR, "human_g1k_v37.20.21.1-100.fasta")},
+                {new File(TEST_DATA_DIR, "NA12878.20.21.1-100.100-SeqsPerSlice.500-unMapped.cram"),
+                        new File(TEST_DATA_DIR, "human_g1k_v37.20.21.1-100.fasta")},
+                {new File(TEST_DATA_DIR, "test.cram"), new File(TEST_DATA_DIR, "auxf.fa")},
+                {new File(TEST_DATA_DIR, "test2.cram"), new File(TEST_DATA_DIR, "auxf.fa")},
+        };
+    }
+
+    @Test(dataProvider = "CRAMSourceFiles")
+    public void testCRAMThroughBAMRoundTrip(final File originalCRAMFile, final File referenceFile) throws IOException {
+
+        // retrieve all records from the cram and make defensive deep copies
+        List<SAMRecord> originalCRAMRecords = getSAMRecordsFromFile(originalCRAMFile, referenceFile);
+        List<SAMRecord> copiedCRAMRecords = new ArrayList<>();
+        originalCRAMRecords.forEach(origRec -> copiedCRAMRecords.add(origRec.deepCopy()));
+
+        // write copies of the CRAM records to a BAM, and then read them back in
+        SAMFileHeader samHeader;
+        List<SAMRecord> bamRecords;
+        try (FileSystem jimfs = Jimfs.newFileSystem(Configuration.unix())) {
+            final Path tempBam = jimfs.getPath("testCRAMToBAMToCRAM" + BamFileIoUtils.BAM_FILE_EXTENSION);
+            samHeader = getFileHeader(originalCRAMFile, referenceFile);
+            writeRecordsToPath(copiedCRAMRecords, tempBam, referenceFile, samHeader);
+            bamRecords = getSAMRecordsFromPath(tempBam, referenceFile);
+        }
+
+        // compare to originals
+        int i = 0;
+        for (SAMRecord rec : bamRecords) {
+            rec.setIndexingBin(null);
+            Assert.assertTrue(rec.equals(originalCRAMRecords.get(i++)));
+        }
+        Assert.assertEquals(i, originalCRAMRecords.size());
+
+        // write the BAM records to a CRAM and read them back in
+        List<SAMRecord> roundTripCRAMRecords;
+        try (FileSystem jimfs = Jimfs.newFileSystem(Configuration.unix())) {
+            final Path tempCRAM = jimfs.getPath("testCRAMToBAMToCRAM" + CramIO.CRAM_FILE_EXTENSION);
+            writeRecordsToPath(bamRecords, tempCRAM, referenceFile, samHeader);
+            roundTripCRAMRecords = getSAMRecordsFromPath(tempCRAM, referenceFile);
+        }
+
+        // compare to originals
+        i = 0;
+        for (SAMRecord rec : roundTripCRAMRecords) {
+            Assert.assertTrue(rec.equals(originalCRAMRecords.get(i++)));
+        }
+        Assert.assertEquals(i, originalCRAMRecords.size());
+    }
+
+    @Test
+    public void testBAMThroughCRAMRoundTrip() throws IOException, NoSuchAlgorithmException {
+        final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools/cram");
+
+        // These files are reduced versions of the CEUTrio.HiSeq.WGS.b37.NA12878.20.21.bam and human_g1k_v37.20.21.fasta
+        // files used in GATK4 tests. The first 8000 records from chr20 were extracted; from those around 80 placed but
+        // unmapped reads that contained cigar elements were removed, along with one read who's mate was on chr21.
+        // Finally all read positions were remapped to the subsetted reference file, which contains only the ~9000 bases
+        // used by the reduced read set.
+        final File originalBAMInputFile = new File(TEST_DATA_DIR, "CEUTrio.HiSeq.WGS.b37.NA12878.20.first.8000.bam");
+        final File referenceFile = new File(TEST_DATA_DIR, "human_g1k_v37.20.subset.fasta");
+
+        // retrieve all records from the bam and reset the indexing bins to keep comparisons with
+        // cram records from failing
+        List<SAMRecord> originalBAMRecords = getSAMRecordsFromFile(originalBAMInputFile, referenceFile);
+        for (int i = 0; i < originalBAMRecords.size(); i++) {
+            originalBAMRecords.get(i).setIndexingBin(null);
+        }
+
+        // write the BAM records to a temporary CRAM
+        final File tempCRAMFile = File.createTempFile("testBAMThroughCRAMRoundTrip", CramIO.CRAM_FILE_EXTENSION);
+        tempCRAMFile.deleteOnExit();
+        SAMFileHeader samHeader = getFileHeader(originalBAMInputFile, referenceFile);
+        writeRecordsToFile(originalBAMRecords, tempCRAMFile, referenceFile, samHeader);
+
+        // read the CRAM records back in and compare to the original BAM records
+        List<SAMRecord> cramRecords = getSAMRecordsFromFile(tempCRAMFile, referenceFile);
+        Assert.assertEquals(cramRecords.size(), originalBAMRecords.size());
+        for (int i = 0; i < originalBAMRecords.size(); i++) {
+            Assert.assertEquals(originalBAMRecords.get(i), cramRecords.get(i));
+        }
+    }
+
+    @Test
+    public void testBAMThroughCRAMRoundTripViaPath() throws IOException, NoSuchAlgorithmException {
+        final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools/cram");
+
+        // These files are reduced versions of the CEUTrio.HiSeq.WGS.b37.NA12878.20.21.bam and human_g1k_v37.20.21.fasta
+        // files used in GATK4 tests. The first 8000 records from chr20 were extracted; from those around 80 placed but
+        // unmapped reads that contained cigar elements were removed, along with one read who's mate was on chr21.
+        // Finally all read positions were remapped to the subsetted reference file, which contains only the ~9000 bases
+        // used by the reduced read set.
+        final File originalBAMInputFile = new File(TEST_DATA_DIR, "CEUTrio.HiSeq.WGS.b37.NA12878.20.first.8000.bam");
+        final File referenceFile = new File(TEST_DATA_DIR, "human_g1k_v37.20.subset.fasta");
+
+        // retrieve all records from the bam and reset the indexing bins to keep comparisons with
+        // cram records from failing
+        List<SAMRecord> originalBAMRecords = getSAMRecordsFromFile(originalBAMInputFile, referenceFile);
+        for (int i = 0; i < originalBAMRecords.size(); i++) {
+            originalBAMRecords.get(i).setIndexingBin(null);
+        }
+
+        // write the BAM records to a temporary CRAM
+        try (FileSystem jimfs = Jimfs.newFileSystem(Configuration.unix())) {
+            final Path tempCRAM = jimfs.getPath("testBAMThroughCRAMRoundTrip" + CramIO.CRAM_FILE_EXTENSION);
+            SAMFileHeader samHeader = getFileHeader(originalBAMInputFile, referenceFile);
+            writeRecordsToPath(originalBAMRecords, tempCRAM, referenceFile, samHeader);
+
+            // read the CRAM records back in and compare to the original BAM records
+            List<SAMRecord> cramRecords = getSAMRecordsFromPath(tempCRAM, referenceFile);
+            Assert.assertEquals(cramRecords.size(), originalBAMRecords.size());
+            for (int i = 0; i < originalBAMRecords.size(); i++) {
+                Assert.assertEquals(originalBAMRecords.get(i), cramRecords.get(i));
+            }
+        }
+    }
+
+    private SAMFileHeader getFileHeader(final File sourceFile, final File referenceFile) throws IOException {
+        try (final SamReader reader = SamReaderFactory.make()
+                .validationStringency(ValidationStringency.SILENT)
+                .referenceSequence(referenceFile).open(sourceFile)) {
+            return reader.getFileHeader();
+        }
+    }
+
+    private List<SAMRecord> getSAMRecordsFromFile(final File sourceFile, final File referenceFile) throws IOException {
+        return getSAMRecordsFromPath(sourceFile.toPath(), referenceFile);
+    }
+
+    private List<SAMRecord> getSAMRecordsFromPath(final Path sourcePath, final File referenceFile) throws IOException {
+        List<SAMRecord> recs = new ArrayList<>();
+        try (SamReader reader = SamReaderFactory.make()
+            .validationStringency(ValidationStringency.SILENT)
+            .referenceSequence(referenceFile).open(sourcePath))
+        {
+            for (SAMRecord rec : reader) {
+                recs.add(rec);
+            }
+        }
+        return recs;
+    }
+
+    private void writeRecordsToFile (
+            final List<SAMRecord> recs,
+            final File targetFile,
+            final File referenceFile,
+            final SAMFileHeader samHeader) {
+
+        // NOTE: even when the input is coord-sorted, using assumePresorted=false will cause some
+        // tests to fail since it can change the order of some unmapped reads - AFAICT this is allowed
+        // by the spec since the order is arbitrary for unmapped
+        try (final SAMFileWriter writer = new SAMFileWriterFactory()
+                .makeWriter(samHeader, true, targetFile, referenceFile)) {
+            for (SAMRecord rec : recs) {
+                 writer.addAlignment(rec);
+            }
+        }
+    }
+
+    private void writeRecordsToPath (
+        final List<SAMRecord> recs,
+        final Path targetPath,
+        final File referenceFile,
+        final SAMFileHeader samHeader) {
+
+        // NOTE: even when the input is coord-sorted, using assumePresorted=false will cause some
+        // tests to fail since it can change the order of some unmapped reads - this is allowed
+        // by the spec since the order is arbitrary for unmapped.
+        try (final SAMFileWriter writer = new SAMFileWriterFactory()
+            .makeWriter(samHeader, true, targetPath, referenceFile)) {
+            for (SAMRecord rec : recs) {
+                writer.addAlignment(rec);
+            }
+        }
+    }
+
 }
diff --git a/src/test/java/htsjdk/samtools/CRAMContainerStreamWriterTest.java b/src/test/java/htsjdk/samtools/CRAMContainerStreamWriterTest.java
index b26f4b0..9ab9ed2 100644
--- a/src/test/java/htsjdk/samtools/CRAMContainerStreamWriterTest.java
+++ b/src/test/java/htsjdk/samtools/CRAMContainerStreamWriterTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.cram.ref.ReferenceSource;
 import htsjdk.samtools.reference.InMemoryReferenceSequenceFile;
 import htsjdk.samtools.seekablestream.SeekableMemoryStream;
@@ -23,7 +24,7 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 
-public class CRAMContainerStreamWriterTest {
+public class CRAMContainerStreamWriterTest extends HtsjdkTest {
 
     @BeforeClass
     public void initClass() {
diff --git a/src/test/java/htsjdk/samtools/CRAMEdgeCasesTest.java b/src/test/java/htsjdk/samtools/CRAMEdgeCasesTest.java
index e77e0e8..4fa9b1a 100644
--- a/src/test/java/htsjdk/samtools/CRAMEdgeCasesTest.java
+++ b/src/test/java/htsjdk/samtools/CRAMEdgeCasesTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.cram.CRAMException;
 import htsjdk.samtools.cram.ref.ReferenceSource;
 import htsjdk.samtools.reference.InMemoryReferenceSequenceFile;
@@ -20,7 +21,7 @@ import java.util.Iterator;
 /**
  * A collection of CRAM test based on round trip comparison of SAMRecord before and after CRAM compression.
  */
-public class CRAMEdgeCasesTest {
+public class CRAMEdgeCasesTest extends HtsjdkTest {
 
     @BeforeTest
     public void beforeTest() {
diff --git a/src/test/java/htsjdk/samtools/CRAMFileBAIIndexTest.java b/src/test/java/htsjdk/samtools/CRAMFileBAIIndexTest.java
index eba2b4c..3216092 100644
--- a/src/test/java/htsjdk/samtools/CRAMFileBAIIndexTest.java
+++ b/src/test/java/htsjdk/samtools/CRAMFileBAIIndexTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.cram.build.ContainerParser;
 import htsjdk.samtools.cram.build.CramContainerIterator;
 import htsjdk.samtools.cram.ref.ReferenceSource;
@@ -32,7 +33,7 @@ import java.util.TreeSet;
  * The scan* tests check that for every records in the BAM file the query returns the same records from the CRAM file.
  * Created by Vadim on 14/03/2015.
  */
-public class CRAMFileBAIIndexTest {
+public class CRAMFileBAIIndexTest extends HtsjdkTest {
     private final File BAM_FILE = new File("src/test/resources/htsjdk/samtools/BAMFileIndexTest/index_test.bam");
     private File cramFile;
     private File indexFile;
diff --git a/src/test/java/htsjdk/samtools/CRAMFileCRAIIndexTest.java b/src/test/java/htsjdk/samtools/CRAMFileCRAIIndexTest.java
index 9084a0f..b919c46 100644
--- a/src/test/java/htsjdk/samtools/CRAMFileCRAIIndexTest.java
+++ b/src/test/java/htsjdk/samtools/CRAMFileCRAIIndexTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.cram.build.ContainerParser;
 import htsjdk.samtools.cram.build.CramContainerIterator;
 import htsjdk.samtools.cram.ref.ReferenceSource;
@@ -29,7 +30,8 @@ import java.util.TreeSet;
  * file as the source of the test data. The scan* tests check that for every records in the
  * CRAM file the query returns the same records from the CRAM file.
  */
-public class CRAMFileCRAIIndexTest {
+ at Test(singleThreaded = true)
+public class CRAMFileCRAIIndexTest extends HtsjdkTest {
     private final File BAM_FILE = new File("src/test/resources/htsjdk/samtools/BAMFileIndexTest/index_test.bam");
 
     private final int nofReads = 10000 ;
diff --git a/src/test/java/htsjdk/samtools/CRAMFileReaderTest.java b/src/test/java/htsjdk/samtools/CRAMFileReaderTest.java
index 3fcb3bd..da53f17 100644
--- a/src/test/java/htsjdk/samtools/CRAMFileReaderTest.java
+++ b/src/test/java/htsjdk/samtools/CRAMFileReaderTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.cram.ref.ReferenceSource;
 import htsjdk.samtools.reference.InMemoryReferenceSequenceFile;
 import htsjdk.samtools.seekablestream.SeekableFileStream;
@@ -40,7 +41,7 @@ import java.util.Arrays;
 /**
  * Additional tests for CRAMFileReader are in CRAMFileIndexTest
  */
-public class CRAMFileReaderTest {
+public class CRAMFileReaderTest extends HtsjdkTest {
 
     private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools");
     private static final File CRAM_WITH_CRAI = new File(TEST_DATA_DIR, "cram_with_crai_index.cram");
@@ -82,7 +83,7 @@ public class CRAMFileReaderTest {
     @Test(description = "Test CRAMReader 2 input required", expectedExceptions = IllegalArgumentException.class)
     public void testCRAMReader2_InputRequired() {
         File file = null;
-        InputStream bis =  null;
+        InputStream bis = null;
         new CRAMFileReader(file, bis, createReferenceSource());
     }
 
diff --git a/src/test/java/htsjdk/samtools/CRAMFileWriterTest.java b/src/test/java/htsjdk/samtools/CRAMFileWriterTest.java
index c495528..bd3a5ab 100644
--- a/src/test/java/htsjdk/samtools/CRAMFileWriterTest.java
+++ b/src/test/java/htsjdk/samtools/CRAMFileWriterTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.cram.ref.ReferenceSource;
 import htsjdk.samtools.reference.InMemoryReferenceSequenceFile;
 import htsjdk.samtools.util.Log;
@@ -40,7 +41,7 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 
-public class CRAMFileWriterTest {
+public class CRAMFileWriterTest extends HtsjdkTest {
 
     @BeforeClass
     public void initClass() {
@@ -248,6 +249,7 @@ public class CRAMFileWriterTest {
             SAMRecord record1 = iterator.next();
             SAMRecord record2 = records.get(i++);
             Assert.assertEquals(record1.getInferredInsertSize(), record2.getInferredInsertSize(), record1.getReadName());
+            Assert.assertEquals(record1, record2, record1.getReadName());
         }
         Assert.assertEquals(records.size(), i);
     }
diff --git a/src/test/java/htsjdk/samtools/CRAMFileWriterWithIndexTest.java b/src/test/java/htsjdk/samtools/CRAMFileWriterWithIndexTest.java
index b7e3eab..b7facb6 100644
--- a/src/test/java/htsjdk/samtools/CRAMFileWriterWithIndexTest.java
+++ b/src/test/java/htsjdk/samtools/CRAMFileWriterWithIndexTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.cram.CRAIIndex;
 import htsjdk.samtools.cram.ref.ReferenceSource;
 import htsjdk.samtools.reference.InMemoryReferenceSequenceFile;
@@ -22,7 +23,7 @@ import java.util.Random;
 /**
  * Created by vadim on 23/03/2015.
  */
-public class CRAMFileWriterWithIndexTest {
+public class CRAMFileWriterWithIndexTest extends HtsjdkTest {
     private byte[] cramBytes;
     private byte[] indexBytes;
     private InMemoryReferenceSequenceFile rsf;
diff --git a/src/test/java/htsjdk/samtools/CRAMIndexQueryTest.java b/src/test/java/htsjdk/samtools/CRAMIndexQueryTest.java
index df94310..9e32d6f 100644
--- a/src/test/java/htsjdk/samtools/CRAMIndexQueryTest.java
+++ b/src/test/java/htsjdk/samtools/CRAMIndexQueryTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.seekablestream.SeekableFileStream;
 import htsjdk.samtools.util.CloseableIterator;
 import org.testng.Assert;
@@ -42,7 +43,7 @@ import java.util.function.Function;
  * whatever index format (.bai or .crai converted to .bai) is available for the
  * target file.
  */
-public class CRAMIndexQueryTest {
+public class CRAMIndexQueryTest extends HtsjdkTest {
 
     private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools/cram");
 
@@ -319,13 +320,13 @@ public class CRAMIndexQueryTest {
     public Object[][] multipleIntervalOverlapping() {
         return new Object[][]{
             {cramQueryWithCRAI, cramQueryReference,
-                    new QueryInterval[]{new QueryInterval(0, 100010, 100010), new QueryInterval(0, 100011, 100011)},
+                    new QueryInterval[]{new QueryInterval(0, 100009, 100009), new QueryInterval(0, 100011, 100011)},
                     new String[]{"a", "b", "c", "d", "e"}},
             {cramQueryWithLocalCRAI, cramQueryReference,
-                    new QueryInterval[]{new QueryInterval(0, 100010, 100010), new QueryInterval(0, 100011, 100011)},
+                    new QueryInterval[]{new QueryInterval(0, 100009, 100009), new QueryInterval(0, 100011, 100011)},
                     new String[]{"a", "b", "c", "d", "e"}},
             {cramQueryWithBAI, cramQueryReference,
-                    new QueryInterval[]{new QueryInterval(0, 100010, 100010), new QueryInterval(0, 100011, 100011)},
+                    new QueryInterval[]{new QueryInterval(0, 100009, 100009), new QueryInterval(0, 100011, 100011)},
                     new String[]{"a", "b", "c", "d", "e"}},
             // no matching reads
             {cramQueryReadsWithBAI, cramQueryReadsReference,
diff --git a/src/test/java/htsjdk/samtools/CRAMSliceMD5Test.java b/src/test/java/htsjdk/samtools/CRAMSliceMD5Test.java
new file mode 100644
index 0000000..40568c4
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/CRAMSliceMD5Test.java
@@ -0,0 +1,136 @@
+package htsjdk.samtools;
+
+import htsjdk.samtools.cram.CRAMException;
+import htsjdk.samtools.cram.build.CramIO;
+import htsjdk.samtools.cram.ref.CRAMReferenceSource;
+import htsjdk.samtools.cram.ref.ReferenceSource;
+import htsjdk.samtools.cram.structure.Container;
+import htsjdk.samtools.cram.structure.ContainerIO;
+import htsjdk.samtools.cram.structure.CramHeader;
+import htsjdk.samtools.cram.structure.Slice;
+import htsjdk.samtools.reference.InMemoryReferenceSequenceFile;
+import htsjdk.samtools.util.SequenceUtil;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+
+/**
+ * Created by vadim on 03/07/2017.
+ */
+public class CRAMSliceMD5Test {
+
+    @Test
+    public void testSliceMD5() throws IOException {
+        final CramTestCase test = new CramTestCase();
+
+        // read the CRAM:
+        final ByteArrayInputStream bais = new ByteArrayInputStream(test.cramData);
+        final CramHeader cramHeader = CramIO.readCramHeader(bais);
+        final Container container = ContainerIO.readContainer(cramHeader.getVersion(), bais);
+        final Slice slice = container.slices[0];
+        Assert.assertEquals(slice.alignmentStart, 1);
+        Assert.assertEquals(slice.alignmentSpan, test.referenceBases.length);
+        // check the slice MD5 is the MD5 of upper-cased ref bases:
+        final byte[] ucRefMD5 = SequenceUtil.calculateMD5(test.refBasesFromUCSource, 0, test.refBasesFromUCSource.length);
+        Assert.assertEquals(slice.refMD5, ucRefMD5);
+
+        // check the CRAM file reads:
+        final CRAMFileReader reader = new CRAMFileReader(new ByteArrayInputStream(test.cramData), (File) null, test.referenceSourceUpperCased, ValidationStringency.STRICT);
+        final SAMRecordIterator iterator = reader.getIterator();
+        Assert.assertTrue(iterator.hasNext());
+        Assert.assertEquals(iterator.next(), test.record);
+    }
+
+    @Test(expectedExceptions = CRAMException.class)
+    public void testExceptionWhileReadingWithWrongReference() throws IOException {
+        final CramTestCase test = new CramTestCase();
+
+        // try reading the CRAM file with the incorrect ref source that does not upper case bases:
+        final CRAMFileReader reader = new CRAMFileReader(new ByteArrayInputStream(test.cramData), (File) null, test.referenceSourceMixedCase, ValidationStringency.STRICT);
+        final SAMRecordIterator iterator = reader.getIterator();
+        // expect an exception here due to slice MD5 mismatch:
+        iterator.hasNext();
+    }
+
+
+    /**
+     * A test case to demonstrate the effect of upper casing of reference bases.
+     * The class contains some assertions in the constructor to stress out reference bases case expectations.
+     */
+    private static class CramTestCase {
+        private final byte[] referenceBases;
+        private final byte[] referenceBasesUpperCased;
+        private final SAMFileHeader samFileHeader;
+        /**
+         * An invalid reference source that does not change bases:
+         */
+        private final CRAMReferenceSource referenceSourceMixedCase;
+        private final InMemoryReferenceSequenceFile memoryReferenceSequenceFile;
+        /**
+         * A valid reference source that uppercases reference bases:
+         */
+        private final ReferenceSource referenceSourceUpperCased;
+        private final byte[] refBasesFromUCSource;
+        private final byte[] refBasesFromMixedCaseSource;
+        private final SAMRecord record;
+        private final byte[] cramData;
+
+        private CramTestCase() {
+            referenceBases = SequenceUtil.getIUPACCodesString().getBytes();
+            referenceBasesUpperCased = SequenceUtil.upperCase(Arrays.copyOf(referenceBases, referenceBases.length));
+
+            samFileHeader = new SAMFileHeader();
+            samFileHeader.addSequence(new SAMSequenceRecord("1", referenceBases.length));
+            samFileHeader.addReadGroup(new SAMReadGroupRecord("rg1"));
+
+            // this source does not change ref bases:
+            referenceSourceMixedCase = (sequenceRecord, tryNameVariants) -> referenceBases;
+
+            memoryReferenceSequenceFile = new InMemoryReferenceSequenceFile();
+            // copy ref bases to avoid the original from upper casing:
+            memoryReferenceSequenceFile.add("1", Arrays.copyOf(referenceBases, referenceBases.length));
+            // this is the correct reference source, it upper cases ref bases:
+            referenceSourceUpperCased = new ReferenceSource(memoryReferenceSequenceFile);
+
+            refBasesFromUCSource = referenceSourceUpperCased.getReferenceBases(samFileHeader.getSequence(0), true);
+            // check the ref bases from the source are upper cased indeed:
+            Assert.assertEquals(refBasesFromUCSource, referenceBasesUpperCased);
+            // check there is no lower case A:
+            Assert.assertTrue(!new String(refBasesFromUCSource).contains("a"));
+
+            refBasesFromMixedCaseSource = referenceSourceMixedCase.getReferenceBases(samFileHeader.getSequence(0), true);
+            // check the mixed case source does not change ref base casing:
+            Assert.assertEquals(refBasesFromMixedCaseSource, referenceBases);
+            // check the mixed case source contains lower case bases:
+            Assert.assertTrue(new String(refBasesFromMixedCaseSource).contains("a"));
+
+            final int readLen = referenceBases.length;
+            final byte[] bases = new byte[readLen];
+            Arrays.fill(bases, (byte) 'A');
+            final byte[] scores = new byte[readLen];
+            Arrays.fill(scores, (byte) '!');
+
+            record = new SAMRecord(samFileHeader);
+            record.setReadName("test");
+            record.setReferenceIndex(0);
+            record.setAlignmentStart(1);
+            record.setCigarString(readLen + "M");
+            record.setReadBases(bases);
+            record.setBaseQualities(scores);
+
+            // write a valid CRAM with a valid reference source:
+            final ByteArrayOutputStream baos = new ByteArrayOutputStream();
+            try (final CRAMFileWriter writer = new CRAMFileWriter(baos, referenceSourceUpperCased, samFileHeader, "test")) {
+                writer.addAlignment(record);
+            }
+            cramData = baos.toByteArray();
+        }
+    }
+
+
+}
\ No newline at end of file
diff --git a/src/test/java/htsjdk/samtools/ChunkTest.java b/src/test/java/htsjdk/samtools/ChunkTest.java
index d2bc157..b3a9e0a 100644
--- a/src/test/java/htsjdk/samtools/ChunkTest.java
+++ b/src/test/java/htsjdk/samtools/ChunkTest.java
@@ -23,10 +23,11 @@
  */
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
-public class ChunkTest {
+public class ChunkTest extends HtsjdkTest {
     @Test
     public void testOverlaps() {
         // Test completely disjoint offsets.
diff --git a/src/test/java/htsjdk/samtools/CigarCodecTest.java b/src/test/java/htsjdk/samtools/CigarCodecTest.java
index 8275a94..7ccde7d 100644
--- a/src/test/java/htsjdk/samtools/CigarCodecTest.java
+++ b/src/test/java/htsjdk/samtools/CigarCodecTest.java
@@ -23,12 +23,13 @@
  */
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
 import java.util.Arrays;
 
-public class CigarCodecTest {
+public class CigarCodecTest extends HtsjdkTest {
 
 
     @Test
diff --git a/src/test/java/htsjdk/samtools/CigarOperatorUnitTest.java b/src/test/java/htsjdk/samtools/CigarOperatorUnitTest.java
new file mode 100644
index 0000000..21c36d6
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/CigarOperatorUnitTest.java
@@ -0,0 +1,137 @@
+/*
+ * The MIT License (MIT)
+ *
+ * Copyright (c) 2017 Daniel Gomez-Sanchez
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package htsjdk.samtools;
+
+import htsjdk.HtsjdkTest;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+/**
+ * @author Daniel Gomez-Sanchez (magicDGS)
+ */
+public class CigarOperatorUnitTest extends HtsjdkTest {
+
+    @DataProvider
+    public Object[][] chars() {
+        return new Object[][] {
+                {'M', CigarOperator.M},
+                {'I', CigarOperator.I},
+                {'D', CigarOperator.D},
+                {'N', CigarOperator.N},
+                {'S', CigarOperator.S},
+                {'H', CigarOperator.H},
+                {'P', CigarOperator.P},
+                {'=', CigarOperator.EQ},
+                {'X', CigarOperator.X}
+        };
+    }
+
+    @Test(dataProvider = "chars")
+    public void testCharacterToEnum(final char c, final CigarOperator op) throws Exception {
+        Assert.assertEquals(CigarOperator.characterToEnum(c), op);
+    }
+
+    @Test(dataProvider = "chars")
+    public void testEnumToCharacter(final char c, final CigarOperator op) throws Exception {
+        Assert.assertEquals(CigarOperator.enumToCharacter(op), c);
+    }
+
+    @DataProvider
+    public Object[][] illegalChars() {
+        return new Object[][] {
+                {'A'}, {'E'}, {'O'}, {'U'}
+        };
+    }
+
+    @Test(dataProvider = "illegalChars", expectedExceptions = IllegalArgumentException.class)
+    public void testIllegalCharacterToEnum(final char c) throws Exception {
+        CigarOperator.characterToEnum(c);
+    }
+
+    @DataProvider
+    public Object[][] binary() {
+        return new Object[][] {
+                {0, CigarOperator.M},
+                {1, CigarOperator.I},
+                {2, CigarOperator.D},
+                {3, CigarOperator.N},
+                {4, CigarOperator.S},
+                {5, CigarOperator.H},
+                {6, CigarOperator.P},
+                {7, CigarOperator.EQ},
+                {8, CigarOperator.X}
+        };
+    }
+
+    @Test(dataProvider = "binary")
+    public void testBinaryToEnum(final int bin, final CigarOperator op) throws Exception {
+        Assert.assertEquals(CigarOperator.binaryToEnum(bin), op);
+    }
+
+    @Test(dataProvider = "binary")
+    public void testEnumToBinary(final int bin, final CigarOperator op) throws Exception {
+        Assert.assertEquals(CigarOperator.enumToBinary(op), bin);
+    }
+
+    @DataProvider
+    public Object[][] illegalBinary() {
+        return new Object[][] {
+                {-1}, {9}, {10}
+        };
+    }
+
+    @Test(dataProvider = "illegalBinary", expectedExceptions = IllegalArgumentException.class)
+    public void testIllegalBinaryToEnum(final int bin) throws Exception {
+        CigarOperator.binaryToEnum(bin);
+    }
+
+    @DataProvider
+    public Object[][] opStatus() {
+        return new Object[][] {
+                // op, isClipping, isIndel, isSkip, isAlignment, isPadding
+                {CigarOperator.M, false, false, false, true, false},
+                {CigarOperator.I, false, true, false, false, false},
+                {CigarOperator.D, false, true, false, false, false},
+                {CigarOperator.N, false, false, true, false, false},
+                {CigarOperator.S, true, false, false, false, false},
+                {CigarOperator.H, true, false, false, false, false},
+                {CigarOperator.P, false, false, false, false, true},
+                {CigarOperator.EQ, false, false, false, true, false},
+                {CigarOperator.X, false, false, false, true, false}
+        };
+    }
+
+    @Test(dataProvider = "opStatus")
+    public void testIsSetOfOperations(final CigarOperator op, final boolean isClipping,
+            final boolean isIndel,final boolean isSkip, final boolean isAlignment,
+            final boolean isPadding) throws Exception {
+        Assert.assertEquals(op.isClipping(), isClipping);
+        Assert.assertEquals(op.isIndel(), isIndel);
+        Assert.assertEquals(op.isIndelOrSkippedRegion(), isIndel || isSkip);
+        Assert.assertEquals(op.isAlignment(), isAlignment);
+        Assert.assertEquals(op.isPadding(), isPadding);
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/htsjdk/samtools/CigarTest.java b/src/test/java/htsjdk/samtools/CigarTest.java
index acdc224..c104073 100644
--- a/src/test/java/htsjdk/samtools/CigarTest.java
+++ b/src/test/java/htsjdk/samtools/CigarTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
@@ -33,7 +34,7 @@ import java.util.List;
 /**
  * @author alecw at broadinstitute.org
  */
-public class CigarTest {
+public class CigarTest extends HtsjdkTest {
 
     @DataProvider(name = "positiveTestsData")
     public Object[][] testPositive() {
@@ -62,6 +63,9 @@ public class CigarTest {
     public Object[][] negativeTestsData() {
 
         return new Object[][]{
+                // CIGAR element with zero length
+                {"0M", SAMValidationError.Type.INVALID_CIGAR},
+
                 // Cannot have two consecutive insertions (of the same type)
                 {"1M1D1D1M", SAMValidationError.Type.ADJACENT_INDEL_IN_CIGAR},
                 {"1M1I1I1M", SAMValidationError.Type.ADJACENT_INDEL_IN_CIGAR},
@@ -79,11 +83,15 @@ public class CigarTest {
                 {"1H1S", SAMValidationError.Type.INVALID_CIGAR},
                 {"1S1H", SAMValidationError.Type.INVALID_CIGAR},
                 {"1H1H", SAMValidationError.Type.INVALID_CIGAR},
+
+                // Hard clipping operator not at start or end of CIGAR
+                {"1M1H1M", SAMValidationError.Type.INVALID_CIGAR},
+
+                // Padding operator not valid at end of CIGAR
+                {"1M1P", SAMValidationError.Type.INVALID_CIGAR},
+                // Padding operator not between real operators in CIGAR
+                {"1S1P1M", SAMValidationError.Type.INVALID_CIGAR}
         };
-/*
-        // Zero length for an element not allowed. TODO: not sure why this is commented out
-       {"100M0D10M1D10M", SAMValidationError.Type.INVALID_CIGAR}
-*/
     }
 
     @Test(dataProvider = "negativeTestsData")
@@ -115,4 +123,115 @@ public class CigarTest {
         Assert.assertFalse(cigar.isRightClipped());
         Assert.assertTrue(cigar.isClipped());
     }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testMakeCigarFromNullOperator() {
+        Cigar.fromCigarOperators(null);
+    }
+
+    @DataProvider
+    public Object[][] referenceLengthData() {
+        return new Object[][] {
+                // consuming reference
+                {"1M", 1, 1},
+                {"1=", 1, 1},
+                {"1X", 1, 1},
+                {"1N", 1, 1},
+                {"1D", 1, 1},
+
+                // non-consuming reference
+                {"1S", 0, 0},
+                {"1H", 0, 0},
+
+                // special case: padding
+                {"1P", 0, 1}
+        };
+    }
+
+    @Test(dataProvider = "referenceLengthData")
+    public void testGetReferenceLength(final String textCigar,
+            final int referenceLength, final int paddedReferenceLenght) throws Exception{
+        final Cigar cigar = TextCigarCodec.decode(textCigar);
+        Assert.assertEquals(cigar.getReferenceLength(), referenceLength);
+        Assert.assertEquals(cigar.getPaddedReferenceLength(), paddedReferenceLenght);
+    }
+
+    @DataProvider
+    public Object[][] readLengthData() {
+        return new Object[][] {
+                // consuming read bases
+                {"1M", 1},
+                {"2I", 2},
+                {"3S", 3},
+                {"4X", 4},
+                {"5=", 5},
+
+                // non-consuming reference
+                {"1D", 0},
+                {"2N", 0},
+                {"4H", 0},
+                {"4P", 0}
+        };
+    }
+
+    @Test(dataProvider = "readLengthData")
+    public void testGetReadLength(final String textCigar, final int readLength) throws Exception{
+        final Cigar cigar = TextCigarCodec.decode(textCigar);
+        Assert.assertEquals(cigar.getReadLength(), readLength);
+    }
+
+    @Test
+    public void testContainsOperator() {
+        final Cigar cigar = TextCigarCodec.decode("10M1S");
+        Assert.assertTrue(cigar.containsOperator(CigarOperator.M));
+        Assert.assertTrue(cigar.containsOperator(CigarOperator.S));
+        Assert.assertFalse(cigar.containsOperator(CigarOperator.X));
+    }
+
+    @DataProvider
+    public Object[][] firstLastData() {
+        final CigarElement M_ELEMENT = new CigarElement(1, CigarOperator.M);
+        final CigarElement S_ELEMENT = new CigarElement(1, CigarOperator.S);
+        return new Object[][] {
+                {"*", null, null},
+                {"1M", M_ELEMENT, M_ELEMENT},
+                {"1M1S", M_ELEMENT, S_ELEMENT},
+                {"1S1M", S_ELEMENT, M_ELEMENT},
+                {"1S1M1S", S_ELEMENT, S_ELEMENT},
+                {"1M1D1M1D1M", M_ELEMENT, M_ELEMENT}
+        };
+    }
+
+    @Test(dataProvider = "firstLastData")
+    public void testGetFirstOrLastCigarElement(final String textCigar, final CigarElement first, final CigarElement last) {
+        final Cigar cigar = TextCigarCodec.decode(textCigar);
+        Assert.assertEquals(cigar.getFirstCigarElement(), first);
+        Assert.assertEquals(cigar.getLastCigarElement(), last);
+    }
+
+    @DataProvider
+    public Object[][] clippedData() {
+        return new Object[][] {
+                // no clipped
+                {"10M", false},
+                // wrong place for soft-clip and hard-clip returns false
+                {"1M1S1M", false},
+                {"1M1H1M", false},
+
+                // clipped
+                {"1S1M", true},
+                {"1M1S", true},
+                {"1S1M1S", true},
+                {"1H1M", true},
+                {"1M1H", true},
+                {"1H1M1H", true}
+        };
+    }
+
+    @Test(dataProvider = "clippedData")
+    public void testIsClipped(final String textCigar, final boolean isClipped) {
+        // this test is indirectly testing both left and right clipping methods
+        Assert.assertEquals(TextCigarCodec.decode(textCigar).isClipped(), isClipped);
+    }
+
 }
diff --git a/src/test/java/htsjdk/samtools/DownsamplingIteratorTests.java b/src/test/java/htsjdk/samtools/DownsamplingIteratorTests.java
index e84ee2e..96dff46 100644
--- a/src/test/java/htsjdk/samtools/DownsamplingIteratorTests.java
+++ b/src/test/java/htsjdk/samtools/DownsamplingIteratorTests.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.DownsamplingIteratorFactory.Strategy;
 import org.testng.Assert;
 import org.testng.annotations.Test;
@@ -14,7 +15,7 @@ import java.util.Random;
  * Tests for the downsampling iterator class.
  * @author Tim Fennell
  */
-public class DownsamplingIteratorTests {
+public class DownsamplingIteratorTests extends HtsjdkTest {
     final int NUM_TEMPLATES = 50000;
     final EnumMap<Strategy, Double> ACCURACY = new EnumMap<Strategy,Double>(Strategy.class){{
         put(Strategy.HighAccuracy, 0.001);
diff --git a/src/test/java/htsjdk/samtools/DuplicateScoringStrategyTest.java b/src/test/java/htsjdk/samtools/DuplicateScoringStrategyTest.java
new file mode 100644
index 0000000..d86b697
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/DuplicateScoringStrategyTest.java
@@ -0,0 +1,26 @@
+package htsjdk.samtools;
+
+import htsjdk.HtsjdkTest;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+public class DuplicateScoringStrategyTest extends HtsjdkTest {
+
+    @DataProvider
+    public Object [][] compareData() {
+        return new Object[][]{
+                {SAMFlag.READ_PAIRED.flag, 0, true, DuplicateScoringStrategy.ScoringStrategy.RANDOM, -1},
+                {0, SAMFlag.READ_PAIRED.flag, true, DuplicateScoringStrategy.ScoringStrategy.RANDOM, 1},
+        };
+    }
+
+    @Test(dataProvider = "compareData")
+    public static void testCompare(final int samFlag1, final int samFlag2, final boolean assumeMateCigar, final DuplicateScoringStrategy.ScoringStrategy strategy, final int expected) {
+        final SAMRecord rec1 = new SAMRecordSetBuilder().addFrag("test", 0, 1, false, false, "36M", null, 2);
+        rec1.setFlags(samFlag1);
+        final SAMRecord rec2 = new SAMRecordSetBuilder().addFrag("test", 0, 1, true, false, "36M", null, 3);
+        rec2.setFlags(samFlag2);
+        Assert.assertEquals(DuplicateScoringStrategy.compare(rec1, rec2, strategy, assumeMateCigar), expected);
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/htsjdk/samtools/DuplicateSetIteratorTest.java b/src/test/java/htsjdk/samtools/DuplicateSetIteratorTest.java
index 5952953..27e1678 100644
--- a/src/test/java/htsjdk/samtools/DuplicateSetIteratorTest.java
+++ b/src/test/java/htsjdk/samtools/DuplicateSetIteratorTest.java
@@ -1,12 +1,13 @@
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
 import java.util.HashMap;
 import java.util.Map;
 
-public class DuplicateSetIteratorTest {
+public class DuplicateSetIteratorTest extends HtsjdkTest {
     protected final static int DEFAULT_BASE_QUALITY = 10;
 
     private SAMRecordSetBuilder getSAMRecordSetBuilder() {
diff --git a/src/test/java/htsjdk/samtools/GenomicIndexUtilTest.java b/src/test/java/htsjdk/samtools/GenomicIndexUtilTest.java
index 8f5569c..0bf322d 100644
--- a/src/test/java/htsjdk/samtools/GenomicIndexUtilTest.java
+++ b/src/test/java/htsjdk/samtools/GenomicIndexUtilTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
@@ -7,7 +8,7 @@ import org.testng.annotations.Test;
 /**
  * Tests for GenomicIndexUtil.
  */
-public class GenomicIndexUtilTest {
+public class GenomicIndexUtilTest extends HtsjdkTest {
 
     @Test(dataProvider = "testRegionToBinDataProvider")
     public void testRegionToBin(final int beg, final int end, final int bin) {
@@ -47,4 +48,4 @@ public class GenomicIndexUtilTest {
                 {1<<26, 1<<26+1, 2}
         };
     }
-}
\ No newline at end of file
+}
diff --git a/src/test/java/htsjdk/samtools/MergingSamRecordIteratorGroupCollisionTest.java b/src/test/java/htsjdk/samtools/MergingSamRecordIteratorGroupCollisionTest.java
index 18c10c7..d350b8f 100644
--- a/src/test/java/htsjdk/samtools/MergingSamRecordIteratorGroupCollisionTest.java
+++ b/src/test/java/htsjdk/samtools/MergingSamRecordIteratorGroupCollisionTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.CloserUtil;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
@@ -37,7 +38,7 @@ import java.util.List;
  *
  * @author Dave Tefft, Andre Mesarovic
  */
-public class MergingSamRecordIteratorGroupCollisionTest {
+public class MergingSamRecordIteratorGroupCollisionTest extends HtsjdkTest {
 
     private GroupAdapter padapter = new ProgramGroupAdapter();
     private GroupAdapter radapter = new ReadGroupAdapter();
@@ -424,18 +425,22 @@ public class MergingSamRecordIteratorGroupCollisionTest {
     }
 
     class ProgramGroupAdapter extends GroupAdapter {
+        @Override
         String getGroupId(AbstractSAMHeaderRecord group) {
             return ((SAMProgramRecord) group).getProgramGroupId();
         }
 
+        @Override
         List<? extends AbstractSAMHeaderRecord> getGroups(SAMFileHeader header) {
             return header.getProgramRecords();
         }
 
+        @Override
         String getTagName() {
             return SAMTag.PG.toString();
         }
 
+        @Override
         List<? extends AbstractSAMHeaderRecord> createGroups(final String[] groupIds) {
             final List<SamReader> readers = new ArrayList<SamReader>();
             for (final String groupId : groupIds) {
@@ -457,36 +462,44 @@ public class MergingSamRecordIteratorGroupCollisionTest {
             return fileHeaderMerger.getMergedHeader().getProgramRecords();
         }
 
+        @Override
         void setAttribute(AbstractSAMHeaderRecord group, String value) {
             ((SAMProgramRecord) group).setCommandLine(value);
         }
 
+        @Override
         AbstractSAMHeaderRecord newGroup(String id) {
             return new SAMProgramRecord(id);
         }
 
+        @Override
         void setBuilderGroup(SAMRecordSetBuilder builder, AbstractSAMHeaderRecord group) {
             builder.setProgramRecord((SAMProgramRecord) group);
         }
 
+        @Override
         boolean equivalent(AbstractSAMHeaderRecord group1, AbstractSAMHeaderRecord group2) {
             return ((SAMProgramRecord) group1).equivalent((SAMProgramRecord) group2);
         }
     }
 
     class ReadGroupAdapter extends GroupAdapter {
+        @Override
         String getGroupId(AbstractSAMHeaderRecord group) {
             return ((SAMReadGroupRecord) group).getReadGroupId();
         }
 
+        @Override
         List<? extends AbstractSAMHeaderRecord> getGroups(SAMFileHeader header) {
             return header.getReadGroups();
         }
 
+        @Override
         String getTagName() {
             return SAMTag.RG.toString();
         }
 
+        @Override
         List<? extends AbstractSAMHeaderRecord> createGroups(final String[] groupIds) {
             final List<SamReader> readers = new ArrayList<SamReader>();
 
@@ -507,20 +520,24 @@ public class MergingSamRecordIteratorGroupCollisionTest {
             return fileHeaderMerger.getMergedHeader().getReadGroups();
         }
 
+        @Override
         void setAttribute(AbstractSAMHeaderRecord group, String value) {
             ((SAMReadGroupRecord) group).setPlatformUnit(value);
         }
 
+        @Override
         AbstractSAMHeaderRecord newGroup(String id) {
             SAMReadGroupRecord group = new SAMReadGroupRecord(id);
             group.setAttribute(SAMTag.SM.name(), id);
             return group;
         }
 
+        @Override
         void setBuilderGroup(SAMRecordSetBuilder builder, AbstractSAMHeaderRecord group) {
             builder.setReadGroup((SAMReadGroupRecord) group);
         }
 
+        @Override
         boolean equivalent(AbstractSAMHeaderRecord group1, AbstractSAMHeaderRecord group2) {
             return ((SAMReadGroupRecord) group1).equivalent((SAMReadGroupRecord) group2);
         }
diff --git a/src/test/java/htsjdk/samtools/MergingSamRecordIteratorTest.java b/src/test/java/htsjdk/samtools/MergingSamRecordIteratorTest.java
index 885321b..a50c026 100644
--- a/src/test/java/htsjdk/samtools/MergingSamRecordIteratorTest.java
+++ b/src/test/java/htsjdk/samtools/MergingSamRecordIteratorTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.SequenceUtil;
 import org.testng.Assert;
 import org.testng.annotations.Test;
@@ -38,7 +39,7 @@ import java.util.Set;
  *
  * @author Dave Tefft
  */
-public class MergingSamRecordIteratorTest {
+public class MergingSamRecordIteratorTest extends HtsjdkTest {
 
     @Test
     public void testVanillaCoordinateMultiIterator() throws Exception {
diff --git a/src/test/java/htsjdk/samtools/PathInputResourceTest.java b/src/test/java/htsjdk/samtools/PathInputResourceTest.java
new file mode 100644
index 0000000..f82b9a6
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/PathInputResourceTest.java
@@ -0,0 +1,48 @@
+package htsjdk.samtools;
+
+import java.nio.channels.SeekableByteChannel;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.HashMap;
+import java.util.function.Function;
+
+import htsjdk.HtsjdkTest;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+public class PathInputResourceTest extends HtsjdkTest {
+  final String localBam = "src/test/resources/htsjdk/samtools/BAMFileIndexTest/index_test.bam";
+
+  @Test
+  public void testWrappersAreAccessed() throws Exception {
+    Path path = Paths.get(localBam);
+    Path indexPath = Paths.get(localBam + ".bai");
+    HashMap<String, Boolean> fired = new HashMap<>();
+    Function<SeekableByteChannel, SeekableByteChannel> wrapData = (SeekableByteChannel in) -> {
+      fired.put("data", true);
+      return in;
+    };
+    Function<SeekableByteChannel, SeekableByteChannel> wrapIndex = (SeekableByteChannel in) -> {
+      fired.put("index", true);
+      return in;
+    };
+    SamInputResource in = SamInputResource.of(path, wrapData);
+    in.index(indexPath, wrapIndex);
+    InputResource indexResource = in.indexMaybe();
+    Assert.assertNotNull(indexResource);
+
+    Assert.assertFalse(fired.containsKey("data"));
+    Assert.assertFalse(fired.containsKey("index"));
+
+    indexResource.asUnbufferedSeekableStream();
+
+    Assert.assertFalse(fired.containsKey("data"));
+    Assert.assertTrue(fired.containsKey("index"));
+
+    in.data().asUnbufferedSeekableStream();
+
+    Assert.assertTrue(fired.containsKey("data"));
+    Assert.assertTrue(fired.containsKey("index"));
+  }
+
+}
diff --git a/src/test/java/htsjdk/samtools/ProgramRecordChainingTest.java b/src/test/java/htsjdk/samtools/ProgramRecordChainingTest.java
index cd470c4..4811148 100644
--- a/src/test/java/htsjdk/samtools/ProgramRecordChainingTest.java
+++ b/src/test/java/htsjdk/samtools/ProgramRecordChainingTest.java
@@ -23,13 +23,14 @@
  */
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
 /**
  * Test for SequenceUtil.chainProgramRecord
  */
-public class ProgramRecordChainingTest {
+public class ProgramRecordChainingTest extends HtsjdkTest {
 
     @Test
     public void testChainProgramRecord() {
diff --git a/src/test/java/htsjdk/samtools/QueryIntervalTest.java b/src/test/java/htsjdk/samtools/QueryIntervalTest.java
new file mode 100644
index 0000000..d46fde1
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/QueryIntervalTest.java
@@ -0,0 +1,42 @@
+package htsjdk.samtools;
+
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+public class QueryIntervalTest {
+
+    @Test
+    public void testOptimizeIntervals() throws Exception {
+        final QueryInterval[] overlappingIntervals = new QueryInterval[] {
+                new QueryInterval(0, 1520, 1521),
+                new QueryInterval(0, 1521, 1525)
+        };
+
+        final QueryInterval[] optimizedOverlapping = QueryInterval.optimizeIntervals(overlappingIntervals);
+
+        final QueryInterval[] abuttingIntervals = new QueryInterval[]{
+                new QueryInterval(0, 1520, 1521),
+                new QueryInterval(0, 1522, 1525)
+        };
+
+        final QueryInterval[] optimizedAbutting = QueryInterval.optimizeIntervals(abuttingIntervals);
+
+        final QueryInterval[] expected = new QueryInterval[]{
+                new QueryInterval(0, 1520, 1525),
+        };
+
+        Assert.assertEquals(optimizedOverlapping, expected);
+        Assert.assertEquals(optimizedAbutting, expected);
+
+
+        final QueryInterval[]
+                nonOptimizableSeparatedIntervals = new QueryInterval[]{
+                new QueryInterval(0, 1520, 1521),
+                new QueryInterval(0, 1523, 1525)
+        };
+
+        final QueryInterval[] optimizedSeparated = QueryInterval.optimizeIntervals(nonOptimizableSeparatedIntervals);
+
+        Assert.assertEquals(optimizedSeparated, nonOptimizableSeparatedIntervals);
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/SAMBinaryTagAndValueUnitTest.java b/src/test/java/htsjdk/samtools/SAMBinaryTagAndValueUnitTest.java
index f5f7a5c..93a20dc 100644
--- a/src/test/java/htsjdk/samtools/SAMBinaryTagAndValueUnitTest.java
+++ b/src/test/java/htsjdk/samtools/SAMBinaryTagAndValueUnitTest.java
@@ -1,11 +1,12 @@
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.BinaryCodec;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
-public class SAMBinaryTagAndValueUnitTest {
+public class SAMBinaryTagAndValueUnitTest extends HtsjdkTest {
 
     @DataProvider(name="allowedAttributeTypes")
     public Object[][] allowedTypes() {
diff --git a/src/test/java/htsjdk/samtools/SAMCloneTest.java b/src/test/java/htsjdk/samtools/SAMCloneTest.java
index 8fdfb3b..e05d29d 100644
--- a/src/test/java/htsjdk/samtools/SAMCloneTest.java
+++ b/src/test/java/htsjdk/samtools/SAMCloneTest.java
@@ -23,13 +23,14 @@
  */
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
 /**
  * @author alecw at broadinstitute.org
  */
-public class SAMCloneTest {
+public class SAMCloneTest extends HtsjdkTest {
     private SAMRecordSetBuilder getSAMReader(final boolean sortForMe, final SAMFileHeader.SortOrder sortOrder) {
         final SAMRecordSetBuilder ret = new SAMRecordSetBuilder(sortForMe, sortOrder);
         ret.addPair("readB", 20, 200, 300);
diff --git a/src/test/java/htsjdk/samtools/SAMFileHeaderTest.java b/src/test/java/htsjdk/samtools/SAMFileHeaderTest.java
new file mode 100644
index 0000000..0723ed9
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/SAMFileHeaderTest.java
@@ -0,0 +1,64 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2017 Nils Homer
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ */
+package htsjdk.samtools;
+
+import htsjdk.HtsjdkTest;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+public class SAMFileHeaderTest extends HtsjdkTest {
+
+    @Test
+    public void testSortOrder() {
+        final SAMFileHeader header = new SAMFileHeader();
+
+        header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
+        Assert.assertEquals(header.getSortOrder(), SAMFileHeader.SortOrder.coordinate);
+        Assert.assertEquals(header.getAttribute(SAMFileHeader.SORT_ORDER_TAG), SAMFileHeader.SortOrder.coordinate.name());
+
+        header.setAttribute(SAMFileHeader.SORT_ORDER_TAG, SAMFileHeader.SortOrder.queryname.name());
+        Assert.assertEquals(header.getSortOrder(), SAMFileHeader.SortOrder.queryname);
+        Assert.assertEquals(header.getAttribute(SAMFileHeader.SORT_ORDER_TAG), SAMFileHeader.SortOrder.queryname.name());
+
+        header.setAttribute(SAMFileHeader.SORT_ORDER_TAG, SAMFileHeader.SortOrder.coordinate);
+        Assert.assertEquals(header.getSortOrder(), SAMFileHeader.SortOrder.coordinate);
+        Assert.assertEquals(header.getAttribute(SAMFileHeader.SORT_ORDER_TAG), SAMFileHeader.SortOrder.coordinate.name());
+    }
+
+    @Test
+    public void testGroupOrder() {
+        final SAMFileHeader header = new SAMFileHeader();
+
+        header.setGroupOrder(SAMFileHeader.GroupOrder.query);
+        Assert.assertEquals(header.getGroupOrder(), SAMFileHeader.GroupOrder.query);
+        Assert.assertEquals(header.getAttribute(SAMFileHeader.GROUP_ORDER_TAG), SAMFileHeader.GroupOrder.query.name());
+
+        header.setAttribute(SAMFileHeader.GROUP_ORDER_TAG, SAMFileHeader.GroupOrder.reference.name());
+        Assert.assertEquals(header.getGroupOrder(), SAMFileHeader.GroupOrder.reference);
+        Assert.assertEquals(header.getAttribute(SAMFileHeader.GROUP_ORDER_TAG), SAMFileHeader.GroupOrder.reference.name());
+
+        header.setAttribute(SAMFileHeader.GROUP_ORDER_TAG, SAMFileHeader.GroupOrder.query);
+        Assert.assertEquals(header.getGroupOrder(), SAMFileHeader.GroupOrder.query);
+        Assert.assertEquals(header.getAttribute(SAMFileHeader.GROUP_ORDER_TAG), SAMFileHeader.GroupOrder.query.name());
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/SAMFileWriterFactoryTest.java b/src/test/java/htsjdk/samtools/SAMFileWriterFactoryTest.java
index dc7a6f3..c4eeb3a 100644
--- a/src/test/java/htsjdk/samtools/SAMFileWriterFactoryTest.java
+++ b/src/test/java/htsjdk/samtools/SAMFileWriterFactoryTest.java
@@ -23,16 +23,22 @@
  */
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
+import com.google.common.jimfs.Configuration;
+import com.google.common.jimfs.Jimfs;
 import htsjdk.samtools.cram.build.CramIO;
 import htsjdk.samtools.cram.ref.ReferenceSource;
 import htsjdk.samtools.util.IOUtil;
+import java.nio.file.Path;
+import java.nio.file.Files;
+import java.nio.file.FileSystem;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
 import java.io.*;
 
-public class SAMFileWriterFactoryTest {
+public class SAMFileWriterFactoryTest extends HtsjdkTest {
 
     private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools");
 
@@ -57,6 +63,19 @@ public class SAMFileWriterFactoryTest {
         Assert.assertTrue(md5File.length() > 0);
     }
 
+    @Test()
+    public void ordinaryPathWriterTest() throws Exception {
+        try (FileSystem jimfs = Jimfs.newFileSystem(Configuration.unix())) {
+            final Path outputPath = jimfs.getPath("ordinaryPathWriterTest" + BamFileIoUtils.BAM_FILE_EXTENSION);
+            createSmallBam(outputPath);
+            final Path indexPath = SamFiles.findIndex(outputPath);
+            final Path md5File = IOUtil.addExtension(outputPath,  ".md5");
+            Assert.assertTrue(Files.size(outputPath) > 0);
+            Assert.assertTrue(Files.size(indexPath) > 0);
+            Assert.assertTrue(Files.size(md5File) > 0);
+        }
+    }
+
     @Test(description="create a BAM in memory,  should start with GZipInputStream.GZIP_MAGIC")
     public void inMemoryBam()  throws Exception  {
     	ByteArrayOutputStream os=new ByteArrayOutputStream();
@@ -145,11 +164,23 @@ public class SAMFileWriterFactoryTest {
         // index only created if coordinate sorted
         header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
         header.addSequence(new SAMSequenceRecord("chr1", 123));
-        final SAMFileWriter writer = factory.makeBAMWriter(header, false, outputFile);
-        fillSmallBam(writer);
-        writer.close();
+        try (final SAMFileWriter writer = factory.makeBAMWriter(header, false, outputFile)) {
+            fillSmallBam(writer);
+        }
     }
 
+    private void createSmallBam(final Path outputPath) {
+        final SAMFileWriterFactory factory = new SAMFileWriterFactory();
+        factory.setCreateIndex(true);
+        factory.setCreateMd5File(true);
+        final SAMFileHeader header = new SAMFileHeader();
+        // index only created if coordinate sorted
+        header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
+        header.addSequence(new SAMSequenceRecord("chr1", 123));
+        try (final SAMFileWriter writer = factory.makeBAMWriter(header, false, outputPath)) {
+            fillSmallBam(writer);
+        }
+    }
 
    private void createSmallBamToOutputStream(final OutputStream outputStream,boolean binary) {
         final SAMFileWriterFactory factory = new SAMFileWriterFactory();
@@ -159,14 +190,36 @@ public class SAMFileWriterFactoryTest {
         // index only created if coordinate sorted
         header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
         header.addSequence(new SAMSequenceRecord("chr1", 123));
-        final SAMFileWriter writer = (binary?
+        try (final SAMFileWriter writer = (binary?
         			factory.makeBAMWriter(header, false, outputStream):
         			factory.makeSAMWriter(header, false, outputStream)
-        			);
-        fillSmallBam(writer);
-        writer.close();
+        			)) {
+            fillSmallBam(writer);
+        }
+    }
+
+    @Test(description="check that factory settings are propagated to writer")
+    public void testFactorySettings()  throws Exception {
+        final SAMFileWriterFactory factory = new SAMFileWriterFactory();
+        factory.setCreateIndex(false);
+        factory.setCreateMd5File(false);
+        final File wontBeUsed = new File("wontBeUsed.tmp");
+        final int maxRecsInRam = 271828;
+        factory.setMaxRecordsInRam(maxRecsInRam);
+        factory.setTempDirectory(wontBeUsed);
+        final SAMFileHeader header = new SAMFileHeader();
+        header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
+        header.addSequence(new SAMSequenceRecord("chr1", 123));
+        try (final SAMFileWriter writer = factory.makeBAMWriter(header, false, new ByteArrayOutputStream())) {
+            Assert.assertEquals(maxRecsInRam, ((SAMFileWriterImpl) writer).getMaxRecordsInRam());
+            Assert.assertEquals(wontBeUsed, ((SAMFileWriterImpl) writer).getTempDirectory());
+        }
+        try (final SAMFileWriter writer = factory.makeSAMWriter(header, false, new ByteArrayOutputStream())) {
+            Assert.assertEquals(maxRecsInRam, ((SAMFileWriterImpl) writer).getMaxRecordsInRam());
+            Assert.assertEquals(wontBeUsed, ((SAMFileWriterImpl) writer).getTempDirectory());
+        }
     }
-   
+
    private int fillSmallBam(SAMFileWriter writer) {
        final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
        builder.addUnmappedFragment("HiMom!");
@@ -220,6 +273,28 @@ public class SAMFileWriterFactoryTest {
         Assert.assertTrue(count == nRecs);
     }
 
+    private void verifyWriterOutput(Path output, ReferenceSource refSource, int nRecs, boolean verifySupplementalFiles) throws IOException {
+        if (verifySupplementalFiles) {
+            final Path index = SamFiles.findIndex(output);
+            final Path md5File = IOUtil.addExtension(output, ".md5");
+            Assert.assertTrue(Files.size(index) > 0);
+            Assert.assertTrue(Files.size(md5File) > 0);
+        }
+
+        SamReaderFactory factory =  SamReaderFactory.makeDefault().validationStringency(ValidationStringency.LENIENT);
+        if (refSource != null) {
+            factory.referenceSource(refSource);
+        }
+        SamReader reader = factory.open(output);
+        SAMRecordIterator it = reader.iterator();
+        int count = 0;
+        for (; it.hasNext(); it.next()) {
+            count++;
+        }
+
+        Assert.assertTrue(count == nRecs);
+    }
+
     @DataProvider(name="bamOrCramWriter")
     public Object[][] bamOrCramWriter() {
         return new Object[][] {
@@ -235,13 +310,31 @@ public class SAMFileWriterFactoryTest {
         final SAMFileWriterFactory factory = createWriterFactoryWithOptions(header);
         final File referenceFile = new File(TEST_DATA_DIR, "hg19mini.fasta");
 
-        final SAMFileWriter samWriter = factory.makeWriter(header, false, outputFile, referenceFile);
-        int nRecs = fillSmallBam(samWriter);
-        samWriter.close();
-
+        int nRecs;
+        try (final SAMFileWriter samWriter = factory.makeWriter(header, false, outputFile, referenceFile)) {
+            nRecs = fillSmallBam(samWriter);
+        }
         verifyWriterOutput(outputFile, new ReferenceSource(referenceFile), nRecs, true);
     }
 
+    @Test(dataProvider="bamOrCramWriter")
+    public void testMakeWriterPath(String extension) throws Exception {
+        try (FileSystem jimfs = Jimfs.newFileSystem(Configuration.unix())) {
+            Path outputPath = jimfs.getPath("testMakeWriterPath" + extension);
+            Files.deleteIfExists(outputPath);
+            final SAMFileHeader header = new SAMFileHeader();
+            final SAMFileWriterFactory factory = createWriterFactoryWithOptions(header);
+            final File referenceFile = new File(TEST_DATA_DIR, "hg19mini.fasta");
+
+            int nRecs;
+            try (final SAMFileWriter samWriter = factory.makeWriter(header, false, outputPath, referenceFile)) {
+                nRecs = fillSmallBam(samWriter);
+            }
+            verifyWriterOutput(outputPath, new ReferenceSource(referenceFile), nRecs, true);
+        }
+    }
+
+
     @Test
     public void testMakeCRAMWriterWithOptions() throws Exception {
         final File outputFile = prepareOutputFile(CramIO.CRAM_FILE_EXTENSION);
@@ -249,10 +342,10 @@ public class SAMFileWriterFactoryTest {
         final SAMFileWriterFactory factory = createWriterFactoryWithOptions(header);
         final File referenceFile = new File(TEST_DATA_DIR, "hg19mini.fasta");
 
-        final SAMFileWriter samWriter = factory.makeCRAMWriter(header, false, outputFile, referenceFile);
-        int nRecs = fillSmallBam(samWriter);
-        samWriter.close();
-
+        int nRecs;
+        try (final SAMFileWriter samWriter = factory.makeCRAMWriter(header, false, outputFile, referenceFile)) {
+            nRecs = fillSmallBam(samWriter);
+        }
         verifyWriterOutput(outputFile, new ReferenceSource(referenceFile), nRecs, true);
     }
 
@@ -263,11 +356,11 @@ public class SAMFileWriterFactoryTest {
         final SAMFileWriterFactory factory = createWriterFactoryWithOptions(header);
         final File referenceFile = new File(TEST_DATA_DIR, "hg19mini.fasta");
 
+        int nRecs;
         // Note: does not honor factory settings for CREATE_MD5 or CREATE_INDEX.
-        final SAMFileWriter samWriter = factory.makeCRAMWriter(header, new FileOutputStream(outputFile), referenceFile);
-        int nRecs = fillSmallBam(samWriter);
-        samWriter.close();
-
+        try (final SAMFileWriter samWriter = factory.makeCRAMWriter(header, new FileOutputStream(outputFile), referenceFile)) {
+            nRecs = fillSmallBam(samWriter);
+        }
         verifyWriterOutput(outputFile, new ReferenceSource(referenceFile), nRecs, false);
     }
 
@@ -278,10 +371,11 @@ public class SAMFileWriterFactoryTest {
         final SAMFileWriterFactory factory = createWriterFactoryWithOptions(header);
         final File referenceFile = new File(TEST_DATA_DIR, "hg19mini.fasta");
 
+        int nRecs;
         // Defaults to preSorted==true
-        final SAMFileWriter samWriter = factory.makeCRAMWriter(header, outputFile, referenceFile);
-        int nRecs = fillSmallBam(samWriter);
-        samWriter.close();
+        try (final SAMFileWriter samWriter = factory.makeCRAMWriter(header, outputFile, referenceFile)) {
+            nRecs = fillSmallBam(samWriter);
+        }
 
         verifyWriterOutput(outputFile, new ReferenceSource(referenceFile), nRecs, true);
     }
diff --git a/src/test/java/htsjdk/samtools/SAMFlagTest.java b/src/test/java/htsjdk/samtools/SAMFlagTest.java
index 7b5a553..86dd8f0 100644
--- a/src/test/java/htsjdk/samtools/SAMFlagTest.java
+++ b/src/test/java/htsjdk/samtools/SAMFlagTest.java
@@ -24,10 +24,11 @@
  */
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
-public class SAMFlagTest {
+public class SAMFlagTest extends HtsjdkTest {
     @Test
     public void testFlags() {
         Assert.assertTrue(SAMFlag.getFlags(83).contains(SAMFlag.READ_PAIRED));
diff --git a/src/test/java/htsjdk/samtools/SAMIntegerTagTest.java b/src/test/java/htsjdk/samtools/SAMIntegerTagTest.java
index 133062a..3fa38df 100644
--- a/src/test/java/htsjdk/samtools/SAMIntegerTagTest.java
+++ b/src/test/java/htsjdk/samtools/SAMIntegerTagTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.cram.ref.ReferenceSource;
 import htsjdk.samtools.util.BinaryCodec;
 import htsjdk.samtools.util.CloserUtil;
@@ -45,7 +46,7 @@ import java.util.Map;
  *
  * @author alecw at broadinstitute.org
  */
-public class SAMIntegerTagTest {
+public class SAMIntegerTagTest extends HtsjdkTest {
     private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools/SAMIntegerTagTest");
 
     private static final String BYTE_TAG = "BY";
diff --git a/src/test/java/htsjdk/samtools/BinTest.java b/src/test/java/htsjdk/samtools/SAMProgramRecordTest.java
similarity index 69%
copy from src/test/java/htsjdk/samtools/BinTest.java
copy to src/test/java/htsjdk/samtools/SAMProgramRecordTest.java
index 271a411..99a26cc 100644
--- a/src/test/java/htsjdk/samtools/BinTest.java
+++ b/src/test/java/htsjdk/samtools/SAMProgramRecordTest.java
@@ -1,7 +1,7 @@
 /*
  * The MIT License
  *
- * Copyright (c) 2009 The Broad Institute
+ * Copyright (c) 2017 The Broad Institute
  *
  * Permission is hereby granted, free of charge, to any person obtaining a copy
  * of this software and associated documentation files (the "Software"), to deal
@@ -21,19 +21,23 @@
  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
  * THE SOFTWARE.
  */
-
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
-import java.util.Collections;
+/**
+ * Test for SAMReadGroupRecordTest
+ */
+public class SAMProgramRecordTest extends HtsjdkTest {
 
-public class BinTest {
     @Test
-    public void testEmptyBin() {
-        // Construct a new empty bin and ensure that the bin list is empty, not null.
-        Bin bin = new Bin(1,4681);
-        Assert.assertEquals(bin.getChunkList(),Collections.<Chunk>emptyList(),"getChunkList() in an empty bin should return an empty list.");
+    public void testGetSAMString() {
+        SAMProgramRecord r = new SAMProgramRecord("SW-eIV");
+        r.setProgramName("telnet");
+        r.setProgramVersion("0.17-40");
+        r.setCommandLine("telnet towel.blinkenlights.nl");
+        Assert.assertEquals("@PG\tID:SW-eIV\tPN:telnet\tVN:0.17-40\tCL:telnet towel.blinkenlights.nl", r.getSAMString());
     }
 }
diff --git a/src/test/java/htsjdk/samtools/SAMReadGroupRecordTest.java b/src/test/java/htsjdk/samtools/SAMReadGroupRecordTest.java
new file mode 100644
index 0000000..5dde03c
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/SAMReadGroupRecordTest.java
@@ -0,0 +1,148 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2017 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools;
+
+import htsjdk.HtsjdkTest;
+import htsjdk.samtools.util.Iso8601Date;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.Date;
+import java.util.function.BiConsumer;
+import java.util.function.Function;
+
+/**
+ * Test for SAMReadGroupRecordTest
+ */
+public class SAMReadGroupRecordTest extends HtsjdkTest {
+
+    @Test
+    public void testGetSAMString() {
+        SAMReadGroupRecord r = new SAMReadGroupRecord("rg1");
+        r.setSample("mysample");
+        r.setPlatform("ILLUMINA");
+        r.setDescription("my description");
+        Assert.assertEquals("@RG\tID:rg1\tSM:mysample\tPL:ILLUMINA\tDS:my description", r.getSAMString());
+    }
+
+    @Test
+    public void testReadGroupIdGetters() throws Exception {
+        final SAMReadGroupRecord rg = new SAMReadGroupRecord("rg1");
+        Assert.assertEquals(rg.getId(), "rg1");
+        Assert.assertEquals(rg.getReadGroupId(), "rg1");
+    }
+
+    @DataProvider
+    public Object[][] gettersAndSetters() {
+        final SAMReadGroupRecord rg = new SAMReadGroupRecord("rg");
+        return new Object[][] {
+                {rg, "sample",
+                        (BiConsumer<SAMReadGroupRecord, String>) SAMReadGroupRecord::setSample,
+                        (Function<SAMReadGroupRecord, String>) SAMReadGroupRecord::getSample},
+                {rg, "library",
+                        (BiConsumer<SAMReadGroupRecord, String>) SAMReadGroupRecord::setLibrary,
+                        (Function<SAMReadGroupRecord, String>) SAMReadGroupRecord::getLibrary},
+                {rg, "library",
+                        (BiConsumer<SAMReadGroupRecord, String>) SAMReadGroupRecord::setPlatformUnit,
+                        (Function<SAMReadGroupRecord, String>) SAMReadGroupRecord::getPlatformUnit},
+                {rg, "platform",
+                        (BiConsumer<SAMReadGroupRecord, String>) SAMReadGroupRecord::setPlatform,
+                        (Function<SAMReadGroupRecord, String>) SAMReadGroupRecord::getPlatform},
+                {rg, new Iso8601Date(new Date()),
+                        (BiConsumer<SAMReadGroupRecord, Date>) SAMReadGroupRecord::setRunDate,
+                        (Function<SAMReadGroupRecord, Date>) SAMReadGroupRecord::getRunDate},
+                {rg, "flow_order",
+                        (BiConsumer<SAMReadGroupRecord, String>) SAMReadGroupRecord::setFlowOrder,
+                        (Function<SAMReadGroupRecord, String>) SAMReadGroupRecord::getFlowOrder},
+                {rg, "key_sequence",
+                        (BiConsumer<SAMReadGroupRecord, String>) SAMReadGroupRecord::setKeySequence,
+                        (Function<SAMReadGroupRecord, String>) SAMReadGroupRecord::getKeySequence},
+                {rg, "sequencing_center",
+                        (BiConsumer<SAMReadGroupRecord, String>) SAMReadGroupRecord::setSequencingCenter,
+                        (Function<SAMReadGroupRecord, String>) SAMReadGroupRecord::getSequencingCenter},
+                {rg, "description",
+                        (BiConsumer<SAMReadGroupRecord, String>) SAMReadGroupRecord::setDescription,
+                        (Function<SAMReadGroupRecord, String>) SAMReadGroupRecord::getDescription},
+                {rg, 10,
+                        (BiConsumer<SAMReadGroupRecord, Integer>) SAMReadGroupRecord::setPredictedMedianInsertSize,
+                        (Function<SAMReadGroupRecord, Integer>) SAMReadGroupRecord::getPredictedMedianInsertSize},
+                {rg, "program_group",
+                        (BiConsumer<SAMReadGroupRecord, String>) SAMReadGroupRecord::setProgramGroup,
+                        (Function<SAMReadGroupRecord, String>) SAMReadGroupRecord::getProgramGroup},
+                {rg, "platform_model",
+                        (BiConsumer<SAMReadGroupRecord, String>) SAMReadGroupRecord::setPlatformModel,
+                        (Function<SAMReadGroupRecord, String>) SAMReadGroupRecord::getPlatformModel}
+        };
+    }
+
+    @Test(dataProvider = "gettersAndSetters")
+    public <T> void testGetterAndSetter(final SAMReadGroupRecord record, final T value,
+            final BiConsumer<SAMReadGroupRecord, T> setter,
+            final Function<SAMReadGroupRecord, T> getter) {
+        Assert.assertNull(getter.apply(record));
+        setter.accept(record, value);
+        Assert.assertEquals(getter.apply(record), value);
+        setter.accept(record, null);
+        Assert.assertNull(getter.apply(record));
+    }
+
+    @Test
+    public void testSetNonIso8601Date() throws Exception {
+        final SAMReadGroupRecord rg = new SAMReadGroupRecord("rg1");
+        // set not ISO 8601 date
+        final Date date = new Date();
+        rg.setRunDate(date);
+        // and assert that it is correctly wrapped
+        Assert.assertEquals(rg.getRunDate(), new Iso8601Date(date));
+    }
+
+
+    @DataProvider
+    public Object[][] readGroupsForEquals() {
+        final SAMReadGroupRecord empty = new SAMReadGroupRecord("empty");
+        final SAMReadGroupRecord withSample = new SAMReadGroupRecord("rg1");
+        withSample.setSample("sample1");
+        return new Object[][] {
+                // same object
+                {empty, empty, true},
+                {withSample, withSample, true},
+                // null or different class
+                {empty, null, false},
+                {empty, empty.getId(), false},
+                // different information set
+                {empty, withSample, false},
+                {withSample, empty, false},
+        };
+    }
+
+    @Test(dataProvider = "readGroupsForEquals")
+    public void testEqualsAndHashcode(final SAMReadGroupRecord rg, final Object other, final boolean isEqual) throws Exception {
+        Assert.assertEquals(rg.equals(other), isEqual);
+        if (isEqual) {
+            Assert.assertEquals(rg.hashCode(), other.hashCode());
+        }
+    }
+
+}
diff --git a/src/test/java/htsjdk/samtools/SAMRecordDuplicateComparatorTest.java b/src/test/java/htsjdk/samtools/SAMRecordDuplicateComparatorTest.java
index cb50925..99d187a 100644
--- a/src/test/java/htsjdk/samtools/SAMRecordDuplicateComparatorTest.java
+++ b/src/test/java/htsjdk/samtools/SAMRecordDuplicateComparatorTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
@@ -37,7 +38,7 @@ import java.util.List;
  *
  * @author nhomer
  */
-public class SAMRecordDuplicateComparatorTest {
+public class SAMRecordDuplicateComparatorTest extends HtsjdkTest {
     
     private final static SAMRecordDuplicateComparator comparator = new SAMRecordDuplicateComparator();
 
diff --git a/src/test/java/htsjdk/samtools/util/PositionalOutputStreamTest.java b/src/test/java/htsjdk/samtools/SAMRecordQueryHashComparatorTest.java
similarity index 51%
copy from src/test/java/htsjdk/samtools/util/PositionalOutputStreamTest.java
copy to src/test/java/htsjdk/samtools/SAMRecordQueryHashComparatorTest.java
index 49de11d..231d1c3 100644
--- a/src/test/java/htsjdk/samtools/util/PositionalOutputStreamTest.java
+++ b/src/test/java/htsjdk/samtools/SAMRecordQueryHashComparatorTest.java
@@ -1,7 +1,7 @@
 /*
  * The MIT License (MIT)
  *
- * Copyright (c) 2015 Daniel Gómez-Sánchez
+ * Copyright (c) 2017 Daniel Gomez-Sanchez
  *
  * Permission is hereby granted, free of charge, to any person obtaining a copy
  * of this software and associated documentation files (the "Software"), to deal
@@ -22,41 +22,35 @@
  * SOFTWARE.
  */
 
-package htsjdk.samtools.util;
+package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
-import java.io.IOException;
-import java.io.OutputStream;
-
 /**
  * @author Daniel Gomez-Sanchez (magicDGS)
  */
-public class PositionalOutputStreamTest {
+public class SAMRecordQueryHashComparatorTest extends HtsjdkTest {
+
+    private static SAMRecordQueryHashComparator COMPARATOR = new SAMRecordQueryHashComparator();
 
     @Test
-    public void basicPositionTest() throws Exception {
-        // wrapped null output stream to check
-        final PositionalOutputStream wrapped = new PositionalOutputStream(new OutputStream() {
-            @Override
-            public void write(int b) throws IOException {}
-        });
-        int position = 0;
-        // check that we start at position 0
-        Assert.assertEquals(wrapped.getPosition(), position);
-        // check that write one int just add one
-        wrapped.write(100);
-        Assert.assertEquals(wrapped.getPosition(), ++position);
-        // check that write a byte array adds its length
-        final byte[] bytes = new byte[]{1, 3, 5, 7};
-        wrapped.write(bytes);
-        position += bytes.length;
-        Assert.assertEquals(wrapped.getPosition(), position);
-        // check that write just some bytes from an array adds its length
-        wrapped.write(bytes, 2, 2);
-        position += 2;
-        Assert.assertEquals(wrapped.getPosition(), position);
+    public void testCompareDifferentNames() throws Exception {
+        final SAMRecord a = new SAMRecord(null);
+        a.setReadName("A");
+        final SAMRecord b = new SAMRecord(null);
+        b.setReadName("B");
+        // hashes are providing a different order in this case
+        Assert.assertTrue(COMPARATOR.compare(a, b) != 0);
+        Assert.assertTrue(COMPARATOR.compare(b, a) != 0);
     }
 
-}
\ No newline at end of file
+    // with equal names, it delegates to the SAMRecordQuerynameComparator methods
+    // so this should always provide the same result
+    @Test(dataProvider = "equalNameComparisonData", dataProviderClass = SAMRecordQueryNameComparatorTest.class)
+    public void testCompareEqualNames(final SAMRecord record1, final SAMRecord record2, final int sign) throws Exception {
+        final int comparisonResult = COMPARATOR.compare(record1, record2);
+        Assert.assertEquals(Integer.signum(comparisonResult), sign);
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/SAMRecordQueryNameComparatorTest.java b/src/test/java/htsjdk/samtools/SAMRecordQueryNameComparatorTest.java
new file mode 100644
index 0000000..37d18a7
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/SAMRecordQueryNameComparatorTest.java
@@ -0,0 +1,124 @@
+/*
+ * The MIT License (MIT)
+ *
+ * Copyright (c) 2017 Daniel Gomez-Sanchez
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package htsjdk.samtools;
+
+import htsjdk.HtsjdkTest;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.function.Consumer;
+
+/**
+ * @author Daniel Gomez-Sanchez (magicDGS)
+ */
+public class SAMRecordQueryNameComparatorTest extends HtsjdkTest {
+
+    private final static SAMRecordQueryNameComparator COMPARATOR = new SAMRecordQueryNameComparator();
+
+    // this test is separated to be able to use the data provider for the SAMRecordQueryHashComparator
+    @Test
+    public void testCompareDifferentNames() throws Exception {
+        final SAMRecord a = new SAMRecord(null);
+        a.setReadName("A");
+        final SAMRecord b = new SAMRecord(null);
+        b.setReadName("B");
+        Assert.assertTrue(COMPARATOR.compare(a, b) < 0);
+        Assert.assertTrue(COMPARATOR.compare(b, a) > 0);
+    }
+
+    private static SAMRecord copyAndSet(final SAMRecord record, final Consumer<SAMRecord> setParams) {
+        final SAMRecord copy = record.deepCopy();
+        setParams.accept(copy);
+        return copy;
+    }
+
+
+    // this test cases are separated for the different names comparison for re-use with SAMRecordQueryHashComparator
+    @DataProvider
+    public static Object[][] equalNameComparisonData() {
+        // base record with the information used in the comparator:
+        // - read name A
+        // - positive strand -> default
+        // - unpaired (without first/second of pair flags) -> explicitly set
+        // - primary alignment (and not supplementary) -> explicitly set
+        // - no hit index (HI tag) -> default
+        final SAMRecord record = new SAMRecord(null);
+        record.setReadName("A");
+        record.setReadPairedFlag(false);
+        record.setFirstOfPairFlag(false);
+        record.setSecondOfPairFlag(false);
+        // primary/secondary/supplementary alignments
+        record.setNotPrimaryAlignmentFlag(false);
+        record.setSupplementaryAlignmentFlag(false);
+
+        // record1, record2, comparison value
+        return new Object[][] {
+                // same record is equals after comparing all the fields
+                {record, record, 0},
+
+                // upaired vs. paired
+                {record, copyAndSet(record, (r) -> r.setReadPairedFlag(true)), 1},
+                {copyAndSet(record, (r) -> r.setReadPairedFlag(true)), record, -1},
+                // first/second of pair in natural order
+                {copyAndSet(record, r -> {r.setReadPairedFlag(true); r.setFirstOfPairFlag(true);}),
+                        copyAndSet(record, r -> {r.setReadPairedFlag(true); r.setSecondOfPairFlag(true);}),
+                        -1},
+                {copyAndSet(record, r -> {r.setReadPairedFlag(true); r.setSecondOfPairFlag(true);}),
+                    copyAndSet(record, r -> {r.setReadPairedFlag(true); r.setFirstOfPairFlag(true);}),
+                        1},
+
+                // negative strand is the last
+                {record, copyAndSet(record, r -> r.setReadNegativeStrandFlag(true)), -1},
+
+                // primary alignment is first compared to not primary
+                {record, copyAndSet(record, r -> r.setNotPrimaryAlignmentFlag(true)), -1},
+                {copyAndSet(record, r -> r.setNotPrimaryAlignmentFlag(true)), record, 1},
+                // secondary alignment is last compared to primary
+                {record, copyAndSet(record, r -> r.setSupplementaryAlignmentFlag(true)), -1},
+                {copyAndSet(record, r -> r.setSupplementaryAlignmentFlag(true)), record, 1},
+
+                // the one with HI tag is first if the other is null
+                {record, copyAndSet(record, r -> r.setAttribute(SAMTag.HI.name(), 1)), -1},
+                {copyAndSet(record, r -> r.setAttribute(SAMTag.HI.name(), 1)), record, 1},
+                // if both have HI tag, order by it
+                {copyAndSet(record, r -> r.setAttribute(SAMTag.HI.name(), 1)),
+                        copyAndSet(record, r -> r.setAttribute(SAMTag.HI.name(), 1)), 0},
+                {copyAndSet(record, r -> r.setAttribute(SAMTag.HI.name(), 1)),
+                        copyAndSet(record, r -> r.setAttribute(SAMTag.HI.name(), 2)), -1},
+                {copyAndSet(record, r -> r.setAttribute(SAMTag.HI.name(), 16)),
+                        copyAndSet(record, r -> r.setAttribute(SAMTag.HI.name(), 5)), 1}
+        };
+    }
+
+
+
+    @Test(dataProvider = "equalNameComparisonData")
+    public void testCompareEqualNames(final SAMRecord record1, final SAMRecord record2, final int sign) throws Exception {
+        final int comparisonResult = COMPARATOR.compare(record1, record2);
+        Assert.assertEquals(Integer.signum(comparisonResult),sign);
+    }
+
+}
diff --git a/src/test/java/htsjdk/samtools/SAMRecordUnitTest.java b/src/test/java/htsjdk/samtools/SAMRecordUnitTest.java
index 951ecee..e9b599b 100644
--- a/src/test/java/htsjdk/samtools/SAMRecordUnitTest.java
+++ b/src/test/java/htsjdk/samtools/SAMRecordUnitTest.java
@@ -24,6 +24,7 @@
 
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.BinaryCodec;
 import htsjdk.samtools.util.TestUtil;
 import org.testng.Assert;
@@ -34,7 +35,7 @@ import java.io.*;
 import java.util.Arrays;
 import java.util.List;
 
-public class SAMRecordUnitTest {
+public class SAMRecordUnitTest extends HtsjdkTest {
 
     @DataProvider(name = "serializationTestData")
     public Object[][] getSerializationTestData() {
@@ -462,7 +463,7 @@ public class SAMRecordUnitTest {
     }
 
     private SAMRecord createTestRecordHelper() {
-        return new SAMRecordSetBuilder().addFrag("test", 0, 1, false, false, "3S9M", null, 2);
+        return new SAMRecordSetBuilder().addFrag("test", 0, 1, false, false, "3S33M", null, 2);
     }
 
     @Test
@@ -775,7 +776,7 @@ public class SAMRecordUnitTest {
     }
 
     @Test
-    private void testNullHeaderDeepCopy() {
+    public void testNullHeaderDeepCopy() {
         SAMRecord sam = createTestRecordHelper();
         sam.setHeader(null);
         final SAMRecord deepCopy = sam.deepCopy();
@@ -804,13 +805,13 @@ public class SAMRecordUnitTest {
     }
 
     @Test
-    private void testNullHeadGetCigarSAM() {
-        SAMRecord sam = createTestRecordHelper();
+    public void testNullHeadGetCigarSAM() {
+        final SAMRecord sam = createTestRecordHelper();
         testNullHeaderCigar(sam);
     }
 
     @Test
-    private void testNullHeadGetCigarBAM() {
+    public void testNullHeadGetCigarBAM() {
         SAMRecord sam = createTestRecordHelper();
         SAMRecordFactory factory = new DefaultSAMRecordFactory();
         BAMRecord bamRec = factory.createBAMRecord(
@@ -1038,4 +1039,67 @@ public class SAMRecordUnitTest {
 
         return(rec);
     }
+
+    @DataProvider
+    public Object [][] readBasesArrayGetReadLengthData() {
+        return new Object[][]{
+                { null, 0 },
+                { SAMRecord.NULL_SEQUENCE, 0 },
+                { new byte[] {'A', 'C'}, 2 }
+        };
+    }
+
+    @Test(dataProvider = "readBasesArrayGetReadLengthData")
+    public void testReadBasesGetReadLength(final byte[] readBases, final int readLength) {
+        final SAMRecord sam = createTestRecordHelper();
+        sam.setReadBases(readBases);
+        Assert.assertEquals(sam.getReadLength(), readLength);
+    }
+
+    @DataProvider
+    public Object [][] readBasesStringGetReadLengthData() {
+        return new Object[][]{
+                { null, 0 },
+                { SAMRecord.NULL_SEQUENCE_STRING, 0 },
+                { "AC", 2 }
+        };
+    }
+
+    @Test(dataProvider = "readBasesStringGetReadLengthData")
+    public void testReadStringGetReadLength(final String readBases, final int readLength) {
+        final SAMRecord sam = createTestRecordHelper();
+        sam.setReadString(readBases);
+        Assert.assertEquals(sam.getReadLength(), readLength);
+    }
+
+    @DataProvider(name = "attributeAccessTestData")
+    private Object[][] hasAttributeTestData() throws IOException {
+        final SamReader reader = SamReaderFactory.makeDefault().open(new File("src/test/resources/htsjdk/samtools/SAMIntegerTagTest/variousAttributes.sam"));
+        final SAMRecord samRecordWithAttributes = reader.iterator().next();
+        final SAMRecord samRecordWithoutAnyAttributes = new SAMRecord(reader.getFileHeader());
+        reader.close();
+
+        return new Object[][] {
+                {samRecordWithAttributes, "MF", true},
+                {samRecordWithAttributes, "Nm", true},
+                {samRecordWithAttributes, "H0", true},
+                {samRecordWithAttributes, "H1", true},
+                {samRecordWithAttributes, "SB", true},
+                {samRecordWithAttributes, "UB", true},
+                {samRecordWithAttributes, "SS", true},
+                {samRecordWithAttributes, "US", true},
+                {samRecordWithAttributes, "SI", true},
+                {samRecordWithAttributes, "I2", true},
+                {samRecordWithAttributes, "UI", true},
+
+                {samRecordWithAttributes, "AS", false},
+
+                {samRecordWithoutAnyAttributes, "RG", false}
+        };
+    }
+
+    @Test(dataProvider = "attributeAccessTestData")
+    public void testHasAttribute(final SAMRecord samRecord, final String tag, final boolean expectedHasAttribute) {
+        Assert.assertEquals(samRecord.hasAttribute(tag), expectedHasAttribute);
+    }
 }
diff --git a/src/test/java/htsjdk/samtools/SAMSequenceDictionaryCodecTest.java b/src/test/java/htsjdk/samtools/SAMSequenceDictionaryCodecTest.java
index 32de1cd..f667245 100644
--- a/src/test/java/htsjdk/samtools/SAMSequenceDictionaryCodecTest.java
+++ b/src/test/java/htsjdk/samtools/SAMSequenceDictionaryCodecTest.java
@@ -24,23 +24,24 @@
 
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
+import htsjdk.samtools.util.BufferedLineReader;
 import htsjdk.samtools.util.LineReader;
-import htsjdk.samtools.util.StringLineReader;
 import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;
 
-import javax.sound.sampled.Line;
 import java.io.BufferedWriter;
 import java.io.StringWriter;
 import java.util.List;
 import java.util.Random;
 
-import static org.testng.Assert.*;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertTrue;
 
 /**
  * @author Pavel_Silin at epam.com, EPAM Systems, Inc. <www.epam.com>
  */
-public class SAMSequenceDictionaryCodecTest {
+public class SAMSequenceDictionaryCodecTest extends HtsjdkTest {
 
     private static final Random random = new Random();
     private SAMSequenceDictionary dictionary;
@@ -67,11 +68,11 @@ public class SAMSequenceDictionaryCodecTest {
         try {
             codec.encode(dictionary);
             bufferedWriter.close();
-            readerOne = new StringLineReader(writer.toString());
+            readerOne = BufferedLineReader.fromString(writer.toString());
             SAMSequenceDictionary actual = codec.decode(readerOne, null);
             assertEquals(actual, dictionary);
 
-            readerTwo = new StringLineReader(writer.toString());
+            readerTwo = BufferedLineReader.fromString(writer.toString());
 
             String line = readerTwo.readLine();
             assertTrue(line.startsWith("@HD"));
@@ -99,10 +100,10 @@ public class SAMSequenceDictionaryCodecTest {
             codec.encodeHeaderLine(false);
             sequences.forEach(codec::encodeSequenceRecord);
             bufferedWriter.close();
-            readerOne = new StringLineReader(writer.toString());
+            readerOne = BufferedLineReader.fromString(writer.toString());
             SAMSequenceDictionary actual = codec.decode(readerOne, null);
             assertEquals(actual, dictionary);
-            readerTwo = new StringLineReader(writer.toString());
+            readerTwo = BufferedLineReader.fromString(writer.toString());
 
             String line = readerTwo.readLine();
             assertTrue(line.startsWith("@HD"));
diff --git a/src/test/java/htsjdk/samtools/SAMSequenceDictionaryTest.java b/src/test/java/htsjdk/samtools/SAMSequenceDictionaryTest.java
index 0b1a507..a8e60ed 100644
--- a/src/test/java/htsjdk/samtools/SAMSequenceDictionaryTest.java
+++ b/src/test/java/htsjdk/samtools/SAMSequenceDictionaryTest.java
@@ -26,23 +26,22 @@
 
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.JAXBException;
+import javax.xml.bind.Marshaller;
+import javax.xml.bind.Unmarshaller;
 import java.io.StringReader;
 import java.io.StringWriter;
-import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 
-import javax.xml.bind.JAXBContext;
-import javax.xml.bind.JAXBException;
-import javax.xml.bind.Marshaller;
-import javax.xml.bind.Unmarshaller;
-
-public class SAMSequenceDictionaryTest {
+public class SAMSequenceDictionaryTest extends HtsjdkTest {
     @Test
     public void testAliases() {
         final SAMSequenceRecord ssr1 = new SAMSequenceRecord("1", 1);
@@ -144,4 +143,27 @@ public class SAMSequenceDictionaryTest {
             throw new Exception("Expected to not be able to merge dictionaries, but was able");
         }
     }
+
+    @DataProvider
+    public Object[][] testIsSameDictionaryData() {
+
+        final SAMSequenceRecord rec1, rec2;
+        rec1 = new SAMSequenceRecord("chr1", 100);
+        rec2 = new SAMSequenceRecord("chr2", 101);
+
+        return new Object[][]{
+                new Object[]{Arrays.asList(rec1), Arrays.asList(rec1), true},
+                new Object[]{Arrays.asList(rec1), Arrays.asList(rec2), false},
+                new Object[]{Arrays.asList(rec1, rec2), Arrays.asList(rec1), false}
+        };
+    }
+
+    @Test(dataProvider = "testIsSameDictionaryData")
+    public void testIsSameDictionary(final List<SAMSequenceRecord> recs1, final List<SAMSequenceRecord> recs2, final boolean isSameDictionary) {
+
+        final SAMSequenceDictionary dict1 = new SAMSequenceDictionary(recs1);
+        final SAMSequenceDictionary dict2 = new SAMSequenceDictionary(recs2);
+
+        Assert.assertEquals(dict1.isSameDictionary(dict2), isSameDictionary);
+    }
 }
diff --git a/src/test/java/htsjdk/samtools/SAMSequenceRecordTest.java b/src/test/java/htsjdk/samtools/SAMSequenceRecordTest.java
new file mode 100644
index 0000000..89e6121
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/SAMSequenceRecordTest.java
@@ -0,0 +1,86 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2017 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools;
+
+import htsjdk.HtsjdkTest;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.Arrays;
+
+/**
+ * Test for SAMReadGroupRecordTest
+ */
+public class SAMSequenceRecordTest extends HtsjdkTest {
+
+    @Test
+    public void testGetSAMString() {
+        final SAMSequenceRecord r = new SAMSequenceRecord("chr5_but_without_a_prefix", 271828);
+        r.setSpecies("Psephophorus terrypratchetti");
+        r.setAssembly("GRCt01");
+        r.setMd5("7a6dd3d307de916b477e7bf304ac22bc");
+        Assert.assertEquals("@SQ\tSN:chr5_but_without_a_prefix\tLN:271828\tSP:Psephophorus terrypratchetti\tAS:GRCt01\tM5:7a6dd3d307de916b477e7bf304ac22bc", r.getSAMString());
+    }
+
+    @DataProvider
+    public Object[][] testIsSameSequenceData() {
+        final SAMSequenceRecord rec1 = new SAMSequenceRecord("chr1", 100);
+        final SAMSequenceRecord rec2 = new SAMSequenceRecord("chr2", 101);
+        final SAMSequenceRecord rec3 = new SAMSequenceRecord("chr3", 0);
+        final SAMSequenceRecord rec4 = new SAMSequenceRecord("chr1", 100);
+
+        final String md5One = "1";
+        final String md5Two = "2";
+        final int index1 = 1;
+        final int index2 = 2;
+
+        return new Object[][]{
+                new Object[]{rec1, rec1, md5One, md5One, index1, index1, true},
+                new Object[]{rec1, null, md5One, md5One, index1, index1, false},
+                new Object[]{rec1, rec4, md5One, md5One, index1, index1, true},
+                new Object[]{rec1, rec4, md5One, md5One, index1, index2, false},
+                new Object[]{rec1, rec3, md5One, md5Two, index1, index1, false},
+                new Object[]{rec1, rec2, md5One, md5Two, index1, index1, false},
+                new Object[]{rec1, rec4, md5One, null, index1, index1, true},
+                new Object[]{rec1, rec4, null, md5One, index1, index1, true},
+                new Object[]{rec1, rec4, md5One, md5One, index1, index2, false}
+        };
+    }
+
+    @Test(dataProvider = "testIsSameSequenceData")
+    public void testIsSameSequence(final SAMSequenceRecord rec1 , final SAMSequenceRecord rec2, final String md5One, final String md5Two,
+                                   final int index1, final int index2, final boolean isSame) {
+        if (rec2 != null) {
+            rec2.setMd5(md5Two);
+            rec2.setSequenceIndex(index2);
+        }
+
+        if (rec1 != null) {
+            rec1.setMd5(md5One);
+            rec1.setSequenceIndex(index1);
+            Assert.assertEquals(rec1.isSameSequence(rec2), isSame);
+        }
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/SAMTextReaderTest.java b/src/test/java/htsjdk/samtools/SAMTextReaderTest.java
index c80924b..142eea3 100644
--- a/src/test/java/htsjdk/samtools/SAMTextReaderTest.java
+++ b/src/test/java/htsjdk/samtools/SAMTextReaderTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.CloseableIterator;
 import htsjdk.samtools.util.CloserUtil;
 import org.testng.Assert;
@@ -31,7 +32,7 @@ import org.testng.annotations.Test;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 
-public class SAMTextReaderTest {
+public class SAMTextReaderTest extends HtsjdkTest {
     // Simple input, spot check that parsed correctly, and make sure nothing blows up.
     @Test
     public void testBasic() throws Exception {
diff --git a/src/test/java/htsjdk/samtools/SAMTextWriterTest.java b/src/test/java/htsjdk/samtools/SAMTextWriterTest.java
index 123ab6b..5c9ff28 100644
--- a/src/test/java/htsjdk/samtools/SAMTextWriterTest.java
+++ b/src/test/java/htsjdk/samtools/SAMTextWriterTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
@@ -31,7 +32,7 @@ import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
 
-public class SAMTextWriterTest {
+public class SAMTextWriterTest extends HtsjdkTest {
 
     private SAMRecordSetBuilder getSAMReader(final boolean sortForMe, final SAMFileHeader.SortOrder sortOrder) {
         final SAMRecordSetBuilder ret = new SAMRecordSetBuilder(sortForMe, sortOrder);
diff --git a/src/test/java/htsjdk/samtools/SAMUtilsTest.java b/src/test/java/htsjdk/samtools/SAMUtilsTest.java
index 3be7e39..28e89f7 100644
--- a/src/test/java/htsjdk/samtools/SAMUtilsTest.java
+++ b/src/test/java/htsjdk/samtools/SAMUtilsTest.java
@@ -23,12 +23,15 @@
  */
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
+import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
+import java.util.Arrays;
 import java.util.List;
 
-public class SAMUtilsTest {
+public class SAMUtilsTest extends HtsjdkTest {
     @Test
     public void testCompareMapqs() {
         Assert.assertEquals(SAMUtils.compareMapqs(0, 0), 0);
@@ -244,7 +247,41 @@ public class SAMUtilsTest {
         Assert.assertEquals(other.getAttribute(SAMTagUtil.getSingleton().NM),null);
         Assert.assertEquals(other.getCigarString(),"8M2S");
         Assert.assertEquals(other.getInferredInsertSize(),-91);//100(mate) - 191(other)
+    }
+
+    @Test()
+    public void testBytesToCompressedBases() {
+        final byte[] bases = new byte[]{'=', 'a', 'A', 'c', 'C', 'g', 'G', 't', 'T', 'n', 'N', '.', 'M', 'm',
+                'R', 'r', 'S', 's', 'V', 'v', 'W', 'w', 'Y', 'y', 'H', 'h', 'K', 'k', 'D', 'd', 'B', 'b'};
+        final byte[] compressedBases = SAMUtils.bytesToCompressedBases(bases);
+        String expectedCompressedBases = "[1, 18, 36, 72, -113, -1, 51, 85, 102, 119, -103, -86, -69, -52, -35, -18]";
+        Assert.assertEquals(Arrays.toString(compressedBases), expectedCompressedBases);
+    }
+
+    @DataProvider
+    public Object[][] testBadBase() {
+        return new Object[][]{
+                {new byte[]{'>', 'A'}, '>'},
+                {new byte[]{'A', '>'} , '>'}
+        };
+    }
 
+    @Test(dataProvider = "testBadBase", expectedExceptions = IllegalArgumentException.class)
+    public void testBytesToCompressedBasesException(final byte[] bases, final char failingBase) {
+        try {
+            SAMUtils.bytesToCompressedBases(bases);
+        } catch ( final IllegalArgumentException ex ) {
+            Assert.assertTrue(ex.getMessage().contains(Character.toString(failingBase)));
+            throw ex;
+        }
     }
 
+    @Test
+    public void testCompressedBasesToBytes() {
+        final byte[] compressedBases = new byte[]{1, 18, 36, 72, -113, -1, 51, 85, 102, 119, -103, -86, -69, -52, -35, -18};
+        final byte[] bytes = SAMUtils.compressedBasesToBytes(2*compressedBases.length, compressedBases, 0);
+        final byte[] expectedBases = new byte[]{'=', 'A', 'A', 'C', 'C', 'G', 'G', 'T', 'T', 'N', 'N', 'N', 'M', 'M',
+                'R', 'R', 'S', 'S', 'V', 'V', 'W', 'W', 'Y', 'Y', 'H', 'H', 'K', 'K', 'D', 'D', 'B', 'B'};
+        Assert.assertEquals(new String(bytes), new String(expectedBases));
+    }
 }
diff --git a/src/test/java/htsjdk/samtools/SamFileHeaderMergerTest.java b/src/test/java/htsjdk/samtools/SamFileHeaderMergerTest.java
index 6e4fd75..5c55c0b 100644
--- a/src/test/java/htsjdk/samtools/SamFileHeaderMergerTest.java
+++ b/src/test/java/htsjdk/samtools/SamFileHeaderMergerTest.java
@@ -25,6 +25,7 @@
 
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.CloserUtil;
 import htsjdk.samtools.util.IOUtil;
 import htsjdk.samtools.util.SequenceUtil;
@@ -58,7 +59,7 @@ import static org.testng.Assert.assertEquals;
  * <p/>
  * Tests the ability of the SamFileHeaderMerger class to merge sequence dictionaries.
  */
-public class SamFileHeaderMergerTest {
+public class SamFileHeaderMergerTest extends HtsjdkTest {
 
     private static File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools");
 
diff --git a/src/test/java/htsjdk/samtools/SamFilesTest.java b/src/test/java/htsjdk/samtools/SamFilesTest.java
index 443a4d1..e7c1919 100644
--- a/src/test/java/htsjdk/samtools/SamFilesTest.java
+++ b/src/test/java/htsjdk/samtools/SamFilesTest.java
@@ -1,6 +1,8 @@
 package htsjdk.samtools;
 
 import java.nio.file.Path;
+
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
@@ -12,7 +14,7 @@ import java.io.IOException;
  * Test valid combinations of bam/cram vs bai/crai files.
  * Created by vadim on 10/08/2015.
  */
-public class SamFilesTest {
+public class SamFilesTest extends HtsjdkTest {
     private static final String TEST_DATA = "src/test/resources/htsjdk/samtools/BAMFileIndexTest/";
     private static final File BAM_FILE = new File(TEST_DATA + "index_test.bam");
 
diff --git a/src/test/java/htsjdk/samtools/SamFlagFieldTest.java b/src/test/java/htsjdk/samtools/SamFlagFieldTest.java
index f09e636..36008cf 100644
--- a/src/test/java/htsjdk/samtools/SamFlagFieldTest.java
+++ b/src/test/java/htsjdk/samtools/SamFlagFieldTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
@@ -7,7 +8,7 @@ import org.testng.annotations.Test;
 /**
  * @author nhomer
  */
-public class SamFlagFieldTest {
+public class SamFlagFieldTest extends HtsjdkTest {
 
     @Test
     public void testAllFlags() {
@@ -147,4 +148,4 @@ public class SamFlagFieldTest {
     public void testIllegalStringFlagCharacterExclamation(){
         SamFlagField.STRING.parse("pmMr!F1s");
     }
-}
\ No newline at end of file
+}
diff --git a/src/test/java/htsjdk/samtools/SamHeaderRecordComparatorTest.java b/src/test/java/htsjdk/samtools/SamHeaderRecordComparatorTest.java
index c11be38..da93add 100644
--- a/src/test/java/htsjdk/samtools/SamHeaderRecordComparatorTest.java
+++ b/src/test/java/htsjdk/samtools/SamHeaderRecordComparatorTest.java
@@ -24,11 +24,12 @@ package htsjdk.samtools;
  * THE SOFTWARE.
  */
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
-public class SamHeaderRecordComparatorTest {
+public class SamHeaderRecordComparatorTest extends HtsjdkTest {
 
 	@DataProvider(name="UsualSuspects")
 	public Object[][] createData() {
diff --git a/src/test/java/htsjdk/samtools/SamIndexesTest.java b/src/test/java/htsjdk/samtools/SamIndexesTest.java
index d13001f..f78b0f3 100644
--- a/src/test/java/htsjdk/samtools/SamIndexesTest.java
+++ b/src/test/java/htsjdk/samtools/SamIndexesTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.cram.CRAIEntry;
 import htsjdk.samtools.cram.CRAIIndex;
 import htsjdk.samtools.seekablestream.SeekableFileStream;
@@ -19,7 +20,7 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.zip.GZIPOutputStream;
 
-public class SamIndexesTest {
+public class SamIndexesTest extends HtsjdkTest {
 
     @Test
     public void testEmptyBai() throws IOException {
diff --git a/src/test/java/htsjdk/samtools/SamPairUtilTest.java b/src/test/java/htsjdk/samtools/SamPairUtilTest.java
index 80841c9..f5c288a 100644
--- a/src/test/java/htsjdk/samtools/SamPairUtilTest.java
+++ b/src/test/java/htsjdk/samtools/SamPairUtilTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SamPairUtil.SetMateInfoIterator;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
@@ -32,7 +33,7 @@ import java.util.ArrayList;
 import java.util.List;
 
 
-public class SamPairUtilTest {
+public class SamPairUtilTest extends HtsjdkTest {
 
     @Test(dataProvider = "testGetPairOrientation")
     public void testGetPairOrientation(final String testName,
diff --git a/src/test/java/htsjdk/samtools/SamReaderFactoryTest.java b/src/test/java/htsjdk/samtools/SamReaderFactoryTest.java
index 31ad5c2..c244f3c 100644
--- a/src/test/java/htsjdk/samtools/SamReaderFactoryTest.java
+++ b/src/test/java/htsjdk/samtools/SamReaderFactoryTest.java
@@ -1,11 +1,18 @@
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.cram.ref.ReferenceSource;
 import htsjdk.samtools.seekablestream.ISeekableStreamFactory;
 import htsjdk.samtools.seekablestream.SeekableFileStream;
 import htsjdk.samtools.seekablestream.SeekableHTTPStream;
 import htsjdk.samtools.seekablestream.SeekableStreamFactory;
 import htsjdk.samtools.util.*;
+import java.net.URI;
+import java.nio.ByteBuffer;
+import java.nio.channels.SeekableByteChannel;
+import java.nio.file.Paths;
+import java.util.function.Function;
+import htsjdk.samtools.util.zip.InflaterFactory;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
@@ -22,8 +29,9 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.function.BiFunction;
+import java.util.zip.Inflater;
 
-public class SamReaderFactoryTest {
+public class SamReaderFactoryTest extends HtsjdkTest {
     private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools");
 
     private static final Log LOG = Log.getInstance(SamReaderFactoryTest.class);
@@ -37,6 +45,34 @@ public class SamReaderFactoryTest {
         reader.close();
     }
 
+    @Test
+    public void variousFormatReaderInflatorFactoryTest() throws IOException {
+        final String inputFile = "compressed.bam";
+        final int[] inflateCalls = {0}; //Note: using an array is a HACK to fool the compiler
+        class MyInflater extends Inflater {
+            MyInflater(boolean gzipCompatible){
+                super(gzipCompatible);
+            }
+            @Override
+            public int inflate(byte[] b, int off, int len) throws java.util.zip.DataFormatException {
+                inflateCalls[0]++;
+                return super.inflate(b, off, len);
+            }
+        }
+        final InflaterFactory myInflaterFactory = new InflaterFactory() {
+            @Override
+            public Inflater makeInflater(final boolean gzipCompatible) {
+                return new MyInflater(gzipCompatible);
+            }
+        };
+
+        final File input = new File(TEST_DATA_DIR, inputFile);
+        try (final SamReader reader = SamReaderFactory.makeDefault().inflaterFactory(myInflaterFactory).open(input)) {
+            for (final SAMRecord ignored : reader) { }
+        }
+        Assert.assertNotEquals(inflateCalls[0], 0, "Not using Inflater from InflateFactory on file : " + inputFile);
+    }
+
     private int countRecordsInQueryInterval(final SamReader reader, final QueryInterval query) {
         final SAMRecordIterator iter = reader.queryOverlapping(new QueryInterval[] { query });
         int count = 0;
@@ -59,6 +95,40 @@ public class SamReaderFactoryTest {
         return count;
     }
 
+    private static SeekableByteChannel addHeader(SeekableByteChannel input) {
+        try {
+        int total = (int)input.size();
+        final String comment = "@HD\tVN:1.0  SO:unsorted\n" +
+            "@SQ\tSN:chr1\tLN:101\n" +
+            "@SQ\tSN:chr2\tLN:101\n" +
+            "@SQ\tSN:chr3\tLN:101\n" +
+            "@RG\tID:0\tSM:JP was here\n";
+
+            byte[] commentBuf = comment.getBytes();
+        ByteBuffer buf = ByteBuffer.allocate(total + commentBuf.length);
+        buf.put(commentBuf);
+        input.position(0);
+        while (input.read(buf)>0) {
+            // read until EOF
+        }
+        buf.flip();
+        return new SeekableByteChannelFromBuffer(buf);
+        } catch (IOException x) {
+            throw new RuntimeException(x);
+        }
+    }
+
+    @Test
+    public void testWrap() throws IOException {
+        final Path input = Paths.get(TEST_DATA_DIR.getPath(), "noheader.sam");
+        final SamReader wrappedReader =
+            SamReaderFactory
+                .makeDefault()
+                .open(input, SamReaderFactoryTest::addHeader, null);
+        int records = countRecords(wrappedReader);
+        Assert.assertEquals(10, records);
+    }
+
     // See https://github.com/samtools/htsjdk/issues/76
     @Test(dataProvider = "queryIntervalIssue76TestCases")
     public void queryIntervalIssue76(final String sequenceName, final int start, final int end, final int expectedCount) throws IOException {
@@ -94,11 +164,13 @@ public class SamReaderFactoryTest {
         int samRecordsCreated;
         int bamRecordsCreated;
 
+        @Override
         public SAMRecord createSAMRecord(final SAMFileHeader header) {
             ++samRecordsCreated;
             return super.createSAMRecord(header);
         }
 
+        @Override
         public BAMRecord createBAMRecord(final SAMFileHeader header, final int referenceSequenceIndex, final int alignmentStart, final short readNameLength, final short mappingQuality, final int indexingBin, final int cigarLen, final int flags, final int readLen, final int mateReferenceSequenceIndex, final int mateAlignmentStart, final int insertSize, final byte[] variableLengthBlock) {
             ++bamRecordsCreated;
             return super.createBAMRecord(header, referenceSequenceIndex, alignmentStart, readNameLength, mappingQuality, indexingBin, cigarLen, flags, readLen, mateReferenceSequenceIndex, mateAlignmentStart, insertSize, variableLengthBlock);
@@ -194,7 +266,7 @@ public class SamReaderFactoryTest {
             case FILE:
                 return new FileInputResource(f);
             case PATH:
-                return new PathInputResource(f.toPath());
+                return new PathInputResource(f.toPath(), Function.identity());
             case URL:
                 return new UrlInputResource(url);
             case SEEKABLE_STREAM:
@@ -250,7 +322,6 @@ public class SamReaderFactoryTest {
         }
     }
 
-
     final Set<List<SAMRecord>> observedRecordOrdering1 = new HashSet<List<SAMRecord>>();
     final Set<List<SAMRecord>> observedRecordOrdering3 = new HashSet<List<SAMRecord>>();
     final Set<List<SAMRecord>> observedRecordOrdering20 = new HashSet<List<SAMRecord>>();
diff --git a/src/test/java/htsjdk/samtools/SamReaderSortTest.java b/src/test/java/htsjdk/samtools/SamReaderSortTest.java
index 584410f..4d71210 100755
--- a/src/test/java/htsjdk/samtools/SamReaderSortTest.java
+++ b/src/test/java/htsjdk/samtools/SamReaderSortTest.java
@@ -3,7 +3,7 @@ package htsjdk.samtools;
 /*
  * The MIT License
  *
- * Copyright (c) 2009 The Broad Institute
+ * Copyright (c) 2009-2016 The Broad Institute
  *
  * Permission is hereby granted, free of charge, to any person obtaining a copy
  * of this software and associated documentation files (the "Software"), to deal
@@ -24,6 +24,8 @@ package htsjdk.samtools;
  * THE SOFTWARE.
  */
 
+import htsjdk.HtsjdkTest;
+import htsjdk.samtools.cram.ref.ReferenceSource;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
@@ -35,11 +37,17 @@ import java.io.File;
  *
  * @author ktibbett at broadinstitute.org
  */
-public class SamReaderSortTest {
+public class SamReaderSortTest extends HtsjdkTest {
 
-    public static final String COORDINATE_SORTED_FILE = "src/test/resources/htsjdk/samtools/coordinate_sorted.sam";
-    public static final String QUERYNAME_SORTED_FILE = "src/test/resources/htsjdk/samtools/queryname_sorted.sam";
-    public static final String QUERYNAME_SORTED_NO_HEADER_SORT = "src/test/resources/htsjdk/samtools/unsorted.sam";
+    private static final String COORDINATE_SORTED_FILE = "src/test/resources/htsjdk/samtools/coordinate_sorted.sam";
+    private static final String QUERYNAME_SORTED_FILE = "src/test/resources/htsjdk/samtools/queryname_sorted.sam";
+    private static final String QUERYNAME_SORTED_NO_HEADER_SORT = "src/test/resources/htsjdk/samtools/unsorted.sam";
+    private static final String CRAM_FILE = "src/test/resources/htsjdk/samtools/cram_query_sorted.cram";
+    private static final String CRAM_REFERENCE = "src/test/resources/htsjdk/samtools/cram_query_sorted.fasta";
+    private static final String CRAM_FILE_COORDINATE = "src/test/resources/htsjdk/samtools/cram/ce#tag_depadded.2.1.cram";
+    private static final String CRAM_REFERENCE_COORDINATE = "src/test/resources/htsjdk/samtools/cram/ce.fa";
+    private static final String CRAM_FILE_UNSORTED = "src/test/resources/htsjdk/samtools/cram/xx#unsorted.3.0.cram";
+    private static final String CRAM_REFERENCE_UNSORTED = "src/test/resources/htsjdk/samtools/cram/xx.fa";
 
     @Test(expectedExceptions = IllegalStateException.class)
     public void testSortsDisagree() throws Exception {
@@ -93,6 +101,49 @@ public class SamReaderSortTest {
         }
     }
 
+    private CRAMFileReader getCramFileReader(String file, String fileReference) {
+        final ReferenceSource referenceSource = new ReferenceSource(new File(fileReference));
+        return new CRAMFileReader(new File(file), referenceSource);
+    }
+
+    @Test(dataProvider = "sortsCramWithoutIndex")
+    public void testCramSort(String file, String fileReference, SAMFileHeader.SortOrder order) throws Exception {
+        final CRAMFileReader cramFileReader = getCramFileReader(file, fileReference);
+        final SAMRecordIterator samRecordIterator = cramFileReader.getIterator().assertSorted(order);
+        Assert.assertTrue(samRecordIterator.hasNext());
+        while (samRecordIterator.hasNext()) {
+            Assert.assertNotNull(samRecordIterator.next());
+        }
+    }
+
+    @Test(dataProvider = "sortsFailCramWithoutIndex", expectedExceptions = IllegalStateException.class)
+    public void testCramSortFail(String file, String fileReference, SAMFileHeader.SortOrder order) throws Exception {
+        final CRAMFileReader cramFileReader = getCramFileReader(file, fileReference);
+        final SAMRecordIterator samRecordIterator = cramFileReader.getIterator().assertSorted(order);
+        Assert.assertTrue(samRecordIterator.hasNext());
+        while (samRecordIterator.hasNext()) {
+            Assert.assertNotNull(samRecordIterator.next());
+        }
+    }
+
+    @DataProvider(name = "sortsFailCramWithoutIndex")
+    public Object[][] getSortsFailCramWithoutIndex() {
+        return new Object[][]{
+                {CRAM_FILE, CRAM_REFERENCE, SAMFileHeader.SortOrder.coordinate},
+                {CRAM_FILE_COORDINATE, CRAM_REFERENCE_COORDINATE, SAMFileHeader.SortOrder.queryname},
+                {CRAM_FILE_UNSORTED, CRAM_REFERENCE_UNSORTED, SAMFileHeader.SortOrder.coordinate}
+        };
+    }
+
+    @DataProvider(name = "sortsCramWithoutIndex")
+    public Object[][] getSortsCramWithoutIndex() {
+        return new Object[][]{
+                {CRAM_FILE, CRAM_REFERENCE, SAMFileHeader.SortOrder.queryname},
+                {CRAM_FILE_COORDINATE, CRAM_REFERENCE_COORDINATE, SAMFileHeader.SortOrder.coordinate},
+                {CRAM_FILE_UNSORTED, CRAM_REFERENCE_UNSORTED, SAMFileHeader.SortOrder.unsorted}
+        };
+    }
+
     @DataProvider(name = "invalidSorts")
     public Object[][] getInvalidSorts() {
         return new Object[][]{
diff --git a/src/test/java/htsjdk/samtools/SamReaderTest.java b/src/test/java/htsjdk/samtools/SamReaderTest.java
index 093dffb..4d4d056 100644
--- a/src/test/java/htsjdk/samtools/SamReaderTest.java
+++ b/src/test/java/htsjdk/samtools/SamReaderTest.java
@@ -23,7 +23,7 @@
  */
 package htsjdk.samtools;
 
-import htsjdk.samtools.cram.CRAMException;
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.CloseableIterator;
 import htsjdk.samtools.util.CloserUtil;
 import org.testng.Assert;
@@ -32,7 +32,7 @@ import org.testng.annotations.Test;
 
 import java.io.File;
 
-public class SamReaderTest {
+public class SamReaderTest extends HtsjdkTest {
     private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools");
 
     @Test(dataProvider = "variousFormatReaderTestCases")
@@ -104,11 +104,13 @@ public class SamReaderTest {
         int samRecordsCreated;
         int bamRecordsCreated;
 
+        @Override
         public SAMRecord createSAMRecord(final SAMFileHeader header) {
             ++samRecordsCreated;
             return super.createSAMRecord(header);
         }
 
+        @Override
         public BAMRecord createBAMRecord(final SAMFileHeader header, final int referenceSequenceIndex, final int alignmentStart, final short readNameLength, final short mappingQuality, final int indexingBin, final int cigarLen, final int flags, final int readLen, final int mateReferenceSequenceIndex, final int mateAlignmentStart, final int insertSize, final byte[] variableLengthBlock) {
             ++bamRecordsCreated;
             return super.createBAMRecord(header, referenceSequenceIndex, alignmentStart, readNameLength, mappingQuality, indexingBin, cigarLen, flags, readLen, mateReferenceSequenceIndex, mateAlignmentStart, insertSize, variableLengthBlock);
diff --git a/src/test/java/htsjdk/samtools/SamSpecIntTest.java b/src/test/java/htsjdk/samtools/SamSpecIntTest.java
index 8305065..2ebc24e 100644
--- a/src/test/java/htsjdk/samtools/SamSpecIntTest.java
+++ b/src/test/java/htsjdk/samtools/SamSpecIntTest.java
@@ -24,6 +24,7 @@
 
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.CloserUtil;
 import org.testng.Assert;
 import org.testng.annotations.Test;
@@ -33,7 +34,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-public class SamSpecIntTest {
+public class SamSpecIntTest extends HtsjdkTest {
     private static final File SAM_INPUT = new File("src/test/resources/htsjdk/samtools/inttest.sam");
     private static final File BAM_INPUT = new File("src/test/resources/htsjdk/samtools/inttest.bam");
 
diff --git a/src/test/java/htsjdk/samtools/SamStreamsTest.java b/src/test/java/htsjdk/samtools/SamStreamsTest.java
index c92d6db..48a074a 100644
--- a/src/test/java/htsjdk/samtools/SamStreamsTest.java
+++ b/src/test/java/htsjdk/samtools/SamStreamsTest.java
@@ -24,6 +24,7 @@
 
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.seekablestream.SeekableFileStream;
 import htsjdk.samtools.seekablestream.SeekableStream;
 import htsjdk.samtools.seekablestream.SeekableStreamFactory;
@@ -34,7 +35,7 @@ import org.testng.annotations.Test;
 import java.io.*;
 import java.net.URL;
 
-public class SamStreamsTest {
+public class SamStreamsTest extends HtsjdkTest {
 
     private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools");
 
@@ -121,4 +122,4 @@ public class SamStreamsTest {
                 SeekableStreamFactory.getInstance().getStreamFor(new URL(resourceName));
         Assert.assertEquals(SamStreams.sourceLikeBam(strm), expected);
     }
-}
\ No newline at end of file
+}
diff --git a/src/test/java/htsjdk/samtools/SeekableByteChannelFromBuffer.java b/src/test/java/htsjdk/samtools/SeekableByteChannelFromBuffer.java
new file mode 100644
index 0000000..63b0369
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/SeekableByteChannelFromBuffer.java
@@ -0,0 +1,85 @@
+package htsjdk.samtools;
+
+import java.io.IOException;
+import java.nio.Buffer;
+import java.nio.ByteBuffer;
+import java.nio.channels.ClosedChannelException;
+import java.nio.channels.SeekableByteChannel;
+import java.nio.file.StandardOpenOption;
+
+/**
+ * A buffer-backed SeekableByteChannel, for testing.
+ */
+public class SeekableByteChannelFromBuffer implements SeekableByteChannel {
+
+  private ByteBuffer buf;
+  private boolean open = true;
+
+  public SeekableByteChannelFromBuffer(ByteBuffer buf) {
+    this.buf = buf;
+  }
+
+  @Override
+  public int read(ByteBuffer dst) throws IOException {
+    if (buf.position() == buf.limit()) {
+      // signal EOF
+      return -1;
+    }
+    int before = dst.position();
+    dst.put(buf);
+    return dst.position() - before;
+  }
+
+  @Override
+  public int write(ByteBuffer src) throws IOException {
+    throw new IOException("read-only channel");
+  }
+
+  @Override
+  public long position() throws IOException {
+    checkOpen();
+    return buf.position();
+  }
+
+  @Override
+  public SeekableByteChannel position(long newPosition) throws IOException {
+    checkOpen();
+    buf.position((int)newPosition);
+    return this;
+  }
+
+  @Override
+  public long size() throws IOException {
+    checkOpen();
+    return buf.limit();
+  }
+
+  @Override
+  public SeekableByteChannel truncate(long size) throws IOException {
+    checkOpen();
+    if (size <0) {
+      throw new IllegalArgumentException("negative size");
+    }
+    if (size > buf.limit()) {
+      throw new IllegalArgumentException("size larger than current");
+    }
+    buf.limit((int)size);
+    return null;
+  }
+
+  @Override
+  public boolean isOpen() {
+    return open;
+  }
+
+  @Override
+  public void close() throws IOException {
+    open = false;
+  }
+
+  private void checkOpen() throws IOException {
+    if (!open) {
+      throw new ClosedChannelException();
+    }
+  }
+}
diff --git a/src/test/java/htsjdk/samtools/SequenceNameTruncationAndValidationTest.java b/src/test/java/htsjdk/samtools/SequenceNameTruncationAndValidationTest.java
index 2c3a95c..01999c4 100644
--- a/src/test/java/htsjdk/samtools/SequenceNameTruncationAndValidationTest.java
+++ b/src/test/java/htsjdk/samtools/SequenceNameTruncationAndValidationTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.CloserUtil;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
@@ -36,7 +37,7 @@ import java.io.File;
  *
  * @author alecw at broadinstitute.org
  */
-public class SequenceNameTruncationAndValidationTest {
+public class SequenceNameTruncationAndValidationTest extends HtsjdkTest {
     private static File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools");
 
     @Test(expectedExceptions = {SAMException.class}, dataProvider = "badSequenceNames")
diff --git a/src/test/java/htsjdk/samtools/ValidateSamFileTest.java b/src/test/java/htsjdk/samtools/ValidateSamFileTest.java
index 4ce0b7a..8aac6e2 100644
--- a/src/test/java/htsjdk/samtools/ValidateSamFileTest.java
+++ b/src/test/java/htsjdk/samtools/ValidateSamFileTest.java
@@ -24,9 +24,11 @@
 
 package htsjdk.samtools;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.BamIndexValidator.IndexValidationStringency;
 import htsjdk.samtools.metrics.MetricBase;
 import htsjdk.samtools.metrics.MetricsFile;
+import htsjdk.samtools.reference.FastaSequenceFile;
 import htsjdk.samtools.reference.ReferenceSequence;
 import htsjdk.samtools.reference.ReferenceSequenceFile;
 import htsjdk.samtools.util.CloserUtil;
@@ -57,7 +59,7 @@ import java.util.Iterator;
  *
  * @author Doug Voet
  */
-public class ValidateSamFileTest {
+public class ValidateSamFileTest extends HtsjdkTest {
     private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools/ValidateSamFileTest");
     private static final int TERMINATION_GZIP_BLOCK_SIZE = 28;
     private static final int RANDOM_NUMBER_TRUNC_BYTE = 128;
@@ -70,6 +72,20 @@ public class ValidateSamFileTest {
     }
 
     @Test
+    public void testValidCRAMFileWithoutSeqDict() throws Exception {
+        final File reference = new File(TEST_DATA_DIR, "nm_tag_validation.fa");
+        final SamReader samReader = SamReaderFactory
+                .makeDefault()
+                .validationStringency(ValidationStringency.SILENT)
+                .referenceSequence(reference)
+                .open(new File(TEST_DATA_DIR, "nm_tag_validation.cram"));
+        final Histogram<String> results = executeValidation(samReader,
+                new FastaSequenceFile(reference, true),
+                IndexValidationStringency.EXHAUSTIVE);
+        Assert.assertTrue(!results.isEmpty());
+    }
+
+    @Test
     public void testSamFileVersion1pt5() throws Exception {
         final SamReader samReader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT).open(new File(TEST_DATA_DIR, "test_samfile_version_1pt5.bam"));
         final Histogram<String> results = executeValidation(samReader, null, IndexValidationStringency.EXHAUSTIVE);
@@ -103,7 +119,9 @@ public class ValidateSamFileTest {
         validator.validateSamFileVerbose(samBuilder.getSamReader(), null);
 
         final int lineCount = results.toString().split("\n").length;
-        Assert.assertEquals(lineCount, 11);
+        Assert.assertEquals(lineCount, 11); // 1 extra message added to indicate maximum number of errors
+        Assert.assertEquals(validator.getNumErrors(), 6);
+        Assert.assertEquals(validator.getNumWarnings(), 4);
     }
 
     @Test
@@ -129,6 +147,7 @@ public class ValidateSamFileTest {
         Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_FLAG_FIRST_OF_PAIR.getHistogramString()).getValue(), 1.0);
         Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_FLAG_SECOND_OF_PAIR.getHistogramString()).getValue(), 1.0);
         Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_MATE_REF_INDEX.getHistogramString()).getValue(), 1.0);
+        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_UNPAIRED_MATE_REFERENCE.getHistogramString()).getValue(), 1.0);
     }
 
     @Test
@@ -155,6 +174,7 @@ public class ValidateSamFileTest {
         Assert.assertEquals(results.get(SAMValidationError.Type.MISMATCH_FLAG_MATE_UNMAPPED.getHistogramString()).getValue(), 1.0);
         Assert.assertEquals(results.get(SAMValidationError.Type.MISMATCH_MATE_ALIGNMENT_START.getHistogramString()).getValue(), 2.0);
         Assert.assertEquals(results.get(SAMValidationError.Type.MISMATCH_MATE_REF_INDEX.getHistogramString()).getValue(), 2.0);
+        Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_UNALIGNED_MATE_START.getHistogramString()).getValue(), 1.0);
     }
 
     @Test(dataProvider = "missingMateTestCases")
@@ -214,6 +234,7 @@ public class ValidateSamFileTest {
         Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_CIGAR.getHistogramString()).getValue(), 1.0);
         Assert.assertEquals(results.get(SAMValidationError.Type.INVALID_FLAG_READ_UNMAPPED.getHistogramString()).getValue(), 1.0);
         Assert.assertEquals(results.get(SAMValidationError.Type.MISSING_TAG_NM.getHistogramString()).getValue(), 1.0);
+        Assert.assertEquals(results.get(SAMValidationError.Type.MISMATCH_CIGAR_SEQ_LENGTH.getHistogramString()).getValue(), 1.0);
     }
 
     @Test
@@ -237,26 +258,32 @@ public class ValidateSamFileTest {
         final Histogram<String> results = executeValidation(samBuilder.getSamReader(), new ReferenceSequenceFile() {
             private int index = 0;
 
+            @Override
             public SAMSequenceDictionary getSequenceDictionary() {
                 return null;
             }
 
+            @Override
             public ReferenceSequence nextSequence() {
                 final byte[] bases = new byte[10000];
                 Arrays.fill(bases, (byte) 'A');
                 return new ReferenceSequence("foo", index++, bases);
             }
 
+            @Override
             public void reset() {
                 this.index = 0;
             }
 
+            @Override
             public boolean isIndexed() { return false; }
 
+            @Override
             public ReferenceSequence getSequence(final String contig) {
                 throw new UnsupportedOperationException();
             }
 
+            @Override
             public ReferenceSequence getSubsequenceAt(final String contig, final long start, final long stop) {
                 throw new UnsupportedOperationException();
             }
@@ -276,11 +303,10 @@ public class ValidateSamFileTest {
             throws Exception {
         final SamReader reader = SamReaderFactory.makeDefault().open(new File(TEST_DATA_DIR, inputFile));
         final Histogram<String> results = executeValidation(reader, null, IndexValidationStringency.EXHAUSTIVE);
-        Assert.assertNotNull(results.get(expectedError.getHistogramString()));
-        Assert.assertEquals(results.get(expectedError.getHistogramString()).getValue(), 1.0);
+        Assert.assertNotNull(results.get(expectedError.getHistogramString()), scenario);
+        Assert.assertEquals(results.get(expectedError.getHistogramString()).getValue(), 1.0, scenario);
     }
 
-
     @DataProvider(name = "testMateCigarScenarios")
     public Object[][] testMateCigarScenarios() {
         return new Object[][]{
@@ -294,8 +320,8 @@ public class ValidateSamFileTest {
             throws Exception {
         final SamReader reader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT).open(new File(TEST_DATA_DIR, inputFile));
         final Histogram<String> results = executeValidation(reader, null, IndexValidationStringency.EXHAUSTIVE);
-        Assert.assertNotNull(results.get(expectedError.getHistogramString()));
-        Assert.assertEquals(results.get(expectedError.getHistogramString()).getValue(), 1.0);
+        Assert.assertNotNull(results.get(expectedError.getHistogramString()), scenario);
+        Assert.assertEquals(results.get(expectedError.getHistogramString()).getValue(), 1.0, scenario);
     }
 
     @DataProvider(name = "testTruncatedScenarios")
@@ -376,9 +402,20 @@ public class ValidateSamFileTest {
     public void testHeaderValidation() throws Exception {
         final SamReader samReader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT)
                 .open(new File(TEST_DATA_DIR, "buggyHeader.sam"));
-        final Histogram<String> results = executeValidation(samReader, null, IndexValidationStringency.EXHAUSTIVE);
+        final File referenceFile = new File(TEST_DATA_DIR, "../hg19mini.fasta");
+        final ReferenceSequenceFile reference = new FastaSequenceFile(referenceFile, false);
+        final Histogram<String> results = executeValidation(samReader, reference, IndexValidationStringency.EXHAUSTIVE);
         Assert.assertEquals(results.get(SAMValidationError.Type.UNRECOGNIZED_HEADER_TYPE.getHistogramString()).getValue(), 3.0);
         Assert.assertEquals(results.get(SAMValidationError.Type.HEADER_TAG_MULTIPLY_DEFINED.getHistogramString()).getValue(), 1.0);
+        Assert.assertEquals(results.get(SAMValidationError.Type.MISMATCH_FILE_SEQ_DICT.getHistogramString()).getValue(), 1.0);
+    }
+
+    @Test
+    public void testSeqQualMismatch() throws Exception {
+        final SamReader samReader = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.SILENT)
+                .open(new File(TEST_DATA_DIR, "seq_qual_len_mismatch.sam"));
+        final Histogram<String> results = executeValidation(samReader, null, IndexValidationStringency.EXHAUSTIVE);
+        Assert.assertEquals(results.get(SAMValidationError.Type.MISMATCH_SEQ_QUAL_LENGTH.getHistogramString()).getValue(), 8.0);
     }
 
     @Test
@@ -475,10 +512,24 @@ public class ValidateSamFileTest {
                         "@RG\tID:0\tSM:Hi,Mom!\n" +
                         "E\t147\tchr1\t15\t255\t10M\t=\t2\t-30\tCAACAGAAGC\t)'.*.+2,))\tU2:Z:CAA";
 
+        final String SOTagCorrectlyProcessTestData =
+                "@HD\tVN:1.0\tSO:NOTKNOWN\n" +
+                        "@SQ\tSN:chr1\tLN:101\n" +
+                        "@RG\tID:0\tSM:Hi,Mom!\n" +
+                        "E\t147\tchr1\t15\t255\t10M\t=\t2\t-30\tCAACAGAAGC\t)'.*.+2,))\tU2:Z:CAA";
+
+        final String GOTagCorrectlyProcessTestData =
+                "@HD\tVN:1.0\tGO:NOTKNOWN\n" +
+                        "@SQ\tSN:chr1\tLN:101\n" +
+                        "@RG\tID:0\tSM:Hi,Mom!\n" +
+                        "E\t147\tchr1\t15\t255\t10M\t=\t2\t-30\tCAACAGAAGC\t)'.*.+2,))\tU2:Z:CAA";
+
         return new Object[][]{
                 {E2TagCorrectlyProcessTestData.getBytes(), SAMValidationError.Type.E2_BASE_EQUALS_PRIMARY_BASE},
                 {E2TagCorrectlyProcessTestData.getBytes(), SAMValidationError.Type.MISMATCH_READ_LENGTH_AND_E2_LENGTH},
-                {U2TagCorrectlyProcessTestData.getBytes(), SAMValidationError.Type.MISMATCH_READ_LENGTH_AND_U2_LENGTH}
+                {U2TagCorrectlyProcessTestData.getBytes(), SAMValidationError.Type.MISMATCH_READ_LENGTH_AND_U2_LENGTH},
+                {SOTagCorrectlyProcessTestData.getBytes(), SAMValidationError.Type.HEADER_TAG_NON_CONFORMING_VALUE},
+                {GOTagCorrectlyProcessTestData.getBytes(), SAMValidationError.Type.HEADER_TAG_NON_CONFORMING_VALUE}
         };
     }
 
@@ -500,16 +551,18 @@ public class ValidateSamFileTest {
     @DataProvider(name = "validateBamFileTerminationData")
     public Object[][] validateBamFileTerminationData() throws IOException {
         return new Object[][]{
-                {getBrokenFile(TERMINATION_GZIP_BLOCK_SIZE), SAMValidationError.Type.BAM_FILE_MISSING_TERMINATOR_BLOCK},
-                {getBrokenFile(RANDOM_NUMBER_TRUNC_BYTE), SAMValidationError.Type.TRUNCATED_FILE}
+                {getBrokenFile(TERMINATION_GZIP_BLOCK_SIZE), SAMValidationError.Type.BAM_FILE_MISSING_TERMINATOR_BLOCK, 1, 0},
+                {getBrokenFile(RANDOM_NUMBER_TRUNC_BYTE), SAMValidationError.Type.TRUNCATED_FILE, 0, 1}
         };
     }
 
     @Test(dataProvider = "validateBamFileTerminationData")
-    public void validateBamFileTerminationTest(File file, SAMValidationError.Type errorType) throws IOException {
+    public void validateBamFileTerminationTest(final File file, final SAMValidationError.Type errorType, final int numWarnings, final int numErrors) throws IOException {
         final SamFileValidator samFileValidator = new SamFileValidator(new PrintWriter(System.out), 8000);
         samFileValidator.validateBamFileTermination(file);
         Assert.assertEquals(samFileValidator.getErrorsByType().get(errorType).getValue(), 1.0);
+        Assert.assertEquals(samFileValidator.getNumWarnings(), numWarnings);
+        Assert.assertEquals(samFileValidator.getNumErrors(), numErrors);
     }
 
     private Histogram<String> executeValidation(final SamReader samReader, final ReferenceSequenceFile reference,
diff --git a/src/test/java/htsjdk/samtools/cram/CRAIEntryTest.java b/src/test/java/htsjdk/samtools/cram/CRAIEntryTest.java
index 6cf4934..d43f2fc 100644
--- a/src/test/java/htsjdk/samtools/cram/CRAIEntryTest.java
+++ b/src/test/java/htsjdk/samtools/cram/CRAIEntryTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools.cram;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.cram.structure.Container;
 import htsjdk.samtools.cram.structure.Slice;
 import org.testng.Assert;
@@ -12,7 +13,7 @@ import java.util.List;
 /**
  * Created by vadim on 25/08/2015.
  */
-public class CRAIEntryTest {
+public class CRAIEntryTest extends HtsjdkTest {
 
     @Test
     public void testFromContainer() {
diff --git a/src/test/java/htsjdk/samtools/cram/CRAIIndexTest.java b/src/test/java/htsjdk/samtools/cram/CRAIIndexTest.java
index 7ebdb75..9e48d6b 100644
--- a/src/test/java/htsjdk/samtools/cram/CRAIIndexTest.java
+++ b/src/test/java/htsjdk/samtools/cram/CRAIIndexTest.java
@@ -1,23 +1,14 @@
 package htsjdk.samtools.cram;
 
-import htsjdk.samtools.BAMFileSpan;
-import htsjdk.samtools.CRAMCRAIIndexer;
-import htsjdk.samtools.DiskBasedBAMFileIndex;
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMSequenceDictionary;
-import htsjdk.samtools.SAMSequenceRecord;
+import htsjdk.HtsjdkTest;
+import htsjdk.samtools.*;
 import htsjdk.samtools.seekablestream.SeekableBufferedStream;
 import htsjdk.samtools.seekablestream.SeekableFileStream;
 import htsjdk.samtools.seekablestream.SeekableStream;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
-import java.io.BufferedInputStream;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
+import java.io.*;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.function.BiFunction;
@@ -26,7 +17,7 @@ import java.util.zip.GZIPOutputStream;
 /**
  * Created by vadim on 25/08/2015.
  */
-public class CRAIIndexTest {
+public class CRAIIndexTest extends HtsjdkTest {
 
     @Test
     public void testFind() throws IOException, CloneNotSupportedException {
diff --git a/src/test/java/htsjdk/samtools/cram/LosslessRoundTripTest.java b/src/test/java/htsjdk/samtools/cram/LosslessRoundTripTest.java
index 67cd483..1ae8e14 100644
--- a/src/test/java/htsjdk/samtools/cram/LosslessRoundTripTest.java
+++ b/src/test/java/htsjdk/samtools/cram/LosslessRoundTripTest.java
@@ -1,31 +1,18 @@
 package htsjdk.samtools.cram;
 
-import htsjdk.samtools.CRAMFileReader;
-import htsjdk.samtools.CRAMFileWriter;
-import htsjdk.samtools.Cigar;
-import htsjdk.samtools.CigarElement;
-import htsjdk.samtools.CigarOperator;
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMReadGroupRecord;
-import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.SAMRecordIterator;
-import htsjdk.samtools.SAMSequenceRecord;
-import htsjdk.samtools.ValidationStringency;
+import htsjdk.HtsjdkTest;
+import htsjdk.samtools.*;
 import htsjdk.samtools.cram.ref.ReferenceSource;
 import htsjdk.samtools.reference.InMemoryReferenceSequenceFile;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
+import java.io.*;
 
 /**
  * Created by vadim on 19/02/2016.
  */
-public class LosslessRoundTripTest {
+public class LosslessRoundTripTest extends HtsjdkTest {
     @Test
     public void test_MD_NM() throws IOException {
         ByteArrayOutputStream baos = new ByteArrayOutputStream();
diff --git a/src/test/java/htsjdk/samtools/cram/VersionTest.java b/src/test/java/htsjdk/samtools/cram/VersionTest.java
index 0602eb3..be2851e 100644
--- a/src/test/java/htsjdk/samtools/cram/VersionTest.java
+++ b/src/test/java/htsjdk/samtools/cram/VersionTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools.cram;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.CRAMFileWriter;
 import htsjdk.samtools.SAMFileHeader;
 import htsjdk.samtools.SAMRecord;
@@ -26,7 +27,7 @@ import java.util.zip.CRC32;
 /**
  * Created by vadim on 18/02/2016.
  */
-public class VersionTest {
+public class VersionTest extends HtsjdkTest {
     /**
      * The test purpose is to ensure that a CRAM written by {@link CRAMFileWriter} adheres to CRAM3 specs expectations:
      * 1. version 3.+, via both actual byte comparison and CramIO API
diff --git a/src/test/java/htsjdk/samtools/cram/build/CompressionHeaderFactoryTest.java b/src/test/java/htsjdk/samtools/cram/build/CompressionHeaderFactoryTest.java
index a3d91cd..8e39d9f 100644
--- a/src/test/java/htsjdk/samtools/cram/build/CompressionHeaderFactoryTest.java
+++ b/src/test/java/htsjdk/samtools/cram/build/CompressionHeaderFactoryTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools.cram.build;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.ValidationStringency;
 import htsjdk.samtools.cram.encoding.readfeatures.Substitution;
 import htsjdk.samtools.cram.structure.CompressionHeader;
@@ -17,7 +18,7 @@ import java.util.List;
 /**
  * Created by vadim on 07/01/2016.
  */
-public class CompressionHeaderFactoryTest {
+public class CompressionHeaderFactoryTest extends HtsjdkTest {
     @Test
     public void testAllEncodingsPresent() {
         final CompressionHeader header = new CompressionHeaderFactory().build(new ArrayList<>(), new SubstitutionMatrix(new long[256][256]), true);
diff --git a/src/test/java/htsjdk/samtools/cram/build/ContainerFactoryTest.java b/src/test/java/htsjdk/samtools/cram/build/ContainerFactoryTest.java
index cb004a7..cf4f91e 100644
--- a/src/test/java/htsjdk/samtools/cram/build/ContainerFactoryTest.java
+++ b/src/test/java/htsjdk/samtools/cram/build/ContainerFactoryTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools.cram.build;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SAMFileHeader;
 import htsjdk.samtools.SAMRecord;
 import htsjdk.samtools.SAMSequenceRecord;
@@ -19,7 +20,7 @@ import java.util.List;
 /**
  * Created by vadim on 15/12/2015.
  */
-public class ContainerFactoryTest {
+public class ContainerFactoryTest extends HtsjdkTest {
 
     @Test
     public void testUnmapped() throws IOException, IllegalAccessException {
diff --git a/src/test/java/htsjdk/samtools/cram/build/ContainerParserTest.java b/src/test/java/htsjdk/samtools/cram/build/ContainerParserTest.java
index fe25ce6..b16dc0f 100644
--- a/src/test/java/htsjdk/samtools/cram/build/ContainerParserTest.java
+++ b/src/test/java/htsjdk/samtools/cram/build/ContainerParserTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools.cram.build;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SAMFileHeader;
 import htsjdk.samtools.SAMRecord;
 import htsjdk.samtools.ValidationStringency;
@@ -24,7 +25,7 @@ import java.util.Map;
 /**
  * Created by vadim on 11/01/2016.
  */
-public class ContainerParserTest {
+public class ContainerParserTest extends HtsjdkTest {
 
     @Test
     public void testEOF() throws IOException, IllegalAccessException {
diff --git a/src/test/java/htsjdk/samtools/cram/build/CramIOTest.java b/src/test/java/htsjdk/samtools/cram/build/CramIOTest.java
index 1035f24..bab50dc 100644
--- a/src/test/java/htsjdk/samtools/cram/build/CramIOTest.java
+++ b/src/test/java/htsjdk/samtools/cram/build/CramIOTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools.cram.build;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SAMFileHeader;
 import htsjdk.samtools.SAMSequenceRecord;
 import htsjdk.samtools.cram.common.CramVersions;
@@ -15,7 +16,7 @@ import java.io.IOException;
 /**
  * Created by vadim on 25/08/2015.
  */
-public class CramIOTest {
+public class CramIOTest extends HtsjdkTest {
     @Test
     public void testCheckHeaderAndEOF_v2() throws IOException {
         final String id = "testid";
diff --git a/src/test/java/htsjdk/samtools/cram/build/Sam2CramRecordFactoryTest.java b/src/test/java/htsjdk/samtools/cram/build/Sam2CramRecordFactoryTest.java
new file mode 100644
index 0000000..088f4f3
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/cram/build/Sam2CramRecordFactoryTest.java
@@ -0,0 +1,109 @@
+package htsjdk.samtools.cram.build;
+
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.SAMUtils;
+import htsjdk.samtools.cram.common.CramVersions;
+import htsjdk.samtools.cram.encoding.readfeatures.ReadBase;
+import htsjdk.samtools.cram.encoding.readfeatures.ReadFeature;
+import htsjdk.samtools.cram.encoding.readfeatures.Substitution;
+import htsjdk.samtools.cram.structure.CramCompressionRecord;
+import htsjdk.samtools.util.SequenceUtil;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Created by vadim on 06/06/2017.
+ */
+public class Sam2CramRecordFactoryTest {
+
+    /**
+     * This checks that all read bases returned in the record from {@link Sam2CramRecordFactory#createCramRecord(SAMRecord)}
+     * are from the BAM read base set.
+     */
+    @Test
+    public void testReadBaseNormalization() {
+        final SAMFileHeader header = new SAMFileHeader();
+
+        final SAMRecord record = new SAMRecord(header);
+        record.setReadName("test");
+        record.setReadUnmappedFlag(true);
+        record.setReadBases(SequenceUtil.getIUPACCodesString().getBytes());
+        record.setBaseQualities(SAMRecord.NULL_QUALS);
+
+        final Sam2CramRecordFactory sam2CramRecordFactory = new Sam2CramRecordFactory(null, header, CramVersions.CRAM_v3);
+        final CramCompressionRecord cramRecord = sam2CramRecordFactory.createCramRecord(record);
+
+        Assert.assertNotEquals(cramRecord.readBases, record.getReadBases());
+        Assert.assertEquals(cramRecord.readBases, SequenceUtil.toBamReadBasesInPlace(record.getReadBases()));
+    }
+
+    @DataProvider(name = "emptyFeatureListProvider")
+    public Object[][] testPositive() {
+        return new Object[][]{
+                // a matching base
+                {"A", "A", "!"},
+                // a matching ambiguity base
+                {"R", "R", "!"},
+        };
+    }
+
+    @Test(dataProvider = "emptyFeatureListProvider")
+    public void testAddMismatchReadFeaturesNoReadFeaturesForMatch(final String refBases, final String readBases, final String fastqScores) {
+        final List<ReadFeature> readFeatures = buildMatchOrMismatchReadFeatures(refBases, readBases, fastqScores);
+        Assert.assertTrue(readFeatures.isEmpty());
+    }
+
+    /**
+     * Test the outcome of a ACGTN mismatch.
+     * The result should always be a {@link Substitution} read feature.
+     */
+    @Test
+    public void testAddMismatchReadFeaturesSingleSubstitution() {
+        final List<ReadFeature> readFeatures = buildMatchOrMismatchReadFeatures("A", "C", "!");
+
+        Assert.assertEquals(1, readFeatures.size());
+
+        final ReadFeature rf = readFeatures.get(0);
+        Assert.assertTrue(rf instanceof Substitution);
+        final Substitution substitution = (Substitution) rf;
+        Assert.assertEquals(1, substitution.getPosition());
+        Assert.assertEquals('C', substitution.getBase());
+        Assert.assertEquals('A', substitution.getReferenceBase());
+    }
+
+    /**
+     * Test the outcome of non-ACGTN ref and read bases mismatching each other.
+     * The result should be explicit read base and score capture via {@link ReadBase}.
+     */
+    @Test
+    public void testAddMismatchReadFeaturesAmbiguityMismatch() {
+        final List<ReadFeature> readFeatures = buildMatchOrMismatchReadFeatures("R", "F", "1");
+        Assert.assertEquals(1, readFeatures.size());
+
+        final ReadFeature rf = readFeatures.get(0);
+        Assert.assertTrue(rf instanceof ReadBase);
+        final ReadBase readBaseFeature = (ReadBase) rf;
+        Assert.assertEquals(1, readBaseFeature.getPosition());
+        Assert.assertEquals('F', readBaseFeature.getBase());
+        Assert.assertEquals(SAMUtils.fastqToPhred('1'), readBaseFeature.getQualityScore());
+    }
+
+    private List<ReadFeature> buildMatchOrMismatchReadFeatures(final String refBases, final String readBases, final String scores) {
+        final SAMFileHeader header = new SAMFileHeader();
+        final CramCompressionRecord record = new CramCompressionRecord();
+        record.alignmentStart = 1;
+        final List<ReadFeature> readFeatures = new ArrayList<>();
+        final int fromPosInRead = 0;
+        final int alignmentStartOffset = 0;
+        final int nofReadBases = 1;
+
+        final Sam2CramRecordFactory sam2CramRecordFactory = new Sam2CramRecordFactory(refBases.getBytes(), header, CramVersions.CRAM_v3);
+        sam2CramRecordFactory.addMismatchReadFeatures(record.alignmentStart, readFeatures, fromPosInRead, alignmentStartOffset, nofReadBases, readBases.getBytes(), SAMUtils.fastqToPhred(scores));
+        return readFeatures;
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanTest.java b/src/test/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanTest.java
index f2ca2f2..fd24c6b 100644
--- a/src/test/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanTest.java
+++ b/src/test/java/htsjdk/samtools/cram/encoding/huffman/codec/HuffmanTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools.cram.encoding.huffman.codec;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.cram.io.DefaultBitInputStream;
 import htsjdk.samtools.cram.io.DefaultBitOutputStream;
 import htsjdk.samtools.cram.structure.ReadTag;
@@ -13,7 +14,7 @@ import java.io.IOException;
 /**
  * Created by vadim on 22/04/2015.
  */
-public class HuffmanTest {
+public class HuffmanTest extends HtsjdkTest {
     @Test
     public void testHuffmanIntHelper() throws IOException {
         int size = 1000000;
diff --git a/src/test/java/htsjdk/samtools/cram/encoding/rans/RansTest.java b/src/test/java/htsjdk/samtools/cram/encoding/rans/RansTest.java
index ca84686..8e05a12 100644
--- a/src/test/java/htsjdk/samtools/cram/encoding/rans/RansTest.java
+++ b/src/test/java/htsjdk/samtools/cram/encoding/rans/RansTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools.cram.encoding.rans;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
@@ -10,7 +11,7 @@ import java.util.Random;
 /**
  * Created by vadim on 22/04/2015.
  */
-public class RansTest {
+public class RansTest extends HtsjdkTest {
     @Test
     public void testEmpty() {
         roundTrip(new byte[0]);
diff --git a/src/test/java/htsjdk/samtools/cram/io/ITF8Test.java b/src/test/java/htsjdk/samtools/cram/io/ITF8Test.java
index 5d95d2c..a206ad1 100644
--- a/src/test/java/htsjdk/samtools/cram/io/ITF8Test.java
+++ b/src/test/java/htsjdk/samtools/cram/io/ITF8Test.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools.cram.io;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.Tuple;
 import org.testng.Assert;
 import org.testng.annotations.BeforeClass;
@@ -16,7 +17,7 @@ import java.util.List;
 /**
  * Created by vadim on 03/02/2015.
  */
-public class ITF8Test {
+public class ITF8Test extends HtsjdkTest {
 
     private ExposedByteArrayOutputStream testBAOS;
     private ByteArrayInputStream testBAIS;
diff --git a/src/test/java/htsjdk/samtools/cram/io/LTF8Test.java b/src/test/java/htsjdk/samtools/cram/io/LTF8Test.java
index 5103797..03d310d 100644
--- a/src/test/java/htsjdk/samtools/cram/io/LTF8Test.java
+++ b/src/test/java/htsjdk/samtools/cram/io/LTF8Test.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools.cram.io;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.BeforeMethod;
@@ -14,7 +15,7 @@ import java.util.List;
 /**
  * Created by vadim on 03/02/2015.
  */
-public class LTF8Test {
+public class LTF8Test extends HtsjdkTest {
 
     private ExposedByteArrayOutputStream ltf8TestBAOS;
     private ByteArrayInputStream ltf8TestBAIS;
diff --git a/src/test/java/htsjdk/samtools/cram/lossy/QualityScorePreservationTest.java b/src/test/java/htsjdk/samtools/cram/lossy/QualityScorePreservationTest.java
index 34b4676..73859a4 100644
--- a/src/test/java/htsjdk/samtools/cram/lossy/QualityScorePreservationTest.java
+++ b/src/test/java/htsjdk/samtools/cram/lossy/QualityScorePreservationTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools.cram.lossy;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SAMFileHeader;
 import htsjdk.samtools.SAMRecord;
 import htsjdk.samtools.SamInputResource;
@@ -23,7 +24,7 @@ import java.util.List;
 
 import static org.testng.Assert.*;
 
-public class QualityScorePreservationTest {
+public class QualityScorePreservationTest extends HtsjdkTest {
 
     @Test
     public void test1() {
@@ -96,12 +97,10 @@ public class QualityScorePreservationTest {
         }
     }
 
-    private SAMFileHeader samFileHeader = new SAMFileHeader();
-
     private SAMRecord buildSAMRecord(String seqName, String line) {
         ByteArrayOutputStream baos = new ByteArrayOutputStream();
         try {
-            baos.write("@HD\tVN:1.0\tGO:none SO:coordinate\n".getBytes());
+            baos.write("@HD\tVN:1.0\tGO:none\tSO:coordinate\n".getBytes());
             baos.write(("@SQ\tSN:" + seqName + "\tLN:247249719\n").getBytes());
             baos.write(line.replaceAll("\\s+", "\t").getBytes());
             baos.close();
@@ -120,7 +119,7 @@ public class QualityScorePreservationTest {
 
     @Test
     public void test3() {
-        String line1 = "98573 0 20 1 10 40M * 0 0 AAAAAAAAAA !!!!!!!!!!";
+        String line1 = "98573 0 20 1 10 10M * 0 0 AAAAAAAAAA !!!!!!!!!!";
         String seqName = "20";
 
         byte[] ref = new byte[40];
diff --git a/src/test/java/htsjdk/samtools/cram/ref/EnaRefServiceTest.java b/src/test/java/htsjdk/samtools/cram/ref/EnaRefServiceTest.java
index 852a513..7f53784 100644
--- a/src/test/java/htsjdk/samtools/cram/ref/EnaRefServiceTest.java
+++ b/src/test/java/htsjdk/samtools/cram/ref/EnaRefServiceTest.java
@@ -1,11 +1,12 @@
 package htsjdk.samtools.cram.ref;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
 import java.io.IOException;
 
-public class EnaRefServiceTest {
+public class EnaRefServiceTest extends HtsjdkTest {
 
     @Test
     public void test() throws IOException, EnaRefService.GaveUpException {
diff --git a/src/test/java/htsjdk/samtools/cram/ref/ReferenceSourceTest.java b/src/test/java/htsjdk/samtools/cram/ref/ReferenceSourceTest.java
new file mode 100644
index 0000000..34ae95b
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/cram/ref/ReferenceSourceTest.java
@@ -0,0 +1,33 @@
+package htsjdk.samtools.cram.ref;
+
+import htsjdk.samtools.SAMSequenceRecord;
+import htsjdk.samtools.reference.InMemoryReferenceSequenceFile;
+import htsjdk.samtools.util.SequenceUtil;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.util.Arrays;
+
+/**
+ * Created by vadim on 29/06/2017.
+ */
+public class ReferenceSourceTest {
+
+    @Test
+    public void testReferenceSourceUpperCasesBases() {
+        final String sequenceName = "1";
+        final String nonIupacCharacters = "1=eE";
+        final byte[] originalRefBases = (nonIupacCharacters + SequenceUtil.getIUPACCodesString()).getBytes();
+        SAMSequenceRecord sequenceRecord = new SAMSequenceRecord(sequenceName, originalRefBases.length);
+
+        InMemoryReferenceSequenceFile memoryReferenceSequenceFile = new InMemoryReferenceSequenceFile();
+        memoryReferenceSequenceFile.add(sequenceName, Arrays.copyOf(originalRefBases, originalRefBases.length));
+        Assert.assertEquals(memoryReferenceSequenceFile.getSequence(sequenceName).getBases(), originalRefBases);
+
+        ReferenceSource referenceSource = new ReferenceSource(memoryReferenceSequenceFile);
+        byte[] refBasesFromSource = referenceSource.getReferenceBases(sequenceRecord, false);
+
+        Assert.assertNotEquals(refBasesFromSource, originalRefBases);
+        Assert.assertEquals(refBasesFromSource, SequenceUtil.upperCase(originalRefBases));
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/cram/structure/CramCompressionRecordTest.java b/src/test/java/htsjdk/samtools/cram/structure/CramCompressionRecordTest.java
index 03360bd..a455476 100644
--- a/src/test/java/htsjdk/samtools/cram/structure/CramCompressionRecordTest.java
+++ b/src/test/java/htsjdk/samtools/cram/structure/CramCompressionRecordTest.java
@@ -1,11 +1,8 @@
 package htsjdk.samtools.cram.structure;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SAMRecord;
-import htsjdk.samtools.cram.encoding.readfeatures.Deletion;
-import htsjdk.samtools.cram.encoding.readfeatures.InsertBase;
-import htsjdk.samtools.cram.encoding.readfeatures.Insertion;
-import htsjdk.samtools.cram.encoding.readfeatures.ReadFeature;
-import htsjdk.samtools.cram.encoding.readfeatures.SoftClip;
+import htsjdk.samtools.cram.encoding.readfeatures.*;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
@@ -14,7 +11,7 @@ import java.util.ArrayList;
 /**
  * Created by vadim on 28/09/2015.
  */
-public class CramCompressionRecordTest {
+public class CramCompressionRecordTest extends HtsjdkTest {
     @Test
     public void test_getAlignmentEnd() {
         CramCompressionRecord r = new CramCompressionRecord();
diff --git a/src/test/java/htsjdk/samtools/cram/structure/ReadTagTest.java b/src/test/java/htsjdk/samtools/cram/structure/ReadTagTest.java
index 3ed0b40..314fd24 100644
--- a/src/test/java/htsjdk/samtools/cram/structure/ReadTagTest.java
+++ b/src/test/java/htsjdk/samtools/cram/structure/ReadTagTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools.cram.structure;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SAMFileHeader;
 import htsjdk.samtools.SAMRecord;
 import htsjdk.samtools.ValidationStringency;
@@ -31,14 +32,9 @@ import org.testng.annotations.Test;
 
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 
-public class ReadTagTest {
+public class ReadTagTest extends HtsjdkTest {
 
     @Test
     public void test () {
diff --git a/src/test/java/htsjdk/samtools/cram/structure/SliceTests.java b/src/test/java/htsjdk/samtools/cram/structure/SliceTests.java
index c52dccb..eeb34ee 100644
--- a/src/test/java/htsjdk/samtools/cram/structure/SliceTests.java
+++ b/src/test/java/htsjdk/samtools/cram/structure/SliceTests.java
@@ -1,7 +1,7 @@
 package htsjdk.samtools.cram.structure;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.CRAMFileReader;
-import htsjdk.samtools.SAMFileHeader;
 import htsjdk.samtools.SAMRecord;
 import htsjdk.samtools.ValidationStringency;
 import htsjdk.samtools.cram.CRAMException;
@@ -17,7 +17,7 @@ import java.util.Iterator;
 /**
  * Created by vadim on 07/12/2015.
  */
-public class SliceTests {
+public class SliceTests extends HtsjdkTest {
     @Test
     public void testUnmappedValidateRef() {
         Slice slice = new Slice();
diff --git a/src/test/java/htsjdk/samtools/cram/structure/SubstitutionMatrixTest.java b/src/test/java/htsjdk/samtools/cram/structure/SubstitutionMatrixTest.java
index 31e7708..6251189 100644
--- a/src/test/java/htsjdk/samtools/cram/structure/SubstitutionMatrixTest.java
+++ b/src/test/java/htsjdk/samtools/cram/structure/SubstitutionMatrixTest.java
@@ -1,17 +1,15 @@
 package htsjdk.samtools.cram.structure;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.BeforeGroups;
 import org.testng.annotations.DataProvider;
-import org.testng.annotations.Parameters;
 import org.testng.annotations.Test;
 
-import java.util.Arrays;
-
 /**
  * Created by Vadim on 12/03/2015.
  */
-public class SubstitutionMatrixTest {
+public class SubstitutionMatrixTest extends HtsjdkTest {
 
     SubstitutionMatrix m;
     long[][] freqs;
diff --git a/src/test/java/htsjdk/samtools/fastq/FastqEncoderTest.java b/src/test/java/htsjdk/samtools/fastq/FastqEncoderTest.java
new file mode 100644
index 0000000..c367397
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/fastq/FastqEncoderTest.java
@@ -0,0 +1,76 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2016 Daniel Gomez-Sanchez
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools.fastq;
+
+import htsjdk.HtsjdkTest;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.SAMRecordSetBuilder;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+/**
+ * @author Daniel Gomez-Sanchez (magicDGS)
+ */
+public class FastqEncoderTest extends HtsjdkTest {
+
+    @Test
+    public void testAsFastqRecord() throws Exception {
+        final SAMRecord record = new SAMRecordSetBuilder().addFrag("test", 0, 1, false, false, "10M", null, 2);
+        record.setReadPairedFlag(true);
+        // test first of pair encoding
+        record.setFirstOfPairFlag(true);
+        testRecord(record.getReadName() + FastqConstants.FIRST_OF_PAIR, FastqEncoder.asFastqRecord(record), record);
+        record.setFirstOfPairFlag(false);
+        record.setSecondOfPairFlag(true);
+        testRecord(record.getReadName() + FastqConstants.SECOND_OF_PAIR, FastqEncoder.asFastqRecord(record), record);
+        record.setSecondOfPairFlag(false);
+        testRecord(record.getReadName(), FastqEncoder.asFastqRecord(record), record);
+    }
+
+    private void testRecord(final String expectedReadName, final FastqRecord fastqRecord, final SAMRecord samRecord) {
+        Assert.assertEquals(fastqRecord.getReadName(), expectedReadName);
+        Assert.assertEquals(fastqRecord.getBaseQualities(), samRecord.getBaseQualities());
+        Assert.assertEquals(fastqRecord.getReadBases(), samRecord.getReadBases());
+        Assert.assertNull(fastqRecord.getBaseQualityHeader());
+    }
+
+    @Test
+    public void testAsSAMRecord() throws Exception {
+        // create a random record
+        final SAMRecord samRecord = new SAMRecordSetBuilder().addFrag("test", 0, 1, false, false, "10M", null, 2);
+        FastqRecord fastqRecord = new FastqRecord(samRecord.getReadName(), samRecord.getReadBases(), "", samRecord.getBaseQualities());
+        testConvertedSAMRecord(FastqEncoder.asSAMRecord(fastqRecord, samRecord.getHeader()), samRecord);
+        fastqRecord = new FastqRecord(samRecord.getReadName() + FastqConstants.FIRST_OF_PAIR, samRecord.getReadBases(), "", samRecord.getBaseQualities());
+        testConvertedSAMRecord(FastqEncoder.asSAMRecord(fastqRecord, samRecord.getHeader()), samRecord);
+        fastqRecord = new FastqRecord(samRecord.getReadName() + FastqConstants.SECOND_OF_PAIR, samRecord.getReadBases(), "", samRecord.getBaseQualities());
+        testConvertedSAMRecord(FastqEncoder.asSAMRecord(fastqRecord, samRecord.getHeader()), samRecord);
+    }
+
+    private void testConvertedSAMRecord(final SAMRecord converted, final SAMRecord original) {
+        Assert.assertEquals(converted.getReadName(), original.getReadName());
+        Assert.assertEquals(converted.getBaseQualities(), original.getBaseQualities());
+        Assert.assertEquals(converted.getReadBases(), original.getReadBases());
+        Assert.assertTrue(converted.getReadUnmappedFlag());
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/htsjdk/samtools/fastq/FastqRecordTest.java b/src/test/java/htsjdk/samtools/fastq/FastqRecordTest.java
index f6f238e..9a47a86 100644
--- a/src/test/java/htsjdk/samtools/fastq/FastqRecordTest.java
+++ b/src/test/java/htsjdk/samtools/fastq/FastqRecordTest.java
@@ -1,9 +1,13 @@
 package htsjdk.samtools.fastq;
 
+import htsjdk.HtsjdkTest;
+import htsjdk.samtools.util.TestUtil;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
-public final class FastqRecordTest {
+import java.util.ArrayList;
+
+public final class FastqRecordTest extends HtsjdkTest {
 
     @Test
     public void testBasic() {
@@ -15,7 +19,7 @@ public final class FastqRecordTest {
 
         Assert.assertNull(fastqRecord.getBaseQualityHeader());
 
-        Assert.assertEquals(fastqRecord.getReadHeader(), seqHeaderPrefix);
+        Assert.assertEquals(fastqRecord.getReadName(), seqHeaderPrefix);
         Assert.assertEquals(fastqRecord.getBaseQualityString(), qualLine);
         Assert.assertEquals(fastqRecord.getReadString(), seqLine);
         Assert.assertNotNull(fastqRecord.toString());//just check not nullness
@@ -25,9 +29,9 @@ public final class FastqRecordTest {
         Assert.assertEquals(fastqRecord, fastqRecord);
         Assert.assertNotEquals(fastqRecord, "fred");
         Assert.assertNotEquals("fred", fastqRecord);
-        Assert.assertEquals(fastqRecord.length(), seqLine.length());
+        Assert.assertEquals(fastqRecord.getReadLength(), seqLine.length());
         Assert.assertEquals(fastqRecord.getBaseQualityString().length(), fastqRecord.getReadString().length());
-        Assert.assertEquals(fastqRecord.getReadString().length(), fastqRecord.length());
+        Assert.assertEquals(fastqRecord.getReadString().length(), fastqRecord.getReadLength());
     }
 
     @Test
@@ -37,7 +41,7 @@ public final class FastqRecordTest {
         final String qualHeaderPrefix = "";
         final String qualLine = ";<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~";
         final FastqRecord fastqRecord = new FastqRecord(seqHeaderPrefix, seqLine, qualHeaderPrefix, qualLine);
-        Assert.assertNull(fastqRecord.getReadHeader());
+        Assert.assertNull(fastqRecord.getReadName());
         Assert.assertNull(fastqRecord.getBaseQualityHeader());
     }
 
@@ -57,6 +61,11 @@ public final class FastqRecordTest {
         Assert.assertSame(fastqRecord.getBaseQualityHeader(), fastqRecordCopy.getBaseQualityHeader());
     }
 
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testNullCopy() {
+        new FastqRecord(null);
+    }
+
     @Test
     public void testNullSeq() {
         final String seqHeaderPrefix = "header";
@@ -201,4 +210,14 @@ public final class FastqRecordTest {
         new FastqRecord("header", seqLine1, "qualHeaderPrefix", qualLine1);
         //Note: this does not blow up now but it will once we enforce that seqLine and qualLine be the same length
     }
-}
\ No newline at end of file
+
+    @Test
+    public void testFastqSerialize() throws Exception {
+        final ArrayList<FastqRecord> records = new ArrayList<>();
+        records.add(new FastqRecord("q1", "ACGTACGT",     "", "########"));
+        records.add(new FastqRecord("q2", "CCAGCGTAATA",  "", "????????###"));
+        records.add(new FastqRecord("q3", "NNNNNNNNNNNN", "", "############"));
+
+        Assert.assertEquals(TestUtil.serializeAndDeserialize(records),records);
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/fastq/FastqWriterTest.java b/src/test/java/htsjdk/samtools/fastq/FastqWriterTest.java
deleted file mode 100644
index eba5c5b..0000000
--- a/src/test/java/htsjdk/samtools/fastq/FastqWriterTest.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * The MIT License
- *
- * Pierre Lindenbaum PhD
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools.fastq;
-
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import htsjdk.samtools.util.TestUtil;
-
-import java.io.File;
-import java.util.ArrayList;
-
-/**
- * test fastq
- */
-public class FastqWriterTest {
-    private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools/util/QualityEncodingDetectorTest");
-
-    @DataProvider(name = "fastqsource")
-    public Object[][] createTestData() {
-        return new Object[][]{
-                {"solexa_full_range_as_solexa.fastq"},
-                {"5k-30BB2AAXX.3.aligned.sam.fastq"}
-        };
-    }
-
-    @Test(dataProvider = "fastqsource")
-    public void testReadReadWriteFastq(final String basename) throws Exception {
-        final File tmpFile = File.createTempFile("test.", ".fastq");
-        tmpFile.deleteOnExit();
-        final FastqReader fastqReader = new FastqReader(new File(TEST_DATA_DIR,basename));
-        final FastqWriterFactory writerFactory = new FastqWriterFactory();
-        final FastqWriter fastqWriter = writerFactory.newWriter(tmpFile);
-        for(final FastqRecord rec: fastqReader) fastqWriter.write(rec);
-        fastqWriter.close();
-        fastqReader.close();
-    }
-    
-    @Test(dataProvider = "fastqsource")
-    public void testFastqSerialize(final String basename) throws Exception {
-        //write 
-        final ArrayList<FastqRecord> records = new ArrayList<>();
-        final FastqReader fastqReader = new FastqReader(new File(TEST_DATA_DIR,basename));
-        for(final FastqRecord rec: fastqReader) {
-            records.add(rec);
-            if(records.size()>100) break;
-        }
-        fastqReader.close();
-        Assert.assertEquals(TestUtil.serializeAndDeserialize(records),records);
-    }
-}
diff --git a/src/test/java/htsjdk/samtools/filter/FailsVendorReadQualityFilterTest.java b/src/test/java/htsjdk/samtools/filter/FailsVendorReadQualityFilterTest.java
index cb2cb05..ed83f09 100644
--- a/src/test/java/htsjdk/samtools/filter/FailsVendorReadQualityFilterTest.java
+++ b/src/test/java/htsjdk/samtools/filter/FailsVendorReadQualityFilterTest.java
@@ -23,13 +23,14 @@
  */
 package htsjdk.samtools.filter;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SAMRecord;
 import htsjdk.samtools.SAMRecordSetBuilder;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
-public class FailsVendorReadQualityFilterTest {
+public class FailsVendorReadQualityFilterTest extends HtsjdkTest {
 
     private final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
     private final FailsVendorReadQualityFilter filter = new FailsVendorReadQualityFilter();
diff --git a/src/test/java/htsjdk/samtools/filter/InsertSizeFilterTest.java b/src/test/java/htsjdk/samtools/filter/InsertSizeFilterTest.java
index fc4937d..48d8edc 100644
--- a/src/test/java/htsjdk/samtools/filter/InsertSizeFilterTest.java
+++ b/src/test/java/htsjdk/samtools/filter/InsertSizeFilterTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools.filter;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SAMRecord;
 import htsjdk.samtools.SAMRecordSetBuilder;
 import org.testng.Assert;
@@ -7,7 +8,7 @@ import org.testng.annotations.BeforeTest;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
-public class InsertSizeFilterTest {
+public class InsertSizeFilterTest extends HtsjdkTest {
     private static final int READ_LENGTH = 20;
     private final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
 
diff --git a/src/test/java/htsjdk/samtools/filter/IntervalFilterTest.java b/src/test/java/htsjdk/samtools/filter/IntervalFilterTest.java
new file mode 100644
index 0000000..3107508
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/filter/IntervalFilterTest.java
@@ -0,0 +1,96 @@
+package htsjdk.samtools.filter;
+
+import htsjdk.HtsjdkTest;
+import htsjdk.samtools.SAMFileHeader;
+import htsjdk.samtools.SAMRecord;
+import htsjdk.samtools.SAMRecordSetBuilder;
+import htsjdk.samtools.util.CollectionUtil;
+import htsjdk.samtools.util.Interval;
+import htsjdk.samtools.util.IntervalList;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.util.*;
+
+/**
+ * Created by farjoun on 5/27/17.
+ */
+public class IntervalFilterTest extends HtsjdkTest {
+
+
+    @DataProvider(name="testReadsData")
+    public Iterator<Object[]> testReadsData() {
+
+        final SAMFileHeader fileHeader;
+        final IntervalList list;
+
+        fileHeader = IntervalList.fromFile(new File("src/test/resources/htsjdk/samtools/intervallist/IntervalListchr123_empty.interval_list")).getHeader();
+        fileHeader.setSortOrder(SAMFileHeader.SortOrder.unsorted);
+
+        list = new IntervalList(fileHeader);
+
+        list.add(new Interval("1", 50, 150));   //de-facto 1:50-150 1:301-500      2:1-150 2:250-270 2:290-400
+        list.add(new Interval("1", 301, 500));
+        list.add(new Interval("2", 1, 150));
+        list.add(new Interval("2", 250, 270));
+        list.add(new Interval("2", 300, 299)); // empty, but located.
+
+        List<Object[]> tests = new ArrayList<>();
+
+        SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
+
+        int readLength = 36;
+        builder.setReadLength(readLength);
+
+        int total = 0;
+        int expected = 0;
+
+        builder.addPair("abutting" + total, 0, 50 - readLength, 151); //both abutting
+        total += 2;
+        tests.add(new Object[]{list, fileHeader, CollectionUtil.makeCollection(builder.iterator()), expected, total});
+
+        builder.addPair("intersecting" + total, 0, 50 - readLength + 1, 150); // both overlapping
+        total += 2;
+        expected += 2;
+        tests.add(new Object[]{list, fileHeader, CollectionUtil.makeCollection(builder.iterator()), expected, total});
+
+        builder.addPair("intersecting" + total, 0, 150, 200); // only the first
+        total += 2;
+        expected++;
+        tests.add(new Object[]{list, fileHeader, CollectionUtil.makeCollection(builder.iterator()), expected, total});
+
+        builder.addPair("intersecting" + total, 0, 1, 150); // only the second
+        total += 2;
+        expected++;
+        tests.add(new Object[]{list, fileHeader, CollectionUtil.makeCollection(builder.iterator()), expected, total});
+
+        builder.addFrag("intersecting_with_empty" + total, 1, 295, false); // intersects an empty interval
+        total += 1;
+        expected++;
+        tests.add(new Object[]{list, fileHeader, CollectionUtil.makeCollection(builder.iterator()), expected, total});
+
+        builder.addPair("clear" + total, 0, 200, 250); // not intersecting
+        total += 2;
+        tests.add(new Object[]{list, fileHeader, CollectionUtil.makeCollection(builder.iterator()), expected, total});
+
+        return tests.iterator();
+    }
+
+    @Test(dataProvider = "testReadsData")
+    public void testReads(final IntervalList list, final SAMFileHeader fileHeader, final Collection<SAMRecord> recordCollection,
+                          final int expectedPassing, final int expectedTotal ) {
+        IntervalFilter intervalFilter = new IntervalFilter(list.getIntervals(), fileHeader);
+        FilteringSamIterator filteringSamIterator = new FilteringSamIterator(recordCollection.iterator(), intervalFilter);
+
+        // check that the total number of passing reads is the expected number
+        Assert.assertEquals(filteringSamIterator.stream()
+                // check that the reads that pass have the word "intersecting" in their name
+                .peek(s -> Assert.assertTrue(s.getReadName().contains("intersecting")))
+                .count(), expectedPassing);
+
+        //check that the total number of reads given in the Collection, is the expected number
+        Assert.assertEquals(recordCollection.size(), expectedTotal);
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/filter/IntervalKeepPairFilterTest.java b/src/test/java/htsjdk/samtools/filter/IntervalKeepPairFilterTest.java
index 3d30255..7d3c23e 100644
--- a/src/test/java/htsjdk/samtools/filter/IntervalKeepPairFilterTest.java
+++ b/src/test/java/htsjdk/samtools/filter/IntervalKeepPairFilterTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools.filter;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SAMRecordSetBuilder;
 import htsjdk.samtools.util.CollectionUtil;
 import org.testng.Assert;
@@ -11,7 +12,7 @@ import java.util.List;
 import java.util.ArrayList;
 import java.util.stream.StreamSupport;
 
-public class IntervalKeepPairFilterTest {
+public class IntervalKeepPairFilterTest extends HtsjdkTest {
     private static final int READ_LENGTH = 151;
     private final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
 
diff --git a/src/test/java/htsjdk/samtools/filter/JavascriptSamRecordFilterTest.java b/src/test/java/htsjdk/samtools/filter/JavascriptSamRecordFilterTest.java
index 7835576..043f24d 100644
--- a/src/test/java/htsjdk/samtools/filter/JavascriptSamRecordFilterTest.java
+++ b/src/test/java/htsjdk/samtools/filter/JavascriptSamRecordFilterTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools.filter;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SAMRecordIterator;
 import htsjdk.samtools.SamReader;
 import htsjdk.samtools.SamReaderFactory;
@@ -39,7 +40,7 @@ import java.io.IOException;
  * @author Pierre Lindenbaum PhD Institut du Thorax - INSERM - Nantes - France
  */
 
-public class JavascriptSamRecordFilterTest {
+public class JavascriptSamRecordFilterTest extends HtsjdkTest {
     final File testDir = new File("./src/test/resources/htsjdk/samtools");
 
     @DataProvider
diff --git a/src/test/java/htsjdk/samtools/filter/MappingQualityFilterTest.java b/src/test/java/htsjdk/samtools/filter/MappingQualityFilterTest.java
index 2bffcd6..9d9f7b8 100644
--- a/src/test/java/htsjdk/samtools/filter/MappingQualityFilterTest.java
+++ b/src/test/java/htsjdk/samtools/filter/MappingQualityFilterTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools.filter;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SAMRecord;
 import htsjdk.samtools.SAMRecordSetBuilder;
 import org.testng.Assert;
@@ -7,7 +8,7 @@ import org.testng.annotations.BeforeTest;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
-public class MappingQualityFilterTest {
+public class MappingQualityFilterTest extends HtsjdkTest {
     private final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
 
     @BeforeTest
diff --git a/src/test/java/htsjdk/samtools/filter/NotPrimaryAlignmentFilterTest.java b/src/test/java/htsjdk/samtools/filter/NotPrimaryAlignmentFilterTest.java
new file mode 100644
index 0000000..953ac20
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/filter/NotPrimaryAlignmentFilterTest.java
@@ -0,0 +1,109 @@
+package htsjdk.samtools.filter;
+
+import htsjdk.HtsjdkTest;
+import htsjdk.samtools.SAMRecordSetBuilder;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+/**
+ * Created by farjoun on 5/27/17.
+ */
+public class NotPrimaryAlignmentFilterTest extends HtsjdkTest {
+
+    @Test
+    public void testSecondaryRecords() {
+        SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
+        int i = 0;
+        for (boolean record1Unmapped : new boolean[]{true, false}) {
+            for (boolean record2Unmapped : new boolean[]{true, false}) {
+                for (boolean record1Strand : new boolean[]{true, false}) {
+                    for (boolean record2Strand : new boolean[]{true, false}) {
+
+                        builder.addPair("pair" + i, 0, 10, 30,
+                                record1Unmapped, record2Unmapped,
+                                null, null,
+                                record1Strand, record2Strand,
+                                true, true,
+                                10);
+                        builder.addFrag("frag" + i++, 0, 10,
+                                record1Unmapped,
+                                record2Strand,
+                                null, null,
+                                10, true);
+                    }
+                }
+            }
+        }
+
+        FilteringSamIterator filteringSamIterator = new FilteringSamIterator(builder.getRecords().iterator(),
+                new NotPrimaryAlignmentFilter());
+
+        Assert.assertEquals(filteringSamIterator.hasNext(), false);
+    }
+
+    @Test
+    public void testPrimaryRecords() {
+        SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
+        int i = 0;
+        for (boolean record1Unmapped : new boolean[]{true, false}) {
+            for (boolean record2Unmapped : new boolean[]{true, false}) {
+                for (boolean record1Strand : new boolean[]{true, false}) {
+                    for (boolean record2Strand : new boolean[]{true, false}) {
+
+                        builder.addPair("pair" + i, 0, 10, 30,
+                                record1Unmapped, record2Unmapped,
+                                null, null,
+                                record1Strand, record2Strand,
+                                false, false,
+                                10);
+                        builder.addFrag("frag" + i++, 0, 10,
+                                record1Unmapped,
+                                record2Strand,
+                                null, null,
+                                10, false);
+                    }
+                }
+            }
+        }
+
+        FilteringSamIterator filteringSamIterator = new FilteringSamIterator(builder.getRecords().iterator(),
+                new NotPrimaryAlignmentFilter());
+
+        // i is incremented once for each 3 records that are added (a pair and a fragment)
+        Assert.assertEquals(filteringSamIterator.stream().count(), i * 3);
+    }
+
+    @Test
+    public void testSupplementaryRecords() {
+        SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
+        int i = 0;
+        for (boolean record1Unmapped : new boolean[]{true, false}) {
+            for (boolean record2Unmapped : new boolean[]{true, false}) {
+                for (boolean record1Strand : new boolean[]{true, false}) {
+                    for (boolean record2Strand : new boolean[]{true, false}) {
+
+                        builder.addPair("pair" + i, 0, 10, 30,
+                                record1Unmapped, record2Unmapped,
+                                null, null,
+                                record1Strand, record2Strand,
+                                false, false,
+                                10);
+                        builder.addFrag("frag" + i++, 0, 10,
+                                record1Unmapped,
+                                record2Strand,
+                                null, null,
+                                10, false);
+                    }
+                }
+            }
+        }
+        builder.forEach(r -> r.setSupplementaryAlignmentFlag(true));
+
+        FilteringSamIterator filteringSamIterator = new FilteringSamIterator(builder.getRecords().iterator(),
+                new NotPrimaryAlignmentFilter());
+
+        // i is incremented once for each 3 records that are added (a pair and a fragment)
+        Assert.assertEquals(filteringSamIterator.stream().count(), i * 3);
+    }
+}
+
diff --git a/src/test/java/htsjdk/samtools/filter/OverclippedReadFilterTest.java b/src/test/java/htsjdk/samtools/filter/OverclippedReadFilterTest.java
index bff8491..e154e40 100644
--- a/src/test/java/htsjdk/samtools/filter/OverclippedReadFilterTest.java
+++ b/src/test/java/htsjdk/samtools/filter/OverclippedReadFilterTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools.filter;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.Cigar;
 import htsjdk.samtools.SAMRecord;
 import htsjdk.samtools.SAMRecordSetBuilder;
@@ -31,7 +32,7 @@ import org.testng.annotations.BeforeTest;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
-public class OverclippedReadFilterTest {
+public class OverclippedReadFilterTest extends HtsjdkTest {
     private final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
     private final int unclippedBasesThreshold = 30;
 
diff --git a/src/test/java/htsjdk/samtools/filter/ReadNameFilterTest.java b/src/test/java/htsjdk/samtools/filter/ReadNameFilterTest.java
new file mode 100644
index 0000000..e9fef69
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/filter/ReadNameFilterTest.java
@@ -0,0 +1,52 @@
+package htsjdk.samtools.filter;
+
+import htsjdk.HtsjdkTest;
+import htsjdk.samtools.SAMRecordSetBuilder;
+import htsjdk.samtools.util.CollectionUtil;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Created by farjoun on 5/27/17.
+ */
+public class ReadNameFilterTest extends HtsjdkTest {
+
+    final private static File TEST_DIR = new File("src/test/resources/htsjdk/samtools/filter");
+    final private static List<String> names = CollectionUtil.makeList("Read1_filter", "read3_stay", "Read2_filter", "read4_stay", "Hello_filter", "goodbye");
+
+    @Test
+    public void testFilterNames() {
+        SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
+        names.forEach(builder::addUnmappedFragment);
+
+        FilteringSamIterator filteringSamIterator = new FilteringSamIterator(builder.getRecords().iterator(),
+                new ReadNameFilter(new File(TEST_DIR, "names.txt"), true));
+
+        Assert.assertEquals(filteringSamIterator.stream()
+                .peek(s -> Assert.assertTrue(s.getReadName().contains("filter")))
+                .count(), 3);
+    }
+
+    @DataProvider(name = "TrueFalse")
+    public Object[][] TrueFalse() {
+        return new Object[][]{{true}, {false}};
+    }
+
+    @Test(dataProvider = "TrueFalse")
+    public void testFilterNamesEmptySetTrue(boolean include) {
+        SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
+
+        names.forEach(builder::addUnmappedFragment);
+
+        FilteringSamIterator filteringSamIterator = new FilteringSamIterator(builder.getRecords().iterator(),
+                new ReadNameFilter(Collections.emptySet(), include));
+
+        Assert.assertEquals(filteringSamIterator.hasNext(), !include);
+        Assert.assertEquals(filteringSamIterator.stream().count(), include ? 0 : names.size());
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/filter/SecondaryOrSupplementaryFilterTest.java b/src/test/java/htsjdk/samtools/filter/SecondaryOrSupplementaryFilterTest.java
new file mode 100644
index 0000000..94a6f36
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/filter/SecondaryOrSupplementaryFilterTest.java
@@ -0,0 +1,107 @@
+package htsjdk.samtools.filter;
+
+import htsjdk.HtsjdkTest;
+import htsjdk.samtools.SAMRecordSetBuilder;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import static org.testng.Assert.*;
+
+/**
+ * Created by farjoun on 5/27/17.
+ */
+public class SecondaryOrSupplementaryFilterTest extends HtsjdkTest {
+
+    @Test
+    public void testSecondaryRecords() {
+        SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
+        int i = 0;
+        for (boolean record1Unmapped : new boolean[]{true, false}) {
+            for (boolean record2Unmapped : new boolean[]{true, false}) {
+                for (boolean record1Strand : new boolean[]{true, false}) {
+                    for (boolean record2Strand : new boolean[]{true, false}) {
+
+                        builder.addPair("pair" + i, 0, 10, 30,
+                                record1Unmapped, record2Unmapped,
+                                null, null,
+                                record1Strand, record2Strand,
+                                true, true,
+                                10);
+                        builder.addFrag("frag" + i++, 0, 10,
+                                record1Unmapped,
+                                record2Strand,
+                                null, null,
+                                10, true);
+                    }
+                }
+            }
+        }
+
+        FilteringSamIterator filteringSamIterator = new FilteringSamIterator(builder.getRecords().iterator(),
+                new SecondaryOrSupplementaryFilter());
+
+        Assert.assertEquals(filteringSamIterator.hasNext(), false);
+    }
+
+    @Test
+    public void testSupplementaryRecords() {
+        SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
+        int i = 0;
+        for (boolean record1Unmapped : new boolean[]{true, false}) {
+            for (boolean record2Unmapped : new boolean[]{true, false}) {
+                for (boolean record1Strand : new boolean[]{true, false}) {
+                    for (boolean record2Strand : new boolean[]{true, false}) {
+
+                        builder.addPair("pair" + i, 0, 10, 30,
+                                record1Unmapped, record2Unmapped,
+                                null, null,
+                                record1Strand, record2Strand,
+                                false, false,
+                                10).stream().forEach(r -> r.setSupplementaryAlignmentFlag(true));
+                        builder.addFrag("frag" + i++, 0, 10,
+                                record1Unmapped,
+                                record2Strand,
+                                null, null,
+                                10, false).setSupplementaryAlignmentFlag(true);
+                    }
+                }
+            }
+        }
+        FilteringSamIterator filteringSamIterator = new FilteringSamIterator(builder.getRecords().iterator(),
+                new SecondaryOrSupplementaryFilter());
+
+        Assert.assertEquals(filteringSamIterator.hasNext(), false);
+    }
+
+    @Test
+    public void testPrimaryRecords() {
+        SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
+        int i = 0;
+        for (boolean record1Unmapped : new boolean[]{true, false}) {
+            for (boolean record2Unmapped : new boolean[]{true, false}) {
+                for (boolean record1Strand : new boolean[]{true, false}) {
+                    for (boolean record2Strand : new boolean[]{true, false}) {
+
+                        builder.addPair("pair" + i, 0, 10, 30,
+                                record1Unmapped, record2Unmapped,
+                                null, null,
+                                record1Strand, record2Strand,
+                                false, false,
+                                10);
+                        builder.addFrag("frag" + i++, 0, 10,
+                                record1Unmapped,
+                                record2Strand,
+                                null, null,
+                                10, false);
+                    }
+                }
+            }
+        }
+
+        FilteringSamIterator filteringSamIterator = new FilteringSamIterator(builder.getRecords().iterator(),
+                new SecondaryOrSupplementaryFilter());
+
+        // i is incremented once for each 3 records that are added (a pair and a fragment)
+        Assert.assertEquals(filteringSamIterator.stream().count(), i * 3);
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/filter/SolexaNoiseFilterTest.java b/src/test/java/htsjdk/samtools/filter/SolexaNoiseFilterTest.java
index 96fa324..5ea20d4 100644
--- a/src/test/java/htsjdk/samtools/filter/SolexaNoiseFilterTest.java
+++ b/src/test/java/htsjdk/samtools/filter/SolexaNoiseFilterTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools.filter;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SAMRecord;
 import htsjdk.samtools.SAMRecordSetBuilder;
 import org.testng.Assert;
@@ -32,7 +33,7 @@ import org.testng.annotations.Test;
 /**
  * Basic test for the SolexaNoiseFilter
  */
-public class SolexaNoiseFilterTest {
+public class SolexaNoiseFilterTest extends HtsjdkTest {
 
     private final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
     private final SolexaNoiseFilter filter = new SolexaNoiseFilter();
diff --git a/src/test/java/htsjdk/samtools/filter/TagFilterTest.java b/src/test/java/htsjdk/samtools/filter/TagFilterTest.java
index 6e0c702..d885cbe 100644
--- a/src/test/java/htsjdk/samtools/filter/TagFilterTest.java
+++ b/src/test/java/htsjdk/samtools/filter/TagFilterTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools.filter;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.ReservedTagConstants;
 import htsjdk.samtools.SAMRecord;
 import htsjdk.samtools.SAMRecordSetBuilder;
@@ -36,7 +37,7 @@ import java.util.List;
 /**
  * Tests for the TagFilter class
  */
-public class TagFilterTest {
+public class TagFilterTest extends HtsjdkTest {
     private final SAMRecordSetBuilder builder = new SAMRecordSetBuilder();
 
 
@@ -74,4 +75,4 @@ public class TagFilterTest {
             {"Null value negative test", ReservedTagConstants.XN, Arrays.asList(1), null, false} 
         };
     }
-}
\ No newline at end of file
+}
diff --git a/src/test/java/htsjdk/samtools/liftover/LiftOverTest.java b/src/test/java/htsjdk/samtools/liftover/LiftOverTest.java
index 8e9f92e..206dd68 100644
--- a/src/test/java/htsjdk/samtools/liftover/LiftOverTest.java
+++ b/src/test/java/htsjdk/samtools/liftover/LiftOverTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools.liftover;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.Interval;
 import htsjdk.samtools.util.OverlapDetector;
 import org.testng.Assert;
@@ -40,7 +41,7 @@ import java.util.TreeMap;
 /**
  * @author alecw at broadinstitute.org
  */
-public class LiftOverTest {
+public class LiftOverTest extends HtsjdkTest {
     private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools/liftover");
     private static final File CHAIN_FILE = new File(TEST_DATA_DIR, "hg18ToHg19.over.chain");
 
diff --git a/src/test/java/htsjdk/samtools/metrics/MetricBaseTest.java b/src/test/java/htsjdk/samtools/metrics/MetricBaseTest.java
index cbbbbc3..5bd4ea4 100644
--- a/src/test/java/htsjdk/samtools/metrics/MetricBaseTest.java
+++ b/src/test/java/htsjdk/samtools/metrics/MetricBaseTest.java
@@ -1,10 +1,11 @@
 package htsjdk.samtools.metrics;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
-public class MetricBaseTest {
+public class MetricBaseTest extends HtsjdkTest {
 
     private static class TestMetric extends MetricBase{
         public Object anyObject;
diff --git a/src/test/java/htsjdk/samtools/metrics/MetricsFileTest.java b/src/test/java/htsjdk/samtools/metrics/MetricsFileTest.java
index 228d87d..2d8bf6f 100644
--- a/src/test/java/htsjdk/samtools/metrics/MetricsFileTest.java
+++ b/src/test/java/htsjdk/samtools/metrics/MetricsFileTest.java
@@ -24,6 +24,7 @@
 
 package htsjdk.samtools.metrics;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SAMException;
 import htsjdk.samtools.util.FormatUtil;
 import htsjdk.samtools.util.Histogram;
@@ -45,7 +46,7 @@ import java.util.Date;
  *
  * @author Tim Fennell
  */
-public class MetricsFileTest {
+public class MetricsFileTest extends HtsjdkTest {
     public enum TestEnum {One, Two, Three}
 
     public static class TestMetric extends MetricBase implements Cloneable, Serializable {
diff --git a/src/test/java/htsjdk/samtools/metrics/StringHeaderTest.java b/src/test/java/htsjdk/samtools/metrics/StringHeaderTest.java
index 32f1322..ed0adf3 100644
--- a/src/test/java/htsjdk/samtools/metrics/StringHeaderTest.java
+++ b/src/test/java/htsjdk/samtools/metrics/StringHeaderTest.java
@@ -1,13 +1,14 @@
 package htsjdk.samtools.metrics;
 
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.TestUtil;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
 import java.io.IOException;
 
-public class StringHeaderTest {
+public class StringHeaderTest extends HtsjdkTest {
 
     @Test
     public void testStringHeaderSerialization() throws IOException, ClassNotFoundException {
@@ -16,4 +17,4 @@ public class StringHeaderTest {
         Assert.assertEquals(deserializedHeader, header);
     }
 
-}
\ No newline at end of file
+}
diff --git a/src/test/java/htsjdk/samtools/metrics/VersionHeaderTest.java b/src/test/java/htsjdk/samtools/metrics/VersionHeaderTest.java
index 576f8d1..97534d0 100644
--- a/src/test/java/htsjdk/samtools/metrics/VersionHeaderTest.java
+++ b/src/test/java/htsjdk/samtools/metrics/VersionHeaderTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools.metrics;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.TestUtil;
 import org.testng.Assert;
 import org.testng.annotations.Test;
@@ -7,7 +8,7 @@ import org.testng.annotations.Test;
 import java.io.IOException;
 
 
-public class VersionHeaderTest {
+public class VersionHeaderTest extends HtsjdkTest {
 
     @Test
     public void testSerializeVersionHeader() throws IOException, ClassNotFoundException {
@@ -19,4 +20,4 @@ public class VersionHeaderTest {
 
     }
 
-}
\ No newline at end of file
+}
diff --git a/src/test/java/htsjdk/samtools/reference/FastaSequenceFileTest.java b/src/test/java/htsjdk/samtools/reference/FastaSequenceFileTest.java
index 87927f6..a7462db 100644
--- a/src/test/java/htsjdk/samtools/reference/FastaSequenceFileTest.java
+++ b/src/test/java/htsjdk/samtools/reference/FastaSequenceFileTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools.reference;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.StringUtil;
 import org.testng.Assert;
 import org.testng.annotations.Test;
@@ -33,7 +34,7 @@ import java.io.PrintWriter;
 /**
  * @author alecw at broadinstitute.org
  */
-public class FastaSequenceFileTest {
+public class FastaSequenceFileTest extends HtsjdkTest {
     @Test
     public void testTrailingWhitespace() throws Exception {
         final File fasta = File.createTempFile("test", ".fasta");
diff --git a/src/test/java/htsjdk/samtools/reference/FastaSequenceIndexCreatorTest.java b/src/test/java/htsjdk/samtools/reference/FastaSequenceIndexCreatorTest.java
new file mode 100644
index 0000000..193770a
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/reference/FastaSequenceIndexCreatorTest.java
@@ -0,0 +1,90 @@
+/*
+ * The MIT License (MIT)
+ *
+ * Copyright (c) 2017 Daniel Gomez-Sanchez
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package htsjdk.samtools.reference;
+
+import htsjdk.HtsjdkTest;
+import htsjdk.samtools.util.IOUtil;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.nio.file.Files;
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+/**
+ * @author Daniel Gomez-Sanchez (magicDGS)
+ */
+public class FastaSequenceIndexCreatorTest extends HtsjdkTest {
+    private static File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools/reference");
+
+
+    @DataProvider(name = "indexedSequences")
+    public Object[][] getIndexedSequences() {
+        return new Object[][]{
+                {new File(TEST_DATA_DIR, "Homo_sapiens_assembly18.trimmed.fasta")},
+                {new File(TEST_DATA_DIR, "Homo_sapiens_assembly18.trimmed.fasta.gz")},
+                {new File(TEST_DATA_DIR, "header_with_white_space.fasta")},
+                {new File(TEST_DATA_DIR, "crlf.fasta")}
+        };
+    }
+
+    @Test(dataProvider = "indexedSequences")
+    public void testBuildFromFasta(final File indexedFile) throws Exception {
+        final FastaSequenceIndex original = new FastaSequenceIndex(new File(indexedFile.getAbsolutePath() + ".fai"));
+        final FastaSequenceIndex build = FastaSequenceIndexCreator.buildFromFasta(indexedFile.toPath());
+        Assert.assertEquals(original, build);
+    }
+
+    @Test(dataProvider = "indexedSequences")
+    public void testCreate(final File indexedFile) throws Exception {
+        // copy the file to index
+        final File tempDir = IOUtil.createTempDir("FastaSequenceIndexCreatorTest", "testCreate");
+        final File copied = new File(tempDir, indexedFile.getName());
+        copied.deleteOnExit();
+        Files.copy(indexedFile.toPath(), copied.toPath());
+
+        // create the index for the copied file
+        FastaSequenceIndexCreator.create(copied.toPath(), false);
+
+        // test if the expected .fai and the created one are the same
+        final File expectedFai = new File(indexedFile.getAbsolutePath() +  ".fai");
+        final File createdFai = new File(copied.getAbsolutePath() + ".fai");
+
+        // read all the files and compare line by line
+        try(final Stream<String> expected = Files.lines(expectedFai.toPath());
+                final Stream<String> created = Files.lines(createdFai.toPath())) {
+            final List<String> expectedLines = expected.filter(String::isEmpty).collect(Collectors.toList());
+            final List<String> createdLines = created.filter(String::isEmpty).collect(Collectors.toList());
+            Assert.assertEquals(expectedLines, createdLines);
+        }
+
+        // load the tmp index and check that both are the same
+        Assert.assertEquals(new FastaSequenceIndex(createdFai), new FastaSequenceIndex(expectedFai));
+    }
+
+}
\ No newline at end of file
diff --git a/src/test/java/htsjdk/samtools/reference/FastaSequenceIndexTest.java b/src/test/java/htsjdk/samtools/reference/FastaSequenceIndexTest.java
index bfef121..c6fa138 100644
--- a/src/test/java/htsjdk/samtools/reference/FastaSequenceIndexTest.java
+++ b/src/test/java/htsjdk/samtools/reference/FastaSequenceIndexTest.java
@@ -24,19 +24,26 @@
 
 package htsjdk.samtools.reference;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SAMException;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
+import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.nio.file.Files;
 import java.util.Iterator;
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
 
 /**
  * Test the fasta sequence index reader.
  */
-public class FastaSequenceIndexTest {
+public class FastaSequenceIndexTest extends HtsjdkTest {
     private static File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools/reference");
 
     @DataProvider(name="homosapiens")
@@ -253,4 +260,30 @@ public class FastaSequenceIndexTest {
         Assert.assertEquals(ent.getBasesPerLine(),70,"Contig file:gi|17981852|ref|NC_001807.4| bases per line is not correct");
         Assert.assertEquals(ent.getBytesPerLine(),71,"Contig file:gi|17981852|ref|NC_001807.4| bytes per line is not correct");
     }
+
+    @Test
+    public void testWrite() throws Exception {
+        // gets the original file and index
+        final File originalFile = new File(TEST_DATA_DIR, "testing.fai");
+        final FastaSequenceIndex originalIndex = new FastaSequenceIndex(originalFile);
+
+        // write the index to a temp file and test if files are the same
+        final File fileToWrite = File.createTempFile("testing.toWrite", "fai");
+        fileToWrite.deleteOnExit();
+        originalIndex.write(fileToWrite.toPath());
+
+        // read all the files and compare line by line
+        try(final Stream<String> original = Files.lines(originalFile.toPath());
+            final Stream<String> written = Files.lines(fileToWrite.toPath())) {
+            final List<String> originalLines = original.filter(s -> ! s.isEmpty()).collect(Collectors.toList());
+            final List<String> actualLines = written.filter(s -> !s.isEmpty()).collect(Collectors.toList());
+            Assert.assertEquals(actualLines, originalLines);
+        }
+
+        // load the tmp index and check that both are the same
+        final FastaSequenceIndex writtenIndex = new FastaSequenceIndex(fileToWrite);
+        Assert.assertEquals(writtenIndex, originalIndex);
+    }
+
+
 }
diff --git a/src/test/java/htsjdk/samtools/reference/IndexedFastaSequenceFileTest.java b/src/test/java/htsjdk/samtools/reference/IndexedFastaSequenceFileTest.java
index 086b3be..d352a03 100644
--- a/src/test/java/htsjdk/samtools/reference/IndexedFastaSequenceFileTest.java
+++ b/src/test/java/htsjdk/samtools/reference/IndexedFastaSequenceFileTest.java
@@ -24,6 +24,7 @@
 
 package htsjdk.samtools.reference;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SAMException;
 import htsjdk.samtools.util.CloserUtil;
 import htsjdk.samtools.util.StringUtil;
@@ -37,7 +38,7 @@ import java.io.FileNotFoundException;
 /**
  * Test the indexed fasta sequence file reader.
  */
-public class IndexedFastaSequenceFileTest{
+public class IndexedFastaSequenceFileTest extends HtsjdkTest {
     private static File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools/reference");
     private static File SEQUENCE_FILE = new File(TEST_DATA_DIR,"Homo_sapiens_assembly18.trimmed.fasta");
     private static File SEQUENCE_FILE_NODICT = new File(TEST_DATA_DIR,"Homo_sapiens_assembly18.trimmed.nodict.fasta");
diff --git a/src/test/java/htsjdk/samtools/reference/ReferenceSequenceFileFactoryTests.java b/src/test/java/htsjdk/samtools/reference/ReferenceSequenceFileFactoryTests.java
index 6eeae7b..5d827f8 100644
--- a/src/test/java/htsjdk/samtools/reference/ReferenceSequenceFileFactoryTests.java
+++ b/src/test/java/htsjdk/samtools/reference/ReferenceSequenceFileFactoryTests.java
@@ -1,6 +1,8 @@
 package htsjdk.samtools.reference;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
+import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
 import java.io.File;
@@ -8,7 +10,7 @@ import java.io.File;
 /**
  * Simple tests for the reference sequence file factory
  */
-public class ReferenceSequenceFileFactoryTests {
+public class ReferenceSequenceFileFactoryTests extends HtsjdkTest {
     public static final File hg18 = new File("src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta");
 
     @Test public void testPositivePath() {
@@ -36,4 +38,21 @@ public class ReferenceSequenceFileFactoryTests {
         Assert.assertTrue(f instanceof IndexedFastaSequenceFile, "Got non-indexed reader by default.");
     }
 
+
+    @DataProvider
+    public Object[][] fastaNames() {
+        return new Object[][] {
+                {"break.fa", "break.dict"},
+                {"break.txt.txt", "break.txt.dict"},
+                {"break.fasta.fasta", "break.fasta.dict"},
+                {"break.fa.gz", "break.dict"},
+                {"break.txt.gz.txt.gz", "break.txt.gz.dict"},
+                {"break.fasta.gz.fasta.gz", "break.fasta.gz.dict"}
+        };
+    }
+
+    @Test(dataProvider = "fastaNames")
+    public void testGetDefaultDictionaryForReferenceSequence(final String fastaFile, final String expectedDict) throws Exception {
+        Assert.assertEquals(ReferenceSequenceFileFactory.getDefaultDictionaryForReferenceSequence(new File(fastaFile)), new File(expectedDict));
+    }
 }
diff --git a/src/test/java/htsjdk/samtools/reference/ReferenceSequenceFileWalkerTest.java b/src/test/java/htsjdk/samtools/reference/ReferenceSequenceFileWalkerTest.java
index fa746d6..e6299c3 100644
--- a/src/test/java/htsjdk/samtools/reference/ReferenceSequenceFileWalkerTest.java
+++ b/src/test/java/htsjdk/samtools/reference/ReferenceSequenceFileWalkerTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools.reference;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SAMException;
 import htsjdk.samtools.util.CloserUtil;
 import org.testng.Assert;
@@ -11,7 +12,7 @@ import java.io.File;
 /**
  * Created by farjoun on 2/14/14.
  */
-public class ReferenceSequenceFileWalkerTest {
+public class ReferenceSequenceFileWalkerTest extends HtsjdkTest {
 
 
     @DataProvider(name = "TestReference")
diff --git a/src/test/java/htsjdk/samtools/reference/ReferenceSequenceTests.java b/src/test/java/htsjdk/samtools/reference/ReferenceSequenceTests.java
index 797b0b3..4bb922c 100644
--- a/src/test/java/htsjdk/samtools/reference/ReferenceSequenceTests.java
+++ b/src/test/java/htsjdk/samtools/reference/ReferenceSequenceTests.java
@@ -24,6 +24,7 @@
 
 package htsjdk.samtools.reference;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
@@ -38,7 +39,7 @@ import java.util.Random;
  *
  * @author Tim Fennell
  */
-public class ReferenceSequenceTests {
+public class ReferenceSequenceTests extends HtsjdkTest {
     private static final byte[] BASES = "acgtACGTN".getBytes();
     private final Random random = new Random();
 
diff --git a/src/test/java/htsjdk/samtools/seekablestream/ByteArraySeekableStreamTest.java b/src/test/java/htsjdk/samtools/seekablestream/ByteArraySeekableStreamTest.java
new file mode 100644
index 0000000..04a228f
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/seekablestream/ByteArraySeekableStreamTest.java
@@ -0,0 +1,116 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2017 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ */
+
+package htsjdk.samtools.seekablestream;
+
+import htsjdk.HtsjdkTest;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.IOException;
+
+/**
+ * Created by farjoun on 5/27/17.
+ */
+public class ByteArraySeekableStreamTest extends HtsjdkTest {
+    private final byte[] bytes = "ABCDE12345".getBytes();
+
+    @Test
+    public void testNormalBehavior() throws IOException {
+        ByteArraySeekableStream byteArraySeekableStream = new ByteArraySeekableStream(bytes);
+
+        Assert.assertEquals(byteArraySeekableStream.length(), 10);
+        for (int i = 0; i < 10; i++) {
+            Assert.assertFalse(byteArraySeekableStream.eof());
+            Assert.assertEquals(byteArraySeekableStream.position(), i);
+            Assert.assertEquals(byteArraySeekableStream.read(), bytes[i]);
+        }
+
+        Assert.assertTrue(byteArraySeekableStream.eof());
+        Assert.assertEquals(byteArraySeekableStream.position(), 10);
+        Assert.assertEquals(byteArraySeekableStream.read(), -1);
+
+        final long i = 0;
+        byteArraySeekableStream.seek(i);
+
+        Assert.assertEquals(byteArraySeekableStream.position(), i);
+        Assert.assertEquals(byteArraySeekableStream.read(), bytes[(int) i]);
+
+        byte[] copy = new byte[10];
+
+        Assert.assertEquals(byteArraySeekableStream.read(copy), 9);
+        Assert.assertEquals(byteArraySeekableStream.position(), 10);
+
+        byteArraySeekableStream.seek(0L);
+
+        Assert.assertEquals(byteArraySeekableStream.read(copy), 10);
+        Assert.assertEquals(byteArraySeekableStream.position(), 10);
+
+        Assert.assertEquals(copy, bytes);
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testCantSeekNegative() throws IOException {
+
+        ByteArraySeekableStream byteArraySeekableStream = new ByteArraySeekableStream(bytes);
+
+        byteArraySeekableStream.seek(-1L);
+
+        // if allowed to seek, this will throw OutOfBounds
+        final int f = byteArraySeekableStream.read();
+    }
+
+    @Test
+    public void testCantReadPostEof() throws IOException {
+
+        ByteArraySeekableStream byteArraySeekableStream = new ByteArraySeekableStream(bytes);
+        byte[] copy = new byte[10];
+
+        byteArraySeekableStream.seek(10);
+        Assert.assertEquals(byteArraySeekableStream.read(copy), -1);
+        Assert.assertEquals(byteArraySeekableStream.read(), -1);
+    }
+
+    @DataProvider(name = "abnormalReadRequests")
+    public Object[][] abnormalReadRequestsProvider() {
+        return new Object[][]{
+                {new byte[10], -1, 0},
+                {new byte[10], -1, -1},
+                {new byte[10], 0, -1},
+                {new byte[10], 0, -1},
+                {new byte[10], 0, 11},
+                {new byte[10], 6, 6},
+                {new byte[10], 11, 0},
+        };
+    }
+
+    @Test(dataProvider = "abnormalReadRequests", expectedExceptions = IndexOutOfBoundsException.class)
+    public void testAbnormalReadRequest(final byte[] b, final int off, final int length) throws IOException {
+
+        ByteArraySeekableStream byteArraySeekableStream = new ByteArraySeekableStream(bytes);
+        int i = byteArraySeekableStream.read(b, off, length);
+
+        Assert.assertEquals(i, -2); ///impossible
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/seekablestream/SeekableBufferedStreamTest.java b/src/test/java/htsjdk/samtools/seekablestream/SeekableBufferedStreamTest.java
index 9720218..8de5873 100644
--- a/src/test/java/htsjdk/samtools/seekablestream/SeekableBufferedStreamTest.java
+++ b/src/test/java/htsjdk/samtools/seekablestream/SeekableBufferedStreamTest.java
@@ -24,6 +24,7 @@
 
 package htsjdk.samtools.seekablestream;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
@@ -33,7 +34,7 @@ import java.net.URL;
 
 import static org.testng.Assert.assertEquals;
 
-public class SeekableBufferedStreamTest {
+public class SeekableBufferedStreamTest extends HtsjdkTest {
 
 //    private final File BAM_INDEX_FILE = new File("testdata/htsjdk/samtools/BAMFileIndexTest/index_test.bam.bai");
     private final File BAM_FILE = new File("src/test/resources/htsjdk/samtools/BAMFileIndexTest/index_test.bam");
diff --git a/src/test/java/htsjdk/samtools/seekablestream/SeekableFTPStreamTest.java b/src/test/java/htsjdk/samtools/seekablestream/SeekableFTPStreamTest.java
index 966c18e..4dfc8b7 100644
--- a/src/test/java/htsjdk/samtools/seekablestream/SeekableFTPStreamTest.java
+++ b/src/test/java/htsjdk/samtools/seekablestream/SeekableFTPStreamTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools.seekablestream;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.AfterMethod;
 import org.testng.annotations.BeforeMethod;
@@ -35,7 +36,7 @@ import java.net.URL;
  * @author Jim Robinson
  * @since 10/3/11
  */
-public class SeekableFTPStreamTest {
+public class SeekableFTPStreamTest extends HtsjdkTest {
 
 
     static String urlString = "ftp://ftp.broadinstitute.org/pub/igv/TEST/test.txt";
diff --git a/src/test/java/htsjdk/samtools/seekablestream/SeekableFileStreamTest.java b/src/test/java/htsjdk/samtools/seekablestream/SeekableFileStreamTest.java
index 35e1545..ddf54ef 100644
--- a/src/test/java/htsjdk/samtools/seekablestream/SeekableFileStreamTest.java
+++ b/src/test/java/htsjdk/samtools/seekablestream/SeekableFileStreamTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools.seekablestream;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.BufferedLineReader;
 import org.testng.Assert;
 import org.testng.annotations.Test;
@@ -36,7 +37,7 @@ import java.io.File;
  * Time: 11:13:19 AM
  * To change this template use File | Settings | File Templates.
  */
-public class SeekableFileStreamTest {
+public class SeekableFileStreamTest extends HtsjdkTest {
 
     @Test
     public void testSeek() throws Exception {
diff --git a/src/test/java/htsjdk/samtools/seekablestream/SeekableMemoryStreamTest.java b/src/test/java/htsjdk/samtools/seekablestream/SeekableMemoryStreamTest.java
index 09ad92d..428090c 100644
--- a/src/test/java/htsjdk/samtools/seekablestream/SeekableMemoryStreamTest.java
+++ b/src/test/java/htsjdk/samtools/seekablestream/SeekableMemoryStreamTest.java
@@ -24,13 +24,14 @@
 
 package htsjdk.samtools.seekablestream;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
 import java.io.EOFException;
 import java.io.IOException;
 
-public class SeekableMemoryStreamTest {
+public class SeekableMemoryStreamTest extends HtsjdkTest {
 
     @Test
     public void test_getSource() {
diff --git a/src/test/java/htsjdk/samtools/seekablestream/SeekablePathStreamTest.java b/src/test/java/htsjdk/samtools/seekablestream/SeekablePathStreamTest.java
index 067f5be..8b1f151 100644
--- a/src/test/java/htsjdk/samtools/seekablestream/SeekablePathStreamTest.java
+++ b/src/test/java/htsjdk/samtools/seekablestream/SeekablePathStreamTest.java
@@ -26,10 +26,12 @@ package htsjdk.samtools.seekablestream;
 import java.io.File;
 import java.nio.file.Files;
 import java.nio.file.Path;
+
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
-public class SeekablePathStreamTest {
+public class SeekablePathStreamTest extends HtsjdkTest {
 
     @Test
     public void testRead() throws Exception {
diff --git a/src/test/java/htsjdk/samtools/seekablestream/SeekableStreamFactoryTest.java b/src/test/java/htsjdk/samtools/seekablestream/SeekableStreamFactoryTest.java
index 5eb0af6..82f8f1c 100644
--- a/src/test/java/htsjdk/samtools/seekablestream/SeekableStreamFactoryTest.java
+++ b/src/test/java/htsjdk/samtools/seekablestream/SeekableStreamFactoryTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools.seekablestream;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.TestUtil;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
@@ -9,7 +10,7 @@ import java.io.File;
 import java.io.IOException;
 import java.net.URL;
 
-public class SeekableStreamFactoryTest {
+public class SeekableStreamFactoryTest extends HtsjdkTest {
     private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools");
 
     @Test
diff --git a/src/test/java/htsjdk/samtools/sra/AbstractSRATest.java b/src/test/java/htsjdk/samtools/sra/AbstractSRATest.java
index 297b892..eeba1d2 100644
--- a/src/test/java/htsjdk/samtools/sra/AbstractSRATest.java
+++ b/src/test/java/htsjdk/samtools/sra/AbstractSRATest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools.sra;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SAMRecord;
 import htsjdk.samtools.SAMRecordIterator;
 import org.testng.Assert;
@@ -12,7 +13,7 @@ import java.lang.reflect.Method;
 import java.util.NoSuchElementException;
 
 @Test(groups = "sra")
-public abstract class AbstractSRATest {
+public abstract class AbstractSRATest extends HtsjdkTest {
     private static boolean canResolveNetworkAccession = false;
     private static String checkAccession = "SRR000123";
 
@@ -24,14 +25,14 @@ public abstract class AbstractSRATest {
         canResolveNetworkAccession = SRAAccession.isValid(checkAccession);
     }
 
-    @BeforeMethod
+    @BeforeMethod(groups = "sra")
     public final void assertSRAIsSupported() {
         if(SRAAccession.checkIfInitialized() != null){
             throw new SkipException("Skipping SRA Test because SRA native code is unavailable.");
         }
     }
 
-    @BeforeMethod
+    @BeforeMethod(groups = "sra")
     public final void skipIfCantResolve(Method method, Object[] params) {
         String accession = null;
 
diff --git a/src/test/java/htsjdk/samtools/util/AbstractLocusInfoTest.java b/src/test/java/htsjdk/samtools/util/AbstractLocusInfoTest.java
index a21c743..7a0d4be 100644
--- a/src/test/java/htsjdk/samtools/util/AbstractLocusInfoTest.java
+++ b/src/test/java/htsjdk/samtools/util/AbstractLocusInfoTest.java
@@ -24,19 +24,20 @@
 
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SAMFileHeader;
 import htsjdk.samtools.SAMRecord;
 import htsjdk.samtools.SAMSequenceRecord;
 import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
 
-import static org.junit.Assert.assertEquals;
+import static org.testng.Assert.assertEquals;
 
 /**
  * @author Mariia_Zueva at epam.com, EPAM Systems, Inc. <www.epam.com>
  */
 
-public class AbstractLocusInfoTest {
+public class AbstractLocusInfoTest extends HtsjdkTest {
     private final byte[] qualities = {30, 50, 50, 60, 60, 70, 70, 70, 80, 90, 30, 50, 50, 60, 60, 70, 70, 70, 80, 90};
     private byte[] bases = {'A', 'C', 'G', 'T', 'A', 'C', 'G', 'T', 'T', 'C', 'A', 'C', 'G', 'T', 'A', 'C', 'G', 'T', 'T', 'C'};
     private EdgingRecordAndOffset typedRecordAndOffset;
diff --git a/src/test/java/htsjdk/samtools/util/AbstractLocusIteratorTestTemplate.java b/src/test/java/htsjdk/samtools/util/AbstractLocusIteratorTestTemplate.java
index 0c08436..d1e2f0f 100644
--- a/src/test/java/htsjdk/samtools/util/AbstractLocusIteratorTestTemplate.java
+++ b/src/test/java/htsjdk/samtools/util/AbstractLocusIteratorTestTemplate.java
@@ -25,6 +25,7 @@
 package htsjdk.samtools.util;
 
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SAMFileHeader;
 import htsjdk.samtools.SAMRecordSetBuilder;
 import htsjdk.samtools.SAMSequenceDictionary;
@@ -36,7 +37,7 @@ import htsjdk.samtools.SAMSequenceRecord;
  * @author Mariia_Zueva at epam.com, EPAM Systems, Inc. <www.epam.com>
  * 
  */
-public abstract class AbstractLocusIteratorTestTemplate {
+public abstract class AbstractLocusIteratorTestTemplate extends HtsjdkTest {
 
     /** Coverage for tests with the same reads */
     final static int coverage = 2;
@@ -65,4 +66,4 @@ public abstract class AbstractLocusIteratorTestTemplate {
     public abstract void testEmitUncoveredLoci();
     public abstract void testSimpleGappedAlignment();
     public abstract void testOverlappingGappedAlignmentsWithoutIndels();
-}
\ No newline at end of file
+}
diff --git a/src/test/java/htsjdk/samtools/util/AbstractRecordAndOffsetTest.java b/src/test/java/htsjdk/samtools/util/AbstractRecordAndOffsetTest.java
index 568c84c..0e0845f 100644
--- a/src/test/java/htsjdk/samtools/util/AbstractRecordAndOffsetTest.java
+++ b/src/test/java/htsjdk/samtools/util/AbstractRecordAndOffsetTest.java
@@ -23,12 +23,13 @@
  */
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SAMFileHeader;
 import htsjdk.samtools.SAMRecord;
 import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
+
+import static org.testng.Assert.assertEquals;
 
 /**
  * 
@@ -36,7 +37,7 @@ import static org.junit.Assert.assertEquals;
  *
  */
 
-public class AbstractRecordAndOffsetTest {
+public class AbstractRecordAndOffsetTest extends HtsjdkTest {
 
     private final byte[] qualities = {30, 40, 50, 60, 70, 80 ,90, 70, 80, 90};
     private byte[] bases = {'A', 'C', 'G', 'T', 'A', 'C', 'G', 'T', 'T', 'C'};
@@ -52,12 +53,11 @@ public class AbstractRecordAndOffsetTest {
 
     @Test
     public void testConstructor(){
-        AbstractRecordAndOffset abstractRecordAndOffset = new AbstractRecordAndOffset(record, 0, 10, 3);
-        assertArrayEquals(qualities, abstractRecordAndOffset.getBaseQualities());
-        assertArrayEquals(bases, abstractRecordAndOffset.getRecord().getReadBases());
+        AbstractRecordAndOffset abstractRecordAndOffset = new AbstractRecordAndOffset(record, 0);
+        assertEquals(qualities, abstractRecordAndOffset.getBaseQualities());
+        assertEquals(bases, abstractRecordAndOffset.getRecord().getReadBases());
         assertEquals('A', abstractRecordAndOffset.getReadBase());
         assertEquals(30, abstractRecordAndOffset.getBaseQuality());
         assertEquals(0, abstractRecordAndOffset.getOffset());
-        assertEquals(-1, abstractRecordAndOffset.getRefPos());
     }
 }
diff --git a/src/test/java/htsjdk/samtools/util/AsyncBlockCompressedInputStreamTest.java b/src/test/java/htsjdk/samtools/util/AsyncBlockCompressedInputStreamTest.java
new file mode 100644
index 0000000..a1f9881
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/util/AsyncBlockCompressedInputStreamTest.java
@@ -0,0 +1,92 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2016 Daniel Cameron
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools.util;
+
+import htsjdk.HtsjdkTest;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import htsjdk.samtools.seekablestream.SeekableFileStream;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+
+public class AsyncBlockCompressedInputStreamTest extends HtsjdkTest {
+    private final File BAM_FILE = new File("src/test/resources/htsjdk/samtools/BAMFileIndexTest/index_test.bam");
+    @Test
+    public void testAsync() throws Exception {
+    	BlockCompressedInputStream sync = new BlockCompressedInputStream(new SeekableFileStream(BAM_FILE));
+    	List<byte[]> expected = new ArrayList<>();
+    	List<Long> virtualOffset = new ArrayList<>();
+    	List<Integer> length = new ArrayList<>();
+    	byte[] buffer = new byte[BlockCompressedStreamConstants.MAX_COMPRESSED_BLOCK_SIZE / 2];
+    	virtualOffset.add(sync.getFilePointer());
+    	int len = sync.read(buffer);
+    	length.add(len);
+    	while (len > 0) {
+    		expected.add(buffer);
+    		buffer = new byte[buffer.length];
+    		len = sync.read(buffer);
+    		length.add(len);
+    		virtualOffset.add(sync.getFilePointer());
+    	}
+    	sync.close();
+    	buffer = new byte[buffer.length];
+    	List<BlockCompressedInputStream> list = new ArrayList<>();
+    	for (int i = 0; i < 8; i++) {
+    		list.add(new AsyncBlockCompressedInputStream(new SeekableFileStream(BAM_FILE)));
+    	}
+    	// read till EOF
+    	for (int i = 0; i < expected.size(); i++) {
+	    	for (BlockCompressedInputStream async : list) {
+	    		len = async.read(buffer);
+	    		Assert.assertEquals(len, (int)length.get(i));
+	    		Assert.assertEquals(buffer[0], expected.get(i)[0]);
+	    	}
+    	}
+    	for (int j = 0; j < 128; j++) {
+	    	// seek and read
+	    	for (BlockCompressedInputStream async : list) {
+	    		async.seek(virtualOffset.get(0));
+	    	}
+	    	for (int i = 0; i < Math.min(expected.size(), 8); i++) {
+		    	for (BlockCompressedInputStream async : list) {
+		    		len = async.read(buffer);
+		    		Assert.assertEquals(len, (int)length.get(i));
+		    		Assert.assertEquals(buffer[0], expected.get(i)[0]);
+		    	}
+	    	}
+    	}
+    	for (BlockCompressedInputStream async : list) {
+    		async.close();
+    	}
+    }
+    @Test
+    public void testFilePointer() throws Exception {    	
+    	BlockCompressedInputStream sync = new BlockCompressedInputStream(BAM_FILE);
+    	Assert.assertEquals(sync.getFilePointer(), 0);
+    	sync.close();
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/util/AsyncBufferedIteratorTest.java b/src/test/java/htsjdk/samtools/util/AsyncBufferedIteratorTest.java
index 817c60e..e35dadc 100644
--- a/src/test/java/htsjdk/samtools/util/AsyncBufferedIteratorTest.java
+++ b/src/test/java/htsjdk/samtools/util/AsyncBufferedIteratorTest.java
@@ -23,10 +23,11 @@
  */
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
-public class AsyncBufferedIteratorTest {
+public class AsyncBufferedIteratorTest extends HtsjdkTest {
     private static class TestCloseableIterator implements CloseableIterator<Integer> {
         private int[] results;
         private volatile int offset = 0;
@@ -73,9 +74,15 @@ public class AsyncBufferedIteratorTest {
         TestCloseableIterator it = new TestCloseableIterator(new int[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 });
         AsyncBufferedIterator<Integer> abi = new AsyncBufferedIterator<Integer>(it, 3, 2, "testBackgroundBlocks");
         Assert.assertNotNull(getThreadWithName("testBackgroundBlocks"));
-        Thread.sleep(10); // how do we write this test and not be subject to race conditions?
+        // how do we write this test and not be subject to race conditions?
         // should have read 9 records: 2*3 in the buffers, and another 3 read but
-        // blocking waiting to be added 
+        // blocking waiting to be added
+        for (int i = 0; i < 64; i++) {
+        	if (it.consumed() >= 9) {
+        		break;
+        	}
+        	Thread.sleep(1);
+        }
         Assert.assertEquals(it.consumed(), 9);
         abi.close();
     }
diff --git a/src/test/java/htsjdk/samtools/util/AsyncWriterTest.java b/src/test/java/htsjdk/samtools/util/AsyncWriterTest.java
index c807cef..1d2c304 100644
--- a/src/test/java/htsjdk/samtools/util/AsyncWriterTest.java
+++ b/src/test/java/htsjdk/samtools/util/AsyncWriterTest.java
@@ -23,10 +23,11 @@
  */
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
-public class AsyncWriterTest {
+public class AsyncWriterTest extends HtsjdkTest {
     private static class MyException extends RuntimeException {
         final Integer item;
         public MyException(Integer item) {
diff --git a/src/test/java/htsjdk/samtools/util/BinaryCodecTest.java b/src/test/java/htsjdk/samtools/util/BinaryCodecTest.java
index 91e1147..b59c952 100644
--- a/src/test/java/htsjdk/samtools/util/BinaryCodecTest.java
+++ b/src/test/java/htsjdk/samtools/util/BinaryCodecTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
@@ -44,7 +45,7 @@ import java.io.IOException;
   * the Broad Institute nor MIT can be responsible for its use, misuse, or functionality.
   */
 
-public class BinaryCodecTest {
+public class BinaryCodecTest extends HtsjdkTest {
 	public final static String TEST_BASENAME = "htsjdk-BinaryCodecTest";
 
     @Test
diff --git a/src/test/java/htsjdk/samtools/util/BlockCompressedFilePointerUtilTest.java b/src/test/java/htsjdk/samtools/util/BlockCompressedFilePointerUtilTest.java
index 850b4bf..38c3ec3 100644
--- a/src/test/java/htsjdk/samtools/util/BlockCompressedFilePointerUtilTest.java
+++ b/src/test/java/htsjdk/samtools/util/BlockCompressedFilePointerUtilTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
@@ -31,8 +32,7 @@ import java.util.ArrayList;
 import java.util.List;
 
 
-public class BlockCompressedFilePointerUtilTest
-{
+public class BlockCompressedFilePointerUtilTest extends HtsjdkTest {
     @Test
     public void basicTest() 
     {
diff --git a/src/test/java/htsjdk/samtools/util/BlockCompressedInputStreamTest.java b/src/test/java/htsjdk/samtools/util/BlockCompressedInputStreamTest.java
new file mode 100644
index 0000000..4c9d532
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/util/BlockCompressedInputStreamTest.java
@@ -0,0 +1,201 @@
+package htsjdk.samtools.util;
+
+import htsjdk.HtsjdkTest;
+import htsjdk.samtools.seekablestream.SeekableFileStream;
+import htsjdk.samtools.util.zip.InflaterFactory;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.io.*;
+import java.net.URL;
+import java.nio.file.Files;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.zip.Inflater;
+
+public class BlockCompressedInputStreamTest extends HtsjdkTest {
+	// random data pulled from /dev/random then compressed using bgzip from tabix
+	private static final File BLOCK_UNCOMPRESSED = new File("src/test/resources/htsjdk/samtools/util/random.bin");
+	private static final File BLOCK_COMPRESSED = new File("src/test/resources/htsjdk/samtools/util/random.bin.gz");
+	private static final long[] BLOCK_COMPRESSED_OFFSETS = new long[] { 0, 0xfc2e, 0x1004d, 0x1fc7b, 0x2009a, };
+	private static final long[] BLOCK_UNCOMPRESSED_END_POSITIONS = new long[] { 64512, 65536, 130048 };
+	@Test
+    public void stream_should_match_uncompressed_stream() throws Exception {
+		byte[] uncompressed = Files.readAllBytes(BLOCK_UNCOMPRESSED.toPath());
+		try (BlockCompressedInputStream stream = new BlockCompressedInputStream(new FileInputStream(BLOCK_COMPRESSED))) {
+			for (int i = 0; i < uncompressed.length; i++) {
+				Assert.assertEquals(stream.read(), Byte.toUnsignedInt(uncompressed[i]));
+			}
+			Assert.assertTrue(stream.endOfBlock());
+		}
+	}
+	@Test
+    public void endOfBlock_should_be_true_only_when_entire_block_is_read() throws Exception {
+		long size = BLOCK_UNCOMPRESSED.length();
+		// input file contains 5 blocks
+		List<Long> offsets = new ArrayList<>();
+		for (int i = 0; i < BLOCK_UNCOMPRESSED_END_POSITIONS.length; i++) {
+			offsets.add(BLOCK_UNCOMPRESSED_END_POSITIONS[i]);
+		}
+		List<Long> endOfBlockTrue = new ArrayList<>();
+		try (BlockCompressedInputStream stream = new BlockCompressedInputStream(new FileInputStream(BLOCK_COMPRESSED))) {
+			for (long i = 0; i < size; i++) {
+				if (stream.endOfBlock()) {
+					endOfBlockTrue.add(i);
+				}
+				stream.read();
+			}
+		}
+		Assert.assertEquals(endOfBlockTrue, offsets);
+	}
+	@Test
+    public void decompression_should_cross_block_boundries() throws Exception {
+		byte[] uncompressed = Files.readAllBytes(BLOCK_UNCOMPRESSED.toPath());
+		try (BlockCompressedInputStream stream = new BlockCompressedInputStream(new FileInputStream(BLOCK_COMPRESSED))) {
+			byte[] decompressed = new byte[uncompressed.length]; 
+			stream.read(decompressed);
+			Assert.assertEquals(decompressed, uncompressed);
+			Assert.assertTrue(stream.endOfBlock());
+			Assert.assertEquals(stream.read(), -1);
+		}
+	}
+	@Test
+    public void seek_should_read_block() throws Exception {
+		byte[] uncompressed = Files.readAllBytes(BLOCK_UNCOMPRESSED.toPath());
+		try (SeekableFileStream sfs = new SeekableFileStream(BLOCK_COMPRESSED)) {
+			try (BlockCompressedInputStream stream = new BlockCompressedInputStream(sfs)) {
+				// seek to the start of the first block
+				for (int i = 0; i < BLOCK_COMPRESSED_OFFSETS.length-1; i++) {
+					stream.seek(BLOCK_COMPRESSED_OFFSETS[i] << 16);
+					Assert.assertEquals(sfs.position(), BLOCK_COMPRESSED_OFFSETS[i + 1]);
+					// check 
+					byte[] actual = new byte[uncompressed.length];
+					int len = stream.read(actual);
+					actual = Arrays.copyOf(actual, len);
+					byte[] expected = Arrays.copyOfRange(uncompressed, uncompressed.length - actual.length, uncompressed.length);
+					Assert.assertEquals(actual, expected);
+				}
+			}
+		}
+	}
+	@Test
+    public void available_should_return_number_of_bytes_left_in_current_block() throws Exception {
+		try (BlockCompressedInputStream stream = new BlockCompressedInputStream(BLOCK_COMPRESSED)) {
+			for (int i = 0; i < BLOCK_UNCOMPRESSED_END_POSITIONS[0]; i++) {
+				Assert.assertEquals(stream.available(), BLOCK_UNCOMPRESSED_END_POSITIONS[0] - i);
+				stream.read();
+			}
+		}
+	}
+
+    private static class CountingInflater extends Inflater {
+        // Must be static unfortunately, since there's no way to reach down into an inflater instance given a stream
+        static int inflateCalls = 0;
+
+        CountingInflater(boolean gzipCompatible) {
+            super(gzipCompatible);
+        }
+        @Override
+        public int inflate(byte[] b, int off, int len) throws java.util.zip.DataFormatException {
+            inflateCalls++;
+            return super.inflate(b, off, len);
+        }
+    }
+
+    private static class CountingInflaterFactory extends InflaterFactory {
+        @Override
+        public Inflater makeInflater( boolean gzipCompatible ) {
+            return new CountingInflater(gzipCompatible);
+        }
+    }
+
+    @FunctionalInterface
+    private interface CheckedExceptionInputStreamSupplier {
+        InputStream get() throws IOException;
+    }
+
+    private List<String> writeTempBlockCompressedFileForInflaterTest( final File tempFile ) throws IOException {
+        final List<String> linesWritten = new ArrayList<>();
+        try ( final BlockCompressedOutputStream bcos = new BlockCompressedOutputStream(tempFile, 5) ) {
+            String s = "Hi, Mom!\n";
+            bcos.write(s.getBytes()); //Call 1
+            linesWritten.add(s);
+            s = "Hi, Dad!\n";
+            bcos.write(s.getBytes()); //Call 2
+            linesWritten.add(s);
+            bcos.flush();
+            final StringBuilder sb = new StringBuilder(BlockCompressedStreamConstants.DEFAULT_UNCOMPRESSED_BLOCK_SIZE * 2);
+            s = "1234567890123456789012345678901234567890123456789012345678901234567890\n";
+            while ( sb.length() <= BlockCompressedStreamConstants.DEFAULT_UNCOMPRESSED_BLOCK_SIZE ) {
+                sb.append(s);
+                linesWritten.add(s);
+            }
+            bcos.write(sb.toString().getBytes()); //Call 3
+        }
+        return linesWritten;
+    }
+
+    @DataProvider(name = "customInflaterInput")
+    public Object[][] customInflateInput() throws IOException {
+        final File tempFile = File.createTempFile("testCustomInflater.", ".bam");
+        tempFile.deleteOnExit();
+        final List<String> linesWritten = writeTempBlockCompressedFileForInflaterTest(tempFile);
+
+        final InflaterFactory countingInflaterFactory = new CountingInflaterFactory();
+
+        return new Object[][]{
+                // set the default InflaterFactory to a CountingInflaterFactory
+                {(CheckedExceptionInputStreamSupplier) () -> new BlockCompressedInputStream(new FileInputStream(tempFile), false), linesWritten, 4, countingInflaterFactory},
+                {(CheckedExceptionInputStreamSupplier) () -> new BlockCompressedInputStream(tempFile), linesWritten, 4, countingInflaterFactory},
+                {(CheckedExceptionInputStreamSupplier) () -> new AsyncBlockCompressedInputStream(tempFile), linesWritten, 4, countingInflaterFactory},
+                {(CheckedExceptionInputStreamSupplier) () -> new BlockCompressedInputStream(new URL("http://broadinstitute.github.io/picard/testdata/index_test.bam")), null, 21, countingInflaterFactory},
+                // provide a CountingInflaterFactory explicitly
+                {(CheckedExceptionInputStreamSupplier) () -> new BlockCompressedInputStream(new FileInputStream(tempFile), false, countingInflaterFactory), linesWritten, 4, null},
+                {(CheckedExceptionInputStreamSupplier) () -> new BlockCompressedInputStream(tempFile, countingInflaterFactory), linesWritten, 4, null},
+                {(CheckedExceptionInputStreamSupplier) () -> new AsyncBlockCompressedInputStream(tempFile, countingInflaterFactory), linesWritten, 4, null},
+                {(CheckedExceptionInputStreamSupplier) () -> new BlockCompressedInputStream(new URL("http://broadinstitute.github.io/picard/testdata/index_test.bam"), countingInflaterFactory), null, 21, null}
+        };
+    }
+
+    @Test(dataProvider = "customInflaterInput", singleThreaded = true)
+    public void testCustomInflater(final CheckedExceptionInputStreamSupplier bcisSupplier,
+                                   final List<String> expectedOutput,
+                                   final int expectedInflateCalls,
+                                   final InflaterFactory customDefaultInflaterFactory) throws Exception
+    {
+        // clear inflate call counter in CountingInflater
+        CountingInflater.inflateCalls = 0;
+
+        // If requested, set the global default InflaterFactory to a custom factory. Otherwise, set it to the default.
+        if ( customDefaultInflaterFactory != null )  {
+            BlockGunzipper.setDefaultInflaterFactory(customDefaultInflaterFactory);
+        }
+        else {
+            BlockGunzipper.setDefaultInflaterFactory(new InflaterFactory());
+        }
+
+        try (final BufferedReader reader = new BufferedReader(new InputStreamReader(bcisSupplier.get()))) {
+            String line;
+            for (int i = 0; (line = reader.readLine()) != null; ++i) {
+                // check expected output, if provided
+                if (expectedOutput != null) {
+                    Assert.assertEquals(line + "\n", expectedOutput.get(i));
+                }
+            }
+        }
+
+        // verify custom inflater was used by checking number of inflate calls
+        Assert.assertEquals(CountingInflater.inflateCalls, expectedInflateCalls, "inflate calls");
+
+        // Reset the default InflaterFactory back to the default value
+        BlockGunzipper.setDefaultInflaterFactory(new InflaterFactory());
+    }
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testSetNullInflaterFactory() {
+        // test catching null InflaterFactory
+        BlockGunzipper.setDefaultInflaterFactory(null);
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/util/BlockCompressedOutputStreamTest.java b/src/test/java/htsjdk/samtools/util/BlockCompressedOutputStreamTest.java
index 8a0d97f..35175cd 100644
--- a/src/test/java/htsjdk/samtools/util/BlockCompressedOutputStreamTest.java
+++ b/src/test/java/htsjdk/samtools/util/BlockCompressedOutputStreamTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.FileTruncatedException;
 import htsjdk.samtools.util.zip.DeflaterFactory;
 import org.testng.Assert;
@@ -39,7 +40,7 @@ import java.util.List;
 import java.util.Random;
 import java.util.zip.Deflater;
 
-public class BlockCompressedOutputStreamTest {
+public class BlockCompressedOutputStreamTest extends HtsjdkTest {
 
     private static final String HTSJDK_TRIBBLE_RESOURCES = "src/test/resources/htsjdk/tribble/";
 
@@ -80,6 +81,7 @@ public class BlockCompressedOutputStreamTest {
         Assert.assertEquals(bcis2.read(buffer), available, "Should read to end of block");
         Assert.assertTrue(bcis2.endOfBlock(), "Should be at end of block");
         bcis2.close();
+        Assert.assertEquals(bcis2.read(buffer), -1, "Should be end of file");
     }
 
     @DataProvider(name = "seekReadExceptionsData")
@@ -88,24 +90,32 @@ public class BlockCompressedOutputStreamTest {
         return new Object[][]{
                 {HTSJDK_TRIBBLE_RESOURCES + "vcfexample.vcf.truncated.gz", FileTruncatedException.class,
                         BlockCompressedInputStream.PREMATURE_END_MSG + System.getProperty("user.dir") + "/" +
-                                HTSJDK_TRIBBLE_RESOURCES + "vcfexample.vcf.truncated.gz", true, false, 0},
+                                HTSJDK_TRIBBLE_RESOURCES + "vcfexample.vcf.truncated.gz", true, false, false, 0},
                 {HTSJDK_TRIBBLE_RESOURCES + "vcfexample.vcf.truncated.hdr.gz", IOException.class,
                         BlockCompressedInputStream.INCORRECT_HEADER_SIZE_MSG + System.getProperty("user.dir") + "/" +
-                                HTSJDK_TRIBBLE_RESOURCES + "vcfexample.vcf.truncated.hdr.gz", true, false, 0},
+                                HTSJDK_TRIBBLE_RESOURCES + "vcfexample.vcf.truncated.hdr.gz", true, false, false, 0},
                 {HTSJDK_TRIBBLE_RESOURCES + "vcfexample.vcf.gz", IOException.class,
-                        BlockCompressedInputStream.CANNOT_SEEK_STREAM_MSG, false, true, 0},
+                        BlockCompressedInputStream.CANNOT_SEEK_STREAM_MSG, false, true, false, 0},
+                {HTSJDK_TRIBBLE_RESOURCES + "vcfexample.vcf.gz", IOException.class,
+                        BlockCompressedInputStream.CANNOT_SEEK_CLOSED_STREAM_MSG, false, true, true, 0},
                 {HTSJDK_TRIBBLE_RESOURCES + "vcfexample.vcf.gz", IOException.class,
                         BlockCompressedInputStream.INVALID_FILE_PTR_MSG + 1000 + " for " + System.getProperty("user.dir") + "/" +
-                                HTSJDK_TRIBBLE_RESOURCES + "vcfexample.vcf.gz", true, true, 1000 }
+                                HTSJDK_TRIBBLE_RESOURCES + "vcfexample.vcf.gz", true, true, false, 1000 }
         };
     }
 
     @Test(dataProvider = "seekReadExceptionsData")
-    public void testSeekReadExceptions(final String filePath, final Class c, final String msg, final boolean isFile, final boolean isSeek, final int pos) throws Exception {
+    public void testSeekReadExceptions(final String filePath, final Class c, final String msg, final boolean isFile, final boolean isSeek, final boolean isClosed,
+                                       final int pos) throws Exception {
 
         final BlockCompressedInputStream bcis = isFile ?
                 new BlockCompressedInputStream(new File(filePath)) :
                 new BlockCompressedInputStream(new FileInputStream(filePath));
+
+        if ( isClosed ) {
+            bcis.close();
+        }
+
         boolean haveException = false;
         try {
             if ( isSeek ) {
@@ -170,8 +180,8 @@ public class BlockCompressedOutputStreamTest {
         final int[] deflateCalls = {0}; //Note: using and array is a HACK to fool the compiler
 
         class MyDeflater extends Deflater{
-            MyDeflater(int level, boolean nowrap){
-                super(level, nowrap);
+            MyDeflater(int level, boolean gzipCompatible){
+                super(level, gzipCompatible);
             }
             @Override
             public int deflate(byte[] b, int off, int len) {
@@ -181,8 +191,9 @@ public class BlockCompressedOutputStreamTest {
 
         }
         final DeflaterFactory myDeflaterFactory= new DeflaterFactory(){
-            public Deflater makeDeflater(final int compressionLevel, final boolean nowrap) {
-                return new MyDeflater(compressionLevel, nowrap);
+            @Override
+            public Deflater makeDeflater(final int compressionLevel, final boolean gzipCompatible) {
+                return new MyDeflater(compressionLevel, gzipCompatible);
             }
         };
         final List<String> linesWritten = new ArrayList<>();
@@ -210,5 +221,6 @@ public class BlockCompressedOutputStreamTest {
         }
         bcis.close();
         Assert.assertEquals(deflateCalls[0], 3, "deflate calls");
+        Assert.assertEquals(reader.readLine(), null);
     }
 }
diff --git a/src/test/java/htsjdk/samtools/util/BlockCompressedTerminatorTest.java b/src/test/java/htsjdk/samtools/util/BlockCompressedTerminatorTest.java
index 5b58372..4a14bd9 100644
--- a/src/test/java/htsjdk/samtools/util/BlockCompressedTerminatorTest.java
+++ b/src/test/java/htsjdk/samtools/util/BlockCompressedTerminatorTest.java
@@ -23,37 +23,103 @@
  */
 package htsjdk.samtools.util;
 
+import com.google.common.jimfs.Configuration;
+import com.google.common.jimfs.Jimfs;
+import htsjdk.HtsjdkTest;
+import htsjdk.samtools.SeekableByteChannelFromBuffer;
 import org.testng.Assert;
+import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
+import java.io.EOFException;
 import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.channels.SeekableByteChannel;
+import java.nio.file.FileSystem;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Arrays;
 
 /**
  * @author alecw at broadinstitute.org
  */
-public class BlockCompressedTerminatorTest {
+public class BlockCompressedTerminatorTest extends HtsjdkTest {
     private static final File TEST_DATA_DIR = new File("src/test/resources/htsjdk/samtools/util");
+    private static final File DEFECTIVE = new File(TEST_DATA_DIR, "defective_bgzf.bam");
+    private static final File NO_TERMINATOR = new File(TEST_DATA_DIR, "no_bgzf_terminator.bam");
 
-    @Test
-    public void testFileWithTerminator() throws Exception {
+    @DataProvider
+    public Object[][] getFiles() throws IOException {
+        return new Object[][]{
+                {getValidCompressedFile(), BlockCompressedInputStream.FileTermination.HAS_TERMINATOR_BLOCK},
+                {NO_TERMINATOR, BlockCompressedInputStream.FileTermination.HAS_HEALTHY_LAST_BLOCK},
+                {DEFECTIVE, BlockCompressedInputStream.FileTermination.DEFECTIVE}
+        };
+    }
+
+    @Test( dataProvider = "getFiles")
+    public void testCheckTerminationForFiles(File compressedFile, BlockCompressedInputStream.FileTermination expected) throws IOException {
+        Assert.assertEquals(BlockCompressedInputStream.checkTermination(compressedFile), expected);
+    }
+
+    @Test( dataProvider = "getFiles")
+    public void testCheckTerminationForPaths(File compressedFile, BlockCompressedInputStream.FileTermination expected) throws IOException {
+        try(FileSystem fs = Jimfs.newFileSystem("test", Configuration.unix())){
+            final Path compressedFileInJimfs = Files.copy(compressedFile.toPath(), fs.getPath("something"));
+            Assert.assertEquals(BlockCompressedInputStream.checkTermination(compressedFileInJimfs), expected);
+        }
+    }
+
+    @Test( dataProvider = "getFiles")
+    public void testCheckTerminationForSeekableByteChannels(File compressedFile, BlockCompressedInputStream.FileTermination expected) throws IOException {
+        try(SeekableByteChannel channel = Files.newByteChannel(compressedFile.toPath())){
+            Assert.assertEquals(BlockCompressedInputStream.checkTermination(channel), expected);
+        }
+    }
+
+    @Test(dataProvider = "getFiles")
+    public void testChannelPositionIsRestored(File compressedFile, BlockCompressedInputStream.FileTermination expected) throws IOException {
+        final long position = 50;
+        try(SeekableByteChannel channel = Files.newByteChannel(compressedFile.toPath())){
+            channel.position(position);
+            Assert.assertEquals(channel.position(), position);
+            Assert.assertEquals(BlockCompressedInputStream.checkTermination(channel), expected);
+            Assert.assertEquals(channel.position(), position);
+        }
+    }
+
+    private static File getValidCompressedFile() throws IOException {
         final File tmpCompressedFile = File.createTempFile("test.", ".bgzf");
         tmpCompressedFile.deleteOnExit();
         final BlockCompressedOutputStream os = new BlockCompressedOutputStream(tmpCompressedFile);
         os.write("Hi, Mom!\n".getBytes());
         os.close();
-        Assert.assertEquals(BlockCompressedInputStream.checkTermination(tmpCompressedFile),
-                BlockCompressedInputStream.FileTermination.HAS_TERMINATOR_BLOCK);
+        return tmpCompressedFile;
     }
 
     @Test
-    public void testValidFileWithoutTerminator() throws Exception {
-        Assert.assertEquals(BlockCompressedInputStream.checkTermination(new File(TEST_DATA_DIR, "no_bgzf_terminator.bam")),
-                BlockCompressedInputStream.FileTermination.HAS_HEALTHY_LAST_BLOCK);
+    public void testReadFullyReadsBytesCorrectly() throws IOException {
+        try(final SeekableByteChannel channel = Files.newByteChannel(DEFECTIVE.toPath())){
+            final ByteBuffer readBuffer = ByteBuffer.allocate(10);
+            Assert.assertTrue(channel.size() > readBuffer.capacity());
+            BlockCompressedInputStream.readFully(channel, readBuffer);
+
+            ByteBuffer expected = ByteBuffer.allocate(10);
+            channel.position(0).read(expected);
+            Assert.assertEquals(readBuffer.array(), expected.array());
+        }
     }
 
-    @Test
-    public void testDefectiveFile() throws Exception {
-        Assert.assertEquals(BlockCompressedInputStream.checkTermination(new File(TEST_DATA_DIR, "defective_bgzf.bam")),
-                BlockCompressedInputStream.FileTermination.DEFECTIVE);
+    @Test(expectedExceptions = EOFException.class)
+    public void testReadFullyThrowWhenItCantReadEnough() throws IOException {
+        try(final SeekableByteChannel channel = Files.newByteChannel(DEFECTIVE.toPath())){
+            final ByteBuffer readBuffer = ByteBuffer.allocate(1000);
+            Assert.assertTrue(channel.size() < readBuffer.capacity());
+            BlockCompressedInputStream.readFully(channel, readBuffer);
+        }
     }
+
+
+
 }
diff --git a/src/test/java/htsjdk/samtools/util/StringLineReaderTest.java b/src/test/java/htsjdk/samtools/util/BufferedLineReaderTest.java
similarity index 73%
rename from src/test/java/htsjdk/samtools/util/StringLineReaderTest.java
rename to src/test/java/htsjdk/samtools/util/BufferedLineReaderTest.java
index 9919f89..00e35d2 100644
--- a/src/test/java/htsjdk/samtools/util/StringLineReaderTest.java
+++ b/src/test/java/htsjdk/samtools/util/BufferedLineReaderTest.java
@@ -1,7 +1,7 @@
 /*
- * The MIT License
+ * The MIT License (MIT)
  *
- * Copyright (c) 2009 The Broad Institute
+ * Copyright (c) 2017 Daniel Gomez-Sanchez
  *
  * Permission is hereby granted, free of charge, to any person obtaining a copy
  * of this software and associated documentation files (the "Software"), to deal
@@ -10,23 +10,27 @@
  * copies of the Software, and to permit persons to whom the Software is
  * furnished to do so, subject to the following conditions:
  *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
  *
  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
  * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
  */
+
 package htsjdk.samtools.util;
 
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
-public class StringLineReaderTest {
+/**
+ * @author Daniel Gomez-Sanchez (magicDGS)
+ */
+public class BufferedLineReaderTest {
 
     private static final String[] TERMINATORS = {"\r", "\n", "\r\n"};
     private static final boolean[] LAST_LINE_TERMINATED = {false, true};
@@ -39,14 +43,15 @@ public class StringLineReaderTest {
      * Test a bunch of combinations instead of writing a method for each.
      */
     @Test
-    public void testBasic() {
+    public void testFromString() {
         for (final String terminator : TERMINATORS) {
             for (final boolean lastLineTerminated : LAST_LINE_TERMINATED) {
-                for (final EmptyLineState emptyLineState : EmptyLineState.values()) {
+                for (final EmptyLineState emptyLineState : EmptyLineState
+                        .values()) {
                     if (emptyLineState == EmptyLineState.COMPLETELY_EMPTY) {
-                        emptyTestHelper(terminator, lastLineTerminated);
+                        fromStringEmptyTestHelper(terminator, lastLineTerminated);
                     } else {
-                        testHelper(terminator, lastLineTerminated, emptyLineState);
+                        fromStringTestHelper(terminator, lastLineTerminated, emptyLineState);
                     }
                 }
             }
@@ -58,19 +63,19 @@ public class StringLineReaderTest {
      * @param terminator what the terminator should be in the input
      * @param lastLineTerminated does the input have a terminator
      */
-    private void emptyTestHelper(final String terminator, final boolean lastLineTerminated) {
+    private void fromStringEmptyTestHelper(final String terminator, final boolean lastLineTerminated) {
         final String input;
         if (lastLineTerminated) {
             input = terminator;
         } else {
             input = "";
         }
-        final StringLineReader slr = new StringLineReader(input);
-        final String output = slr.readLine();
+        final BufferedLineReader blr = BufferedLineReader.fromString(input);
+        final String output = blr.readLine();
         if (lastLineTerminated) {
             Assert.assertEquals(output, "");
         }
-        Assert.assertNull(slr.readLine());
+        Assert.assertNull(blr.readLine());
     }
 
     /**
@@ -79,7 +84,7 @@ public class StringLineReaderTest {
      * @param lastLineTerminated should the input end with a terminator
      * @param emptyLineState where in the input should an empty line be.
      */
-    private void testHelper(final String terminator, final boolean lastLineTerminated, final EmptyLineState emptyLineState) {
+    private void fromStringTestHelper(final String terminator, final boolean lastLineTerminated, final EmptyLineState emptyLineState) {
         final String[] lines = new String[3];
         if (emptyLineState == EmptyLineState.FIRST_LINE) {
             lines[0] = "";
@@ -89,7 +94,7 @@ public class StringLineReaderTest {
             lines[0] = "Hi, Dad?";
             lines[1] = "Hi, Mom!";
             lines[2] = "";
-        } else  if (emptyLineState == EmptyLineState.MIDDLE_LINE) {
+        } else if (emptyLineState == EmptyLineState.MIDDLE_LINE) {
             lines[0] = "Hi, Dad?";
             lines[1] = "";
             lines[2] = "Hi, Mom!";
@@ -98,22 +103,24 @@ public class StringLineReaderTest {
         if (lastLineTerminated) {
             input = input.concat(terminator);
         }
-        final StringLineReader slr = new StringLineReader(input);
+        final BufferedLineReader blr = BufferedLineReader.fromString(input);
         for (int i = 0; i < lines.length - 1; ++i) {
-            final String s = slr.readLine();
+            final String s = blr.readLine();
             String expected = lines[i];
             Assert.assertEquals(s, expected);
         }
 
         // Last line may need to be handled specially
-        String s = slr.readLine();
-        if (!lastLineTerminated && emptyLineState == EmptyLineState.LAST_LINE) {
+        String s = blr.readLine();
+        if (!lastLineTerminated
+                && emptyLineState == EmptyLineState.LAST_LINE) {
             Assert.assertNull(s);
         } else {
             String expected = lines[lines.length - 1];
             Assert.assertEquals(s, expected);
         }
-        s = slr.readLine();
+        s = blr.readLine();
         Assert.assertNull(s);
     }
-}
+
+}
\ No newline at end of file
diff --git a/src/test/java/htsjdk/samtools/util/CigarElementUnitTest.java b/src/test/java/htsjdk/samtools/util/CigarElementUnitTest.java
new file mode 100644
index 0000000..23607ac
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/util/CigarElementUnitTest.java
@@ -0,0 +1,43 @@
+package htsjdk.samtools.util;
+
+
+import htsjdk.HtsjdkTest;
+import htsjdk.samtools.CigarElement;
+import htsjdk.samtools.CigarOperator;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+public class CigarElementUnitTest extends HtsjdkTest {
+
+    @Test(expectedExceptions = IllegalArgumentException.class)
+    public void testNegativeLengthCheck(){
+        final CigarElement element = new CigarElement(-1, CigarOperator.M);
+    }
+
+
+    @DataProvider
+    public Object[][] elementsForEquals() {
+        final CigarElement mElement = new CigarElement(10, CigarOperator.M);
+        return new Object[][] {
+                // same object
+                {mElement, mElement, true},
+                // different equal objects
+                {mElement, new CigarElement(mElement.getLength(), mElement.getOperator()), true},
+                // different lengths
+                {mElement, new CigarElement(mElement.getLength() + 1, mElement.getOperator()), false},
+                // different operators
+                {mElement, new CigarElement(mElement.getLength(), CigarOperator.X), false},
+                // different class
+                {mElement, mElement.toString(), false}
+        };
+    }
+
+    @Test(dataProvider = "elementsForEquals")
+    public void testEqualsAndHashCode(final CigarElement element, final Object other, final boolean isEquals) {
+        Assert.assertEquals(element.equals(other), isEquals);
+        if (isEquals) {
+            Assert.assertEquals(element.hashCode(), other.hashCode());
+        }
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/util/CigarUtilTest.java b/src/test/java/htsjdk/samtools/util/CigarUtilTest.java
index 0aca395..6fe7b71 100644
--- a/src/test/java/htsjdk/samtools/util/CigarUtilTest.java
+++ b/src/test/java/htsjdk/samtools/util/CigarUtilTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.Cigar;
 import htsjdk.samtools.CigarElement;
 import htsjdk.samtools.TextCigarCodec;
@@ -40,7 +41,7 @@ import java.util.List;
  *
  * @author Martha Borkan  mborkan at broadinstitute.org
  */
-public class CigarUtilTest {
+public class CigarUtilTest extends HtsjdkTest {
 
    @Test(dataProvider="clipData")
     public void basicTest(final String testName, final int start, final String inputCigar, final boolean negativeStrand,
diff --git a/src/test/java/htsjdk/samtools/util/CloseableIteratorTest.java b/src/test/java/htsjdk/samtools/util/CloseableIteratorTest.java
index b96d1f6..102b824 100644
--- a/src/test/java/htsjdk/samtools/util/CloseableIteratorTest.java
+++ b/src/test/java/htsjdk/samtools/util/CloseableIteratorTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
@@ -8,7 +9,7 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.stream.Collectors;
 
-public class CloseableIteratorTest {
+public class CloseableIteratorTest extends HtsjdkTest {
     @Test
     public void testToList() {
         final List<Integer> expected = Arrays.asList(1,2,3,4,5);
diff --git a/src/test/java/htsjdk/samtools/util/CodeUtilTest.java b/src/test/java/htsjdk/samtools/util/CodeUtilTest.java
index e8b9957..c4978c1 100644
--- a/src/test/java/htsjdk/samtools/util/CodeUtilTest.java
+++ b/src/test/java/htsjdk/samtools/util/CodeUtilTest.java
@@ -1,9 +1,10 @@
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
-public class CodeUtilTest {
+public class CodeUtilTest extends HtsjdkTest {
 
     @Test
     public void getOrElseTest() {
diff --git a/src/test/java/htsjdk/samtools/util/ComparableTupleTest.java b/src/test/java/htsjdk/samtools/util/ComparableTupleTest.java
index 7e8b082..708058d 100644
--- a/src/test/java/htsjdk/samtools/util/ComparableTupleTest.java
+++ b/src/test/java/htsjdk/samtools/util/ComparableTupleTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.variant.variantcontext.Allele;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
@@ -8,7 +9,7 @@ import org.testng.annotations.Test;
 /**
  * Created by farjoun on 1/28/16.
  */
-public class ComparableTupleTest {
+public class ComparableTupleTest extends HtsjdkTest {
 
     private enum Tenum {
         Hi,
diff --git a/src/test/java/htsjdk/samtools/util/CoordSpanInputSteamTest.java b/src/test/java/htsjdk/samtools/util/CoordSpanInputSteamTest.java
index 1b90882..07de158 100644
--- a/src/test/java/htsjdk/samtools/util/CoordSpanInputSteamTest.java
+++ b/src/test/java/htsjdk/samtools/util/CoordSpanInputSteamTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.seekablestream.ByteArraySeekableStream;
 import org.testng.Assert;
 import org.testng.annotations.Test;
@@ -15,7 +16,7 @@ import java.util.Random;
 /**
  * Created by vadim on 25/03/2015.
  */
-public class CoordSpanInputSteamTest {
+public class CoordSpanInputSteamTest extends HtsjdkTest {
 
     @Test
     public void test_first_3_bytes() throws IOException {
diff --git a/src/test/java/htsjdk/samtools/util/DateParserTest.java b/src/test/java/htsjdk/samtools/util/DateParserTest.java
new file mode 100644
index 0000000..11ab2a6
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/util/DateParserTest.java
@@ -0,0 +1,151 @@
+// DateParser.java
+// $Id: DateParser.java,v 1.3 2001/01/04 13:26:19 bmahe Exp $
+// (c) COPYRIGHT MIT, INRIA and Keio, 2000.
+
+/*
+W3C IPR SOFTWARE NOTICE
+
+Copyright 1995-1998 World Wide Web Consortium, (Massachusetts Institute of
+Technology, Institut National de Recherche en Informatique et en
+Automatique, Keio University). All Rights Reserved.
+http://www.w3.org/Consortium/Legal/
+
+This W3C work (including software, documents, or other related items) is
+being provided by the copyright holders under the following license. By
+obtaining, using and/or copying this work, you (the licensee) agree that you
+have read, understood, and will comply with the following terms and
+conditions:
+
+Permission to use, copy, and modify this software and its documentation,
+with or without modification,  for any purpose and without fee or royalty is
+hereby granted, provided that you include the following on ALL copies of the
+software and documentation or portions thereof, including modifications,
+that you make:
+
+  1. The full text of this NOTICE in a location viewable to users of the
+     redistributed or derivative work.
+  2. Any pre-existing intellectual property disclaimers, notices, or terms
+     and conditions. If none exist, a short notice of the following form
+     (hypertext is preferred, text is permitted) should be used within the
+     body of any redistributed or derivative code: "Copyright World Wide
+     Web Consortium, (Massachusetts Institute of Technology, Institut
+     National de Recherche en Informatique et en Automatique, Keio
+     University). All Rights Reserved. http://www.w3.org/Consortium/Legal/"
+  3. Notice of any changes or modifications to the W3C files, including the
+     date changes were made. (We recommend you provide URIs to the location
+     from which the code is derived).
+
+In addition, creators of derivitive works must include the full text of this
+NOTICE in a location viewable to users of the derivitive work.
+
+THIS SOFTWARE AND DOCUMENTATION IS PROVIDED "AS IS," AND COPYRIGHT HOLDERS
+MAKE NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED, INCLUDING BUT NOT
+LIMITED TO, WARRANTIES OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR
+PURPOSE OR THAT THE USE OF THE SOFTWARE OR DOCUMENTATION WILL NOT INFRINGE
+ANY THIRD PARTY PATENTS, COPYRIGHTS, TRADEMARKS OR OTHER RIGHTS.
+
+COPYRIGHT HOLDERS WILL NOT BE LIABLE FOR ANY DIRECT, INDIRECT, SPECIAL OR
+CONSEQUENTIAL DAMAGES ARISING OUT OF ANY USE OF THE SOFTWARE OR
+DOCUMENTATION.
+
+The name and trademarks of copyright holders may NOT be used in advertising
+or publicity pertaining to the software without specific, written prior
+permission. Title to copyright in this software and any associated
+documentation will at all times remain with copyright holders.
+
+____________________________________
+
+This formulation of W3C's notice and license became active on August 14
+1998. See the older formulation for the policy prior to this date. Please
+see our Copyright FAQ for common questions about using materials from our
+site, including specific terms and conditions for packages like libwww,
+Amaya, and Jigsaw. Other questions about this notice can be directed to
+site-policy at w3.org .
+
+
+
+
+webmaster
+(last updated 14-Aug-1998)
+
+ */
+
+package htsjdk.samtools.util;
+
+import htsjdk.HtsjdkTest;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+import java.util.Date;
+
+/**
+ * NOTE: This code has been taken from w3.org, and modified slightly to handle timezones of the form [-+]DDDD,
+ * and also to fix a bug in the application of time zone to the parsed date.
+ *
+ * Date parser for ISO 8601 format
+ * http://www.w3.org/TR/1998/NOTE-datetime-19980827
+ * @version $Revision: 1.3 $
+ * @author  bmahe at w3.org
+ */
+
+public class DateParserTest extends HtsjdkTest {
+
+    private static void test(final String isodate) {
+        Date date = DateParser.parse(isodate);
+        final String isodateRoundTrip = DateParser.getIsoDate(date);
+
+        final Date orig = DateParser.parse(isodate);
+        final Date roundTrip = DateParser.parse(isodateRoundTrip);
+
+        assertDatesAreClose(orig, roundTrip);
+    }
+
+    private static void test(final Date date) {
+        String isodate;
+        isodate = DateParser.getIsoDate(date);
+        final Date dateRoundTrip = DateParser.parse(isodate);
+
+        assertDatesAreClose(date, dateRoundTrip);
+        Assert.assertTrue(Math.abs(date.getTime() - dateRoundTrip.getTime()) < 10);
+    }
+
+    @DataProvider(name="dateDate")
+    public Object[][] dateData() {
+        return new Object[][]{
+                {"1997-07-16T19:20:30.45-02:00"},
+                {"1997-07-16T19:20:30+01:00"},
+                {"1997-07-16T19:20:30+01:00"},
+                {"1997-07-16T19:20"},
+                {"1997-07-16"},
+                {"1997-07"},
+                {"1997"},
+        };
+    }
+
+    @Test(dataProvider = "dateDate")
+    public static void testString(final String string) {
+        test(string);
+    }
+
+    @Test(dataProvider = "dateDate")
+    public static void testDates(final String string) {
+        test(DateParser.parse(string));
+    }
+
+    @Test
+    public static void testDate() {
+        test(new Date());
+    }
+
+    public static void assertDatesAreClose(final Date lhs, final Date rhs) {
+        Assert.assertEquals(lhs.getYear(), rhs.getYear());
+        Assert.assertEquals(lhs.getMonth(), rhs.getMonth());
+        Assert.assertEquals(lhs.getDate(), rhs.getDate());
+        Assert.assertEquals(lhs.getDay(), rhs.getDay());
+        Assert.assertEquals(lhs.getHours(), rhs.getHours());
+        Assert.assertEquals(lhs.getMinutes(), rhs.getMinutes());
+        Assert.assertEquals(lhs.getSeconds(), rhs.getSeconds());
+        Assert.assertEquals(lhs.getTimezoneOffset(), rhs.getTimezoneOffset());
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/util/DiskBackedQueueTest.java b/src/test/java/htsjdk/samtools/util/DiskBackedQueueTest.java
index 88b05e2..9596652 100644
--- a/src/test/java/htsjdk/samtools/util/DiskBackedQueueTest.java
+++ b/src/test/java/htsjdk/samtools/util/DiskBackedQueueTest.java
@@ -50,7 +50,9 @@ public class DiskBackedQueueTest extends SortingCollectionTest {
         };
     }
 
+    @Override
     @BeforeMethod void setup() { resetTmpDir(); }
+    @Override
     @AfterMethod void tearDown() { resetTmpDir(); }
 
     /**
@@ -59,6 +61,7 @@ public class DiskBackedQueueTest extends SortingCollectionTest {
      * @param numStringsToGenerate
      * @param maxRecordsInRam
      */
+    @Override
     @Test(dataProvider = "diskBackedQueueProvider")
     public void testPositive(final String testName, final int numStringsToGenerate, final int maxRecordsInRam) {
         final String[] strings = new String[numStringsToGenerate];
diff --git a/src/test/java/htsjdk/samtools/util/EdgeReadIteratorTest.java b/src/test/java/htsjdk/samtools/util/EdgeReadIteratorTest.java
index a5459c6..3d46d2a 100644
--- a/src/test/java/htsjdk/samtools/util/EdgeReadIteratorTest.java
+++ b/src/test/java/htsjdk/samtools/util/EdgeReadIteratorTest.java
@@ -34,7 +34,7 @@ import java.io.InputStreamReader;
 import java.util.Arrays;
 import java.util.List;
 
-import static org.junit.Assert.assertEquals;
+import static org.testng.Assert.assertEquals;
 
 /**
  * Tests check that for each alignment block of processed reads, iterator returns a <code>EdgingRecordAndOffset</code>
@@ -80,16 +80,16 @@ public class EdgeReadIteratorTest extends AbstractLocusIteratorTestTemplate {
         int pos = 1;
         final int coveredStart = 165;
         for (final AbstractLocusInfo li : sli) {
-            Assert.assertEquals(li.getPosition(), pos++);
+            assertEquals(li.getPosition(), pos++);
             final int expectedReads;
             if (li.getPosition() == coveredStart || li.getPosition() == coveredEnd) {
                 expectedReads = 2;
             } else {
                 expectedReads = 0;
             }
-            Assert.assertEquals(li.getRecordAndOffsets().size(), expectedReads);
+            assertEquals(li.getRecordAndOffsets().size(), expectedReads);
         }
-        Assert.assertEquals(pos, 100001);
+        assertEquals(pos, 100001);
     }
 
     /**
@@ -196,16 +196,16 @@ public class EdgeReadIteratorTest extends AbstractLocusIteratorTestTemplate {
 
         i = 0;
         for (final AbstractLocusInfo<EdgingRecordAndOffset> li : sli) {
-            Assert.assertEquals(li.getRecordAndOffsets().size(), expectedDepths[i]);
-            Assert.assertEquals(li.getPosition(), expectedReferencePositions[i]);
-            Assert.assertEquals(li.getRecordAndOffsets().size(), expectedReadOffsets[i].length);
+            assertEquals(li.getRecordAndOffsets().size(), expectedDepths[i]);
+            assertEquals(li.getPosition(), expectedReferencePositions[i]);
+            assertEquals(li.getRecordAndOffsets().size(), expectedReadOffsets[i].length);
             for (int j = 0; j < expectedReadOffsets[i].length; ++j) {
-                Assert.assertEquals(li.getRecordAndOffsets().get(j).getOffset(), expectedReadOffsets[i][j]);
+                assertEquals(li.getRecordAndOffsets().get(j).getOffset(), expectedReadOffsets[i][j]);
                 if (start.contains(li.getPosition() - 1)) {
-                    Assert.assertEquals(li.getRecordAndOffsets().get(j).getType(), EdgingRecordAndOffset.Type.BEGIN);
+                    assertEquals(li.getRecordAndOffsets().get(j).getType(), EdgingRecordAndOffset.Type.BEGIN);
                 }
                 if (end.contains(li.getPosition() - 1)) {
-                    Assert.assertEquals(li.getRecordAndOffsets().get(j).getType(), EdgingRecordAndOffset.Type.END);
+                    assertEquals(li.getRecordAndOffsets().get(j).getType(), EdgingRecordAndOffset.Type.END);
                 }
             }
             ++i;
diff --git a/src/test/java/htsjdk/samtools/util/EdgingRecordAndOffsetTest.java b/src/test/java/htsjdk/samtools/util/EdgingRecordAndOffsetTest.java
index a4f6478..09c281b 100644
--- a/src/test/java/htsjdk/samtools/util/EdgingRecordAndOffsetTest.java
+++ b/src/test/java/htsjdk/samtools/util/EdgingRecordAndOffsetTest.java
@@ -24,22 +24,20 @@
 package htsjdk.samtools.util;
 
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SAMFileHeader;
 import htsjdk.samtools.SAMRecord;
+import org.testng.Assert;
 import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
 
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotSame;
-
 /**
  * 
  * @author Mariia_Zueva at epam.com, EPAM Systems, Inc. <www.epam.com>
  *
  */
 
-public class EdgingRecordAndOffsetTest {
+public class EdgingRecordAndOffsetTest extends HtsjdkTest {
     private final byte[] qualities = {30, 50, 50, 60, 60, 70 ,70, 70, 80, 90};
     private final byte[] bases = {'A', 'C', 'G', 'T', 'A', 'C', 'G', 'T', 'T', 'C'};
     private SAMRecord record;
@@ -55,40 +53,40 @@ public class EdgingRecordAndOffsetTest {
     @Test
     public void testConstructor(){
         EdgingRecordAndOffset typedRecordAndOffset = EdgingRecordAndOffset.createBeginRecord(record, 0, 10, 3);
-        assertArrayEquals(qualities, typedRecordAndOffset.getBaseQualities());
-        assertArrayEquals(bases, typedRecordAndOffset.getRecord().getReadBases());
-        assertEquals('A', typedRecordAndOffset.getReadBase());
-        assertEquals(0, typedRecordAndOffset.getOffset());
-        assertEquals(3, typedRecordAndOffset.getRefPos());
-        assertEquals(EdgingRecordAndOffset.Type.BEGIN, typedRecordAndOffset.getType());
+        Assert.assertEquals(qualities, typedRecordAndOffset.getBaseQualities());
+        Assert.assertEquals(bases, typedRecordAndOffset.getRecord().getReadBases());
+        Assert.assertEquals('A', typedRecordAndOffset.getReadBase());
+        Assert.assertEquals(0, typedRecordAndOffset.getOffset());
+        Assert.assertEquals(3, typedRecordAndOffset.getRefPos());
+        Assert.assertEquals(EdgingRecordAndOffset.Type.BEGIN, typedRecordAndOffset.getType());
     }
 
     @Test
     public void  testGetSetStart(){
         EdgingRecordAndOffset typedRecordAndOffset = EdgingRecordAndOffset.createBeginRecord(record, 0, 10, 3);
         EdgingRecordAndOffset typedRecordAndOffsetEnd = EdgingRecordAndOffset.createEndRecord(typedRecordAndOffset);
-        assertEquals(typedRecordAndOffset, typedRecordAndOffsetEnd.getStart());
-        assertEquals(EdgingRecordAndOffset.Type.END, typedRecordAndOffsetEnd.getType());
+        Assert.assertEquals(typedRecordAndOffset, typedRecordAndOffsetEnd.getStart());
+        Assert.assertEquals(EdgingRecordAndOffset.Type.END, typedRecordAndOffsetEnd.getType());
     }
 
     @Test
     public void testNotEqualsTypedRecords(){
         EdgingRecordAndOffset typedRecordAndOffset = EdgingRecordAndOffset.createBeginRecord(record, 0, 10, 3);
         EdgingRecordAndOffset secondEdgingRecordAndOffset = EdgingRecordAndOffset.createBeginRecord(record, 5, 10, 3);
-        assertNotSame(typedRecordAndOffset.getBaseQuality(), secondEdgingRecordAndOffset.getBaseQuality());
-        assertArrayEquals(typedRecordAndOffset.getBaseQualities(), secondEdgingRecordAndOffset.getBaseQualities());
+        Assert.assertNotEquals(typedRecordAndOffset.getBaseQuality(), secondEdgingRecordAndOffset.getBaseQuality());
+        Assert.assertEquals(typedRecordAndOffset.getBaseQualities(), secondEdgingRecordAndOffset.getBaseQualities());
     }
 
     @Test
     public void testGetOffset(){
         EdgingRecordAndOffset secondEdgingRecordAndOffset = EdgingRecordAndOffset.createBeginRecord(record, 5, 10, 3);
-        assertEquals(70, secondEdgingRecordAndOffset.getBaseQuality());
-        assertEquals('C', secondEdgingRecordAndOffset.getReadBase());
+        Assert.assertEquals(70, secondEdgingRecordAndOffset.getBaseQuality());
+        Assert.assertEquals('C', secondEdgingRecordAndOffset.getReadBase());
     }
 
     @Test
     public void testGetQualityAtPosition(){
         EdgingRecordAndOffset secondEdgingRecordAndOffset = EdgingRecordAndOffset.createBeginRecord(record, 0, 10, 1);
-        assertEquals(50, secondEdgingRecordAndOffset.getBaseQuality(2));
+        Assert.assertEquals(50, secondEdgingRecordAndOffset.getBaseQuality(2));
     }
 }
diff --git a/src/test/java/htsjdk/samtools/util/HistogramTest.java b/src/test/java/htsjdk/samtools/util/HistogramTest.java
index 62b1441..ef44469 100644
--- a/src/test/java/htsjdk/samtools/util/HistogramTest.java
+++ b/src/test/java/htsjdk/samtools/util/HistogramTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
@@ -12,7 +13,7 @@ import java.util.HashSet;
 import static java.lang.Math.abs;
 import static java.lang.StrictMath.pow;
 
-public class HistogramTest {
+public class HistogramTest extends HtsjdkTest {
 
     @Test(dataProvider = "histogramData")
     public void testHistogramFunctions(final int[] values, final double mean, final double stdev, final Integer trimByWidth) {
diff --git a/src/test/java/htsjdk/samtools/util/IntervalListTest.java b/src/test/java/htsjdk/samtools/util/IntervalListTest.java
index 6c5fcd4..e138ee0 100644
--- a/src/test/java/htsjdk/samtools/util/IntervalListTest.java
+++ b/src/test/java/htsjdk/samtools/util/IntervalListTest.java
@@ -24,8 +24,8 @@
 
 package htsjdk.samtools.util;
 
-import htsjdk.samtools.SAMFileHeader;
-import htsjdk.samtools.SAMSequenceRecord;
+import htsjdk.HtsjdkTest;
+import htsjdk.samtools.*;
 import htsjdk.variant.vcf.VCFFileReader;
 import org.testng.Assert;
 import org.testng.annotations.BeforeTest;
@@ -33,6 +33,7 @@ import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
 import java.io.File;
+import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -45,7 +46,7 @@ import java.util.TreeSet;
 /**
  * Tests the IntervalList class
  */
-public class IntervalListTest {
+public class IntervalListTest extends HtsjdkTest {
 
     final SAMFileHeader fileHeader;
     final IntervalList list1, list2, list3;
@@ -75,6 +76,15 @@ public class IntervalListTest {
         list3.add(new Interval("3", 50, 470));
     }
 
+    @Test
+    public void testIntervalListFrom() {
+        final String testPath = "src/test/resources/htsjdk/samtools/intervallist/IntervalListFromVCFTestComp.interval_list";
+        final IntervalList fromFileList = IntervalList.fromFile(new File(testPath));
+        final IntervalList fromPathList = IntervalList.fromPath(Paths.get(testPath));
+        fromFileList.getHeader().getSequenceDictionary().assertSameDictionary(fromPathList.getHeader().getSequenceDictionary());
+        Assert.assertEquals(CollectionUtil.makeCollection(fromFileList.iterator()), CollectionUtil.makeCollection(fromPathList.iterator()));
+    }
+
     @DataProvider(name = "intersectData")
     public Object[][] intersectData() {
         final IntervalList intersect123 = new IntervalList(fileHeader);
@@ -364,12 +374,97 @@ public class IntervalListTest {
     }
 
     @Test(dataProvider = "subtractSingletonData")
-    public void testSubtractSingletonasListIntervalList(final IntervalList fromLists, final IntervalList whatLists, final IntervalList list) {
+    public void testSubtractSingletonAsListIntervalList(final IntervalList fromLists, final IntervalList whatLists, final IntervalList list) {
         Assert.assertEquals(
                 CollectionUtil.makeCollection(IntervalList.subtract(Collections.singletonList(fromLists), Collections.singletonList(whatLists)).iterator()),
                 CollectionUtil.makeCollection(list.iterator()));
     }
 
+    @DataProvider(name = "overlapsSingletonData")
+    public Object[][] overlapSingletonData() {
+        final IntervalList two_overlaps_one   = new IntervalList(fileHeader);
+        final IntervalList three_overlaps_two = new IntervalList(fileHeader);
+        final IntervalList three_overlaps_one = new IntervalList(fileHeader);
+        final IntervalList one_overlaps_three = new IntervalList(fileHeader);
+
+        // NB: commented lines below are there to show the intervals in the first list that will not be in the resulting list
+
+        two_overlaps_one.add(new Interval("1", 50, 150));
+        //two_overlaps_one.add(new Interval("1", 301, 500));
+        two_overlaps_one.add(new Interval("2", 1, 150));
+        two_overlaps_one.add(new Interval("2", 250, 270));
+        two_overlaps_one.add(new Interval("2", 290, 400));
+
+        three_overlaps_two.add(new Interval("1", 25, 400));
+        three_overlaps_two.add(new Interval("2", 200, 600));
+        //three_overlaps_two.add(new Interval("3", 50, 470));
+
+        three_overlaps_one.add(new Interval("1", 25, 400));
+        three_overlaps_one.add(new Interval("2", 200, 600));
+        //three_overlaps_one.add(new Interval("3", 50, 470));
+
+        one_overlaps_three.add(new Interval("1", 1, 100));
+        one_overlaps_three.add(new Interval("1", 101, 200));
+        one_overlaps_three.add(new Interval("1", 202, 300));
+        one_overlaps_three.add(new Interval("2", 200, 300));
+        //one_overlaps_three.add(new Interval("2", 100, 150));
+
+        return new Object[][]{
+                new Object[]{list1, list1, list1}, // should return itself
+                new Object[]{list1, IntervalList.invert(list1), new IntervalList(list1.getHeader())}, // should be empty
+                new Object[]{list2, list1, two_overlaps_one},
+                new Object[]{list3, list2, three_overlaps_two},
+                new Object[]{list3, list1, three_overlaps_one},
+                new Object[]{list1, list3, one_overlaps_three}
+        };
+    }
+
+    @DataProvider(name = "overlapsData")
+    public Object[][] overlapData() {
+        final IntervalList three_overlaps_one_and_two = new IntervalList(fileHeader);
+
+        three_overlaps_one_and_two.add(new Interval("1", 25, 400));
+        three_overlaps_one_and_two.add(new Interval("2", 200, 600));
+        //three_overlaps_one_and_two.add(new Interval("3", 50, 470));
+
+        return new Object[][]{
+                new Object[]{CollectionUtil.makeList(list3), CollectionUtil.makeList(list1, list2), three_overlaps_one_and_two},
+        };
+    }
+
+    @Test(dataProvider = "overlapsData")
+    public void testOverlapsIntervalLists(final List<IntervalList> fromLists, final List<IntervalList> whatLists, final IntervalList list) {
+        Assert.assertEquals(
+                CollectionUtil.makeCollection(IntervalList.overlaps(fromLists, whatLists).iterator()),
+                CollectionUtil.makeCollection(list.iterator()));
+    }
+
+    @Test(dataProvider = "overlapsSingletonData")
+    public void testOverlapsSingletonIntervalLists(final IntervalList fromLists, final IntervalList whatLists, final IntervalList list) {
+        Assert.assertEquals(
+                CollectionUtil.makeCollection(IntervalList.overlaps(fromLists, whatLists).iterator()),
+                CollectionUtil.makeCollection(list.iterator()));
+    }
+
+    @Test(dataProvider = "overlapsSingletonData")
+    public void testOverlapsSingletonAsListIntervalList(final IntervalList fromLists, final IntervalList whatLists, final IntervalList list) {
+        Assert.assertEquals(
+                CollectionUtil.makeCollection(IntervalList.overlaps(Collections.singletonList(fromLists), Collections.singletonList(whatLists)).iterator()),
+                CollectionUtil.makeCollection(list.iterator()));
+    }
+
+    @Test(expectedExceptions = SAMException.class)
+    public void testOverlapsEmptyFirstList() {
+        IntervalList.overlaps(Collections.emptyList(), Collections.singletonList(list1));
+    }
+
+    @Test
+    public void testOverlapsEmptySecondList() {
+        Assert.assertEquals(
+                CollectionUtil.makeCollection(IntervalList.overlaps(Collections.singletonList(list1), Collections.emptyList()).iterator()),
+                Collections.emptyList());
+    }
+
     @DataProvider(name = "VCFCompData")
     public Object[][] VCFCompData() {
         return new Object[][]{
@@ -517,4 +612,16 @@ public class IntervalListTest {
         Assert.assertTrue(false);
 
     }
+
+    @Test public void uniqueIntervalsWithoutNames() {
+        final IntervalList test = new IntervalList(this.fileHeader);
+        test.add(new Interval("1", 100, 200));
+        test.add(new Interval("1", 500, 600));
+        test.add(new Interval("1", 550, 700));
+
+        for (final boolean concat : new boolean[]{true, false}) {
+            final IntervalList unique = test.uniqued(concat);
+            Assert.assertEquals(unique.size(), 2);
+        }
+    }
 }
diff --git a/src/test/java/htsjdk/samtools/util/IntervalTreeMapTest.java b/src/test/java/htsjdk/samtools/util/IntervalTreeMapTest.java
index 2e725ff..5e975f9 100644
--- a/src/test/java/htsjdk/samtools/util/IntervalTreeMapTest.java
+++ b/src/test/java/htsjdk/samtools/util/IntervalTreeMapTest.java
@@ -23,12 +23,13 @@
  */
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
 import java.util.Iterator;
 
-public class IntervalTreeMapTest {
+public class IntervalTreeMapTest extends HtsjdkTest {
     @Test
     public void testBasic() {
         IntervalTreeMap<Interval> m=new IntervalTreeMap<Interval>();
@@ -37,8 +38,8 @@ public class IntervalTreeMapTest {
         m.put(chr1Interval, chr1Interval);
         Interval chr2Interval = new Interval("chr2", 1,200);
         m.put(chr2Interval, chr2Interval);
-        
-        
+
+
         Assert.assertTrue(m.containsContained(new Interval("chr1", 9,101)));
         Assert.assertTrue(m.containsOverlapping(new Interval("chr1", 50,150)));
         Assert.assertFalse(m.containsOverlapping(new Interval("chr3", 1,100)));
diff --git a/src/test/java/htsjdk/samtools/util/IntervalTreeTest.java b/src/test/java/htsjdk/samtools/util/IntervalTreeTest.java
index 50d84c0..dcd225e 100644
--- a/src/test/java/htsjdk/samtools/util/IntervalTreeTest.java
+++ b/src/test/java/htsjdk/samtools/util/IntervalTreeTest.java
@@ -23,15 +23,21 @@
  */
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
 import java.util.Iterator;
 
+import static htsjdk.samtools.util.IntervalTree.Node.HAS_OVERLAPPING_PART;
+
 /**
  * @author alecw at broadinstitute.org
  */
-public class IntervalTreeTest {
+ at Test(singleThreaded=true) // to assure that the common resources aren't clobbered
+public class IntervalTreeTest extends HtsjdkTest {
     @Test
     public void testNoMatches()
     {
@@ -57,29 +63,147 @@ public class IntervalTreeTest {
         return ret;
     }
 
+    private final IntervalTree<String> intervalTree = new IntervalTree<String>();
+
+    @BeforeMethod
+    public void init(){ //due to the destructive nature of removeMany test...
+        intervalTree.clear();
+
+        // each interval has a "name:length"
+        intervalTree.put(1, 10, "foo1:10");
+        intervalTree.put(2, 9, "foo2:8");
+        intervalTree.put(3, 8, "foo3:6");
+        intervalTree.put(4, 7, "foo4:4");
+        intervalTree.put(5, 6, "foo5:2");
+        intervalTree.put(1, 9, "foo6:9");
+    }
+
+    @Test
+    public void testLength(){
+
+        Iterator<IntervalTree.Node<String>> iterator = intervalTree.iterator();
+        Iterable<IntervalTree.Node<String>> iterable = () -> iterator;
+
+        for (IntervalTree.Node<String> node : iterable) {
+            Assert.assertEquals(node.getLength(), Integer.parseInt(node.getValue().replaceAll(".*:", "")));
+        }
+    }
+
+    @DataProvider(name="adjacentIntervalsTestData")
+    public Object[][] adjacentIntervalsTestData() {
+        return new Object[][]{
+                {1, 4, 5, 10, true},
+                {1, 3, 5, 10, false},
+                {1, 4, 6, 10, false},
+                {1, 2, 6, 10, false},
+                {1, 10, 6, 10, false},
+                {1, 10, 11, 20, true},
+                {1, 10, 11, 20, true},
+        };
+    }
+
+    @Test(dataProvider = "adjacentIntervalsTestData")
+    public void testAdjacent(int start1, int end1, int start2, int end2, boolean areAdjacent){
+
+        final IntervalTree.Node<String> node1 = new IntervalTree.Node<>(start1, end1, "one");
+        final IntervalTree.Node<String> node2 = new IntervalTree.Node<>(start2, end2, "two");
+
+        Assert.assertTrue(node1.isAdjacent(node2) == areAdjacent);
+        Assert.assertTrue(node2.isAdjacent(node1) == areAdjacent);
+    }
+
+
+    @Test
+    public void testRank() {
+        for (IntervalTree.Node<String> node: intervalTree) {
+            Assert.assertEquals(intervalTree.findByIndex(
+                    intervalTree.getIndex(node.getStart(), node.getEnd())), node);
+        }
+    }
+
+    @Test
+    public void testIterator() {
+
+        final IntervalTree.Node<String> testNode = new IntervalTree.Node<>(3, 4, "foobar1");
+        int count = 0;
+        Iterator<IntervalTree.Node<String>> iterator = intervalTree.iterator(testNode.getStart(), testNode.getEnd());
+        Iterable<IntervalTree.Node<String>> iterable = () -> iterator;
+        for (IntervalTree.Node<String> node : iterable) {
+            Assert.assertTrue(node.compare(testNode.getStart(), testNode.getEnd()) <= 0);
+            count++;
+        }
+        Assert.assertEquals(count, 3); // foobar3, foobar4, and foobar5 only.
+    }
+
+    @Test
+    public void testRemoveMany() {
+        Iterator<IntervalTree.Node<String>> iterator = intervalTree.reverseIterator();
+        Iterable<IntervalTree.Node<String>> iterable = () -> iterator;
+
+        for (IntervalTree.Node<String> node : iterable) {
+            intervalTree.removeNode(node);
+        }
+        Assert.assertEquals(intervalTree.size(), 0);
+    }
+
+    @Test
+    public void testRevIterator() {
+
+        final IntervalTree.Node<String> testNode = new IntervalTree.Node<>(3, 4, "foobar1");
+        int count = 0;
+        Iterator<IntervalTree.Node<String>> iterator = intervalTree.reverseIterator(testNode.getStart(), testNode.getEnd());
+        Iterable<IntervalTree.Node<String>> iterable = () -> iterator;
+        for (IntervalTree.Node<String> node : iterable) {
+            Assert.assertTrue(node.compare(testNode.getStart(), testNode.getEnd()) >= 0);
+            count++;
+        }
+        Assert.assertEquals(count, 3); // foobar1, foobar2, and foobar6
+    }
+
+
+    @Test
+    public void testOverlapIterator() {
+
+        final IntervalTree.Node<String> testNode = new IntervalTree.Node<>(3, 4, "foobar1");
+        int count = 0;
+        Iterator<IntervalTree.Node<String>> iterator = intervalTree.overlappers(testNode.getStart(), testNode.getEnd());
+        Iterable<IntervalTree.Node<String>> iterable = () -> iterator;
+        for (IntervalTree.Node<String> node : iterable) {
+            Assert.assertTrue( (testNode.getRelationship(node) & HAS_OVERLAPPING_PART) != 0, String.format("%s with %s = %d", node.toString(), testNode.toString(), node.getRelationship(testNode)));
+            count++;
+        }
+        Assert.assertEquals(count, 5); // foobar1, foobar2, foobar3, foobar4, and foobar6
+    }
+
+
+    @Test
+    public void testTotalRevIterator() {
+
+        int count = 0;
+        Iterator<IntervalTree.Node<String>> iterator = intervalTree.reverseIterator();
+        Iterable<IntervalTree.Node<String>> iterable = () -> iterator;
+
+        for (IntervalTree.Node<String> ignored : iterable) {
+            count++;
+        }
+        Assert.assertEquals(count, intervalTree.size()); // foobar1, foobar2, and foobar6
+    }
+
     @Test
     public void testMatches()
     {
-        final IntervalTree<String> intervalTree = new IntervalTree<String>();
-        intervalTree.put(1, 10, "foo1");
-        intervalTree.put(2, 9, "foo2");
-        intervalTree.put(3, 8, "foo3");
-        intervalTree.put(4, 7, "foo4");
-        intervalTree.put(5, 6, "foo5");
-        intervalTree.put(1, 9, "foo6");
-
         // Single match
         Assert.assertEquals(countElements(intervalTree.overlappers(10, 10)), 1, "Test single overlap");
-        Assert.assertTrue(iteratorContains(intervalTree.overlappers(10, 10), "foo1"), "Test single overlap for correct overlapee");
+        Assert.assertTrue(iteratorContains(intervalTree.overlappers(10, 10), "foo1:10"), "Test single overlap for correct overlapee");
 
         // Multiple matches
         Assert.assertEquals(countElements(intervalTree.overlappers(7, 8)), 5, "Test multiple overlap");
-        Assert.assertTrue(iteratorContains(intervalTree.overlappers(7, 8), "foo1"), "Test multiple overlap for correct overlapees");
-        Assert.assertTrue(iteratorContains(intervalTree.overlappers(7, 8), "foo2"), "Test multiple overlap for correct overlapees");
-        Assert.assertTrue(iteratorContains(intervalTree.overlappers(7, 8), "foo3"), "Test multiple overlap for correct overlapees");
-        Assert.assertTrue(iteratorContains(intervalTree.overlappers(7, 8), "foo4"), "Test multiple overlap for correct overlapees");
-        Assert.assertTrue(iteratorContains(intervalTree.overlappers(7, 8), "foo6"), "Test multiple overlap for correct overlapees");
-        Assert.assertTrue(!iteratorContains(intervalTree.overlappers(7, 8), "foo5"), "Test multiple overlap for correct overlapees");
+        Assert.assertTrue( iteratorContains(intervalTree.overlappers(7, 8), "foo1:10"), "Test multiple overlap for correct overlapees");
+        Assert.assertTrue( iteratorContains(intervalTree.overlappers(7, 8), "foo2:8"), "Test multiple overlap for correct overlapees");
+        Assert.assertTrue( iteratorContains(intervalTree.overlappers(7, 8), "foo3:6"), "Test multiple overlap for correct overlapees");
+        Assert.assertTrue( iteratorContains(intervalTree.overlappers(7, 8), "foo4:4"), "Test multiple overlap for correct overlapees");
+        Assert.assertTrue( iteratorContains(intervalTree.overlappers(7, 8), "foo6:9"), "Test multiple overlap for correct overlapees");
+        Assert.assertTrue(!iteratorContains(intervalTree.overlappers(7, 8), "foo5:2"), "Test multiple overlap for correct overlapees");
     }
 
     private boolean iteratorContains(final Iterator<IntervalTree.Node<String>> nodeIterator, final String s) {
@@ -184,4 +308,6 @@ public class IntervalTreeTest {
         Assert.assertEquals(intervalTree.remove(46402360, 46402594), "frob");
         intervalTree.checkMaxEnds();
     }
+
+
 }
diff --git a/src/test/java/htsjdk/samtools/util/IoUtilTest.java b/src/test/java/htsjdk/samtools/util/IoUtilTest.java
index 0e4cd7a..ab1938c 100644
--- a/src/test/java/htsjdk/samtools/util/IoUtilTest.java
+++ b/src/test/java/htsjdk/samtools/util/IoUtilTest.java
@@ -23,6 +23,12 @@
  */
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
+import com.google.common.jimfs.Configuration;
+import com.google.common.jimfs.Jimfs;
+import java.nio.file.FileSystem;
+import java.nio.file.Path;
+import java.nio.file.spi.FileSystemProvider;
 import org.testng.Assert;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.DataProvider;
@@ -41,7 +47,7 @@ import java.io.StringWriter;
 import java.util.Arrays;
 import java.util.List;
 
-public class IoUtilTest {
+public class IoUtilTest extends HtsjdkTest {
 
     private static final File SLURP_TEST_FILE = new File("src/test/resources/htsjdk/samtools/io/slurptest.txt");
     private static final File EMPTY_FILE = new File("src/test/resources/htsjdk/samtools/io/empty.txt");
@@ -162,12 +168,35 @@ public class IoUtilTest {
     public void testFileType(final String path, boolean expectedIsRegularFile) {
         final File file = new File(path);
         Assert.assertEquals(IOUtil.isRegularPath(file), expectedIsRegularFile);
+        if (null != file) {
+            Assert.assertEquals(IOUtil.isRegularPath(file.toPath()), expectedIsRegularFile);
+        }
     }
 
     @Test(dataProvider = "unixFileTypeTestCases", groups = {"unix"})
     public void testFileTypeUnix(final String path, boolean expectedIsRegularFile) {
         final File file = new File(path);
         Assert.assertEquals(IOUtil.isRegularPath(file), expectedIsRegularFile);
+        if (null != file) {
+            Assert.assertEquals(IOUtil.isRegularPath(file.toPath()), expectedIsRegularFile);
+        }
+    }
+
+    @Test
+    public void testAddExtension() throws IOException {
+        Path p = IOUtil.getPath("/folder/file");
+        List<FileSystemProvider> fileSystemProviders = FileSystemProvider.installedProviders();
+        Assert.assertEquals(IOUtil.addExtension(p, ".ext"), IOUtil.getPath("/folder/file.ext"));
+        p = IOUtil.getPath("folder/file");
+        Assert.assertEquals(IOUtil.addExtension(p, ".ext"), IOUtil.getPath("folder/file.ext"));
+        try (FileSystem jimfs = Jimfs.newFileSystem(Configuration.unix())) {
+            p = jimfs.getPath("folder/sub/file");
+            Assert.assertEquals(IOUtil.addExtension(p, ".ext"), jimfs.getPath("folder/sub/file.ext"));
+            p = jimfs.getPath("folder/file");
+            Assert.assertEquals(IOUtil.addExtension(p, ".ext"), jimfs.getPath("folder/file.ext"));
+            p = jimfs.getPath("file");
+            Assert.assertEquals(IOUtil.addExtension(p, ".ext"), jimfs.getPath("file.ext"));
+        }
     }
 
     @DataProvider(name = "fileTypeTestCases")
diff --git a/src/test/java/htsjdk/samtools/util/Iso8601DateTest.java b/src/test/java/htsjdk/samtools/util/Iso8601DateTest.java
index ce0ae08..93b9d65 100644
--- a/src/test/java/htsjdk/samtools/util/Iso8601DateTest.java
+++ b/src/test/java/htsjdk/samtools/util/Iso8601DateTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
@@ -31,7 +32,7 @@ import java.util.Date;
 /**
  * @author alecw at broadinstitute.org
  */
-public class Iso8601DateTest {
+public class Iso8601DateTest extends HtsjdkTest {
     @Test
     public void testBasic() {
         final String dateStr = "2008-12-15";
diff --git a/src/test/java/htsjdk/samtools/util/IupacTest.java b/src/test/java/htsjdk/samtools/util/IupacTest.java
index 64b78c0..86b0a41 100644
--- a/src/test/java/htsjdk/samtools/util/IupacTest.java
+++ b/src/test/java/htsjdk/samtools/util/IupacTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.BamFileIoUtils;
 import htsjdk.samtools.SAMFileHeader;
 import htsjdk.samtools.SAMFileWriter;
@@ -38,7 +39,7 @@ import org.testng.annotations.Test;
 import java.io.File;
 import java.util.Arrays;
 
-public class IupacTest {
+public class IupacTest extends HtsjdkTest {
     @Test(dataProvider = "basicDataProvider")
     public void basic(final String tempFileExtension) throws Exception {
         final File outputFile = File.createTempFile("iupacTest.", tempFileExtension);
diff --git a/src/test/java/htsjdk/samtools/util/LocatableUnitTest.java b/src/test/java/htsjdk/samtools/util/LocatableUnitTest.java
new file mode 100644
index 0000000..9dd2be1
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/util/LocatableUnitTest.java
@@ -0,0 +1,160 @@
+package htsjdk.samtools.util;
+
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+public class LocatableUnitTest {
+
+    private static Locatable getLocatable(final String contig, final int start, final int end) {
+        return new Locatable() {
+            @Override
+            public String getContig() {
+                return contig;
+            }
+
+            @Override
+            public int getStart() {
+                return start;
+            }
+
+            @Override
+            public int getEnd() {
+                return end;
+            }
+
+            @Override
+            public String toString() {
+                return String.format("%s:%s-%s", contig, start, end);
+            }
+        };
+    }
+
+    @DataProvider(name = "IntervalSizeData")
+    public Object[][] getIntervalSizeData() {
+        // Intervals + expected sizes
+        return new Object[][]{
+                { getLocatable("1", 1, 1), 1 },
+                { getLocatable("1", 1, 2), 2 },
+                { getLocatable("1", 1, 10), 10 },
+                { getLocatable("1", 2, 10), 9 },
+                { getLocatable("1", 1,0), 0}
+        };
+    }
+
+    @Test(dataProvider = "IntervalSizeData")
+    public void testGetSize(final Locatable interval, final int expectedSize ) {
+        Assert.assertEquals(interval.getLengthOnReference(), expectedSize, "size() incorrect for interval " + interval);
+    }
+
+    @DataProvider(name = "IntervalOverlapData")
+    public static Object[][] getIntervalOverlapData() {
+        final Locatable standardInterval = getLocatable("1", 10, 20);
+        final Locatable oneBaseInterval = getLocatable("1", 10, 10);
+
+        return new Object[][] {
+                { standardInterval, getLocatable("2", 10, 20), false },
+                { standardInterval, getLocatable("1", 1, 5), false },
+                { standardInterval, getLocatable("1", 1, 9), false },
+                { standardInterval, getLocatable("1", 1, 10), true },
+                { standardInterval, getLocatable("1", 1, 15), true },
+                { standardInterval, getLocatable("1", 10, 10), true },
+                { standardInterval, getLocatable("1", 10, 15), true },
+                { standardInterval, getLocatable("1", 10, 20), true },
+                { standardInterval, getLocatable("1", 15, 20), true },
+                { standardInterval, getLocatable("1", 15, 25), true },
+                { standardInterval, getLocatable("1", 20, 20), true },
+                { standardInterval, getLocatable("1", 20, 25), true },
+                { standardInterval, getLocatable("1", 21, 25), false },
+                { standardInterval, getLocatable("1", 25, 30), false },
+                { oneBaseInterval, getLocatable("2", 10, 10), false },
+                { oneBaseInterval, getLocatable("1", 1, 5), false },
+                { oneBaseInterval, getLocatable("1", 1, 9), false },
+                { oneBaseInterval, getLocatable("1", 1, 10), true },
+                { oneBaseInterval, getLocatable("1", 10, 10), true },
+                { oneBaseInterval, getLocatable("1", 10, 15), true },
+                { oneBaseInterval, getLocatable("1", 11, 15), false },
+                { oneBaseInterval, getLocatable("1", 15, 20), false },
+                { standardInterval, null, false },
+                { standardInterval, standardInterval, true },
+        };
+    }
+
+    @Test(dataProvider = "IntervalOverlapData")
+    public void testOverlap(final Locatable firstInterval, final Locatable secondInterval, final boolean expectedOverlapResult ) {
+        Assert.assertEquals(firstInterval.overlaps(secondInterval), expectedOverlapResult,
+                "overlap() returned incorrect result for intervals " + firstInterval + " and " + secondInterval);
+    }
+
+    @DataProvider(name = "overlapsWithMargin")
+    public Object[][] overlapsWithMargin(){
+        final Locatable standardInterval = getLocatable("1", 10, 20);
+        final Locatable middleInterval = getLocatable("1", 100, 200);
+        final Locatable zeroLengthInterval = getLocatable("1", 1, 0);
+
+        return new Object[][] {
+                { standardInterval, getLocatable("2", 10, 20), 100, false },
+                { standardInterval, getLocatable("1", 1, 15), 0, true },
+                { standardInterval, getLocatable("1", 30, 50), 9, false },
+                { standardInterval, getLocatable("1", 30, 50), 10, true },
+                { middleInterval, getLocatable("1", 50, 99), 0, false },
+                { middleInterval, getLocatable("1", 50, 90), 9, false },
+                { middleInterval, getLocatable("1", 50, 90), 10, true },
+                { middleInterval, getLocatable("1", 150, 149), 0, true },
+                { middleInterval, getLocatable("1", 100, 99), 0, true },
+                { middleInterval, getLocatable("1", 99, 98), 0, false },
+                { standardInterval, getLocatable(null, 10, 20), 100, false }
+        };
+    }
+
+    @Test(dataProvider = "overlapsWithMargin")
+    public void testOverlapWithMargin(final Locatable firstInterval, final Locatable secondInterval, int margin, final boolean expectedOverlapResult ) {
+        Assert.assertEquals(firstInterval.withinDistanceOf(secondInterval, margin), expectedOverlapResult,
+                "overlap() returned incorrect result for intervals " + firstInterval + " and " + secondInterval);
+    }
+
+    @DataProvider(name = "IntervalContainsData")
+    public Object[][] getIntervalContainsData() {
+        final Locatable containingInterval = getLocatable("1", 10, 20);
+        final Locatable zeroLengthIntervalBetween9And10 = getLocatable("1", 10, 9);
+        return new Object[][] {
+                { containingInterval, getLocatable("2", 10, 20), false },
+                { containingInterval, getLocatable("1", 1, 5), false },
+                { containingInterval, getLocatable("1", 1, 10), false },
+                { containingInterval, getLocatable("1", 5, 15), false },
+                { containingInterval, getLocatable("1", 9, 10), false },
+                { containingInterval, getLocatable("1", 9, 20), false },
+                { containingInterval, getLocatable("1", 10, 10), true },
+                { containingInterval, getLocatable("1", 10, 15), true },
+                { containingInterval, getLocatable("1", 10, 20), true },
+                { containingInterval, getLocatable("1", 10, 21), false },
+                { containingInterval, getLocatable("1", 15, 25), false },
+                { containingInterval, getLocatable("1", 20, 20), true },
+                { containingInterval, getLocatable("1", 20, 21), false },
+                { containingInterval, getLocatable("1", 20, 25), false },
+                { containingInterval, getLocatable("1", 21, 25), false },
+                { containingInterval, getLocatable("1", 25, 30), false },
+                { containingInterval, null, false },
+                { containingInterval, containingInterval, true },
+                { containingInterval, getLocatable(null, 10, 20), false},
+                { getLocatable(null, 10, 20), getLocatable(null, 10, 20), false},
+
+                //0 length intervals
+                { containingInterval, zeroLengthIntervalBetween9And10, true},
+                { containingInterval, getLocatable("1", 15, 14), true},
+                { containingInterval, getLocatable("1", 21,20), true},
+                { containingInterval, getLocatable("1", 25, 24), false},
+                {zeroLengthIntervalBetween9And10, getLocatable("1", 9, 8), false},
+                {zeroLengthIntervalBetween9And10, getLocatable("1", 11, 10), false},
+
+                //0 length interval is considered to contain itself
+                {zeroLengthIntervalBetween9And10, zeroLengthIntervalBetween9And10, true}
+        };
+    }
+
+    @Test(dataProvider = "IntervalContainsData")
+    public void testContains(final Locatable firstInterval, final Locatable secondInterval, final boolean expectedContainsResult ) {
+        Assert.assertEquals(firstInterval.contains(secondInterval), expectedContainsResult,
+                "contains() returned incorrect result for intervals " + firstInterval + " and " + secondInterval);
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/util/LogTest.java b/src/test/java/htsjdk/samtools/util/LogTest.java
new file mode 100644
index 0000000..a9b82b1
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/util/LogTest.java
@@ -0,0 +1,41 @@
+package htsjdk.samtools.util;
+
+import htsjdk.HtsjdkTest;
+import org.testng.Assert;
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.nio.file.Files;
+import java.util.List;
+
+public class LogTest extends HtsjdkTest {
+
+    private final Log log = Log.getInstance(getClass());
+
+    @Test
+    public void testLogToFile() throws IOException {
+        final File logFile = File.createTempFile(getClass().getSimpleName(), ".tmp");
+        logFile.deleteOnExit();
+
+        final Log.LogLevel originalLogLevel = Log.getGlobalLogLevel();
+        final PrintStream originalStream = Log.getGlobalPrintStream();
+
+        try (final PrintStream stream = new PrintStream(new FileOutputStream(logFile.getPath(), true))) {
+            Log.setGlobalPrintStream(stream);
+            Log.setGlobalLogLevel(Log.LogLevel.DEBUG);
+            final String words = "Hello World";
+            log.info(words);
+            final List<String> list = Files.readAllLines(logFile.toPath());
+            Assert.assertEquals(Log.getGlobalLogLevel(), Log.LogLevel.DEBUG);
+            Assert.assertEquals(list.size(), 1);
+            Assert.assertTrue(list.get(0).contains(words));
+        } finally {
+            Log.setGlobalLogLevel(originalLogLevel);
+            Log.setGlobalPrintStream(originalStream);
+        }
+    }
+}
diff --git a/src/test/java/htsjdk/samtools/util/MergingIteratorTest.java b/src/test/java/htsjdk/samtools/util/MergingIteratorTest.java
index d36bb6d..e5964ac 100644
--- a/src/test/java/htsjdk/samtools/util/MergingIteratorTest.java
+++ b/src/test/java/htsjdk/samtools/util/MergingIteratorTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
@@ -34,7 +35,7 @@ import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.Queue;
 
-public class MergingIteratorTest {
+public class MergingIteratorTest extends HtsjdkTest {
 
 	private static class QueueBackedIterator<T> implements CloseableIterator<T> {
 
diff --git a/src/test/java/htsjdk/samtools/util/OverlapDetectorTest.java b/src/test/java/htsjdk/samtools/util/OverlapDetectorTest.java
index ecde965..d8adf2e 100644
--- a/src/test/java/htsjdk/samtools/util/OverlapDetectorTest.java
+++ b/src/test/java/htsjdk/samtools/util/OverlapDetectorTest.java
@@ -1,12 +1,13 @@
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
 import java.util.*;
 
-public class OverlapDetectorTest {
+public class OverlapDetectorTest extends HtsjdkTest {
 
     @DataProvider(name="intervalsMultipleContigs")
     public Object[][] intervalsMultipleContigs(){
diff --git a/src/test/java/htsjdk/samtools/util/PositionalOutputStreamTest.java b/src/test/java/htsjdk/samtools/util/PositionalOutputStreamTest.java
index 49de11d..939c748 100644
--- a/src/test/java/htsjdk/samtools/util/PositionalOutputStreamTest.java
+++ b/src/test/java/htsjdk/samtools/util/PositionalOutputStreamTest.java
@@ -1,7 +1,7 @@
 /*
  * The MIT License (MIT)
  *
- * Copyright (c) 2015 Daniel Gómez-Sánchez
+ * Copyright (c) 2015 Daniel Gomez-Sanchez
  *
  * Permission is hereby granted, free of charge, to any person obtaining a copy
  * of this software and associated documentation files (the "Software"), to deal
@@ -24,6 +24,7 @@
 
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
@@ -33,7 +34,7 @@ import java.io.OutputStream;
 /**
  * @author Daniel Gomez-Sanchez (magicDGS)
  */
-public class PositionalOutputStreamTest {
+public class PositionalOutputStreamTest extends HtsjdkTest {
 
     @Test
     public void basicPositionTest() throws Exception {
@@ -59,4 +60,4 @@ public class PositionalOutputStreamTest {
         Assert.assertEquals(wrapped.getPosition(), position);
     }
 
-}
\ No newline at end of file
+}
diff --git a/src/test/java/htsjdk/samtools/util/QualityEncodingDetectorTest.java b/src/test/java/htsjdk/samtools/util/QualityEncodingDetectorTest.java
index 9e014d7..071312d 100644
--- a/src/test/java/htsjdk/samtools/util/QualityEncodingDetectorTest.java
+++ b/src/test/java/htsjdk/samtools/util/QualityEncodingDetectorTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SAMException;
 import htsjdk.samtools.SAMRecordSetBuilder;
 import htsjdk.samtools.SamReader;
@@ -13,7 +14,7 @@ import java.io.File;
 import java.util.Arrays;
 import java.util.List;
 
-public class QualityEncodingDetectorTest {
+public class QualityEncodingDetectorTest extends HtsjdkTest {
 
     private static class Testcase {
         private final File f;
diff --git a/src/test/java/htsjdk/samtools/util/RelativeIso8601DateTest.java b/src/test/java/htsjdk/samtools/util/RelativeIso8601DateTest.java
index e4e9ef9..0e0c9b2 100644
--- a/src/test/java/htsjdk/samtools/util/RelativeIso8601DateTest.java
+++ b/src/test/java/htsjdk/samtools/util/RelativeIso8601DateTest.java
@@ -1,5 +1,6 @@
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
@@ -9,7 +10,7 @@ import java.util.List;
 
 /** @author mccowan */
 
-public class RelativeIso8601DateTest {
+public class RelativeIso8601DateTest extends HtsjdkTest {
 
     // 1 second resolution is ISO date
     private final static double DELTA_FOR_TIME = 1000;
diff --git a/src/test/java/htsjdk/samtools/util/SamLocusIteratorTest.java b/src/test/java/htsjdk/samtools/util/SamLocusIteratorTest.java
index 262b7c9..5bcea40 100644
--- a/src/test/java/htsjdk/samtools/util/SamLocusIteratorTest.java
+++ b/src/test/java/htsjdk/samtools/util/SamLocusIteratorTest.java
@@ -61,7 +61,7 @@ public class SamLocusIteratorTest extends AbstractLocusIteratorTestTemplate {
             int pos = startPosition;
             for (final SamLocusIterator.LocusInfo li : sli) {
                 Assert.assertEquals(li.getPosition(), pos++);
-                Assert.assertEquals(li.getRecordAndPositions().size(), coverage);
+                Assert.assertEquals(li.getRecordAndOffsets().size(), coverage);
                 Assert.assertEquals(li.size(), coverage);
                 // make sure that we are not accumulating indels
                 Assert.assertEquals(li.getDeletedInRecord().size(), 0);
@@ -86,7 +86,7 @@ public class SamLocusIteratorTest extends AbstractLocusIteratorTestTemplate {
         int pos = 165;
         for (final SamLocusIterator.LocusInfo li : sli) {
             Assert.assertEquals(li.getPosition(), pos++);
-            Assert.assertEquals(li.getRecordAndPositions().size(), 2);
+            Assert.assertEquals(li.getRecordAndOffsets().size(), 2);
             Assert.assertEquals(li.size(), 2);
         }
     }
@@ -123,7 +123,7 @@ public class SamLocusIteratorTest extends AbstractLocusIteratorTestTemplate {
                 } else {
                     expectedReads = 0;
                 }
-                Assert.assertEquals(li.getRecordAndPositions().size(), expectedReads);
+                Assert.assertEquals(li.getRecordAndOffsets().size(), expectedReads);
                 Assert.assertEquals(li.size(), expectedReads);
                 // make sure that we are not accumulating indels
                 Assert.assertEquals(li.getDeletedInRecord().size(), 0);
@@ -161,7 +161,7 @@ public class SamLocusIteratorTest extends AbstractLocusIteratorTestTemplate {
             // make sure we accumulated depth coverage for even positions, coverage/2 for odd positions
             int pos = startPosition;
             for (final SamLocusIterator.LocusInfo li : sli) {
-                Assert.assertEquals(li.getRecordAndPositions().size(), (pos % 2 == 0) ? coverage / 2 : coverage);
+                Assert.assertEquals(li.getRecordAndOffsets().size(), (pos % 2 == 0) ? coverage / 2 : coverage);
                 Assert.assertEquals(li.size(), (pos % 2 == 0) ? coverage / 2 : coverage);
                 Assert.assertEquals(li.getPosition(), pos++);
                 // make sure that we are not accumulating indels
@@ -200,7 +200,7 @@ public class SamLocusIteratorTest extends AbstractLocusIteratorTestTemplate {
                 Assert.assertEquals(li.getPosition(), pos++);
                 if (isDeletedPosition) {
                     // make sure there are no reads without indels
-                    Assert.assertEquals(li.getRecordAndPositions().size(), 0);
+                    Assert.assertEquals(li.getRecordAndOffsets().size(), 0);
                     Assert.assertEquals(li.size(), coverage); // should include deletions
 
                     // make sure that we are accumulating indels
@@ -208,7 +208,7 @@ public class SamLocusIteratorTest extends AbstractLocusIteratorTestTemplate {
                     Assert.assertEquals(li.getInsertedInRecord().size(), 0);
                 } else {
                     // make sure we are accumulating normal coverage
-                    Assert.assertEquals(li.getRecordAndPositions().size(), coverage);
+                    Assert.assertEquals(li.getRecordAndOffsets().size(), coverage);
                     Assert.assertEquals(li.size(), coverage);
 
                     // make sure that we are not accumulating indels
@@ -241,7 +241,7 @@ public class SamLocusIteratorTest extends AbstractLocusIteratorTestTemplate {
             for (final SamLocusIterator.LocusInfo li : sli) {
                 Assert.assertEquals(li.getPosition(), pos++);
                 // make sure we are accumulating normal coverage
-                Assert.assertEquals(li.getRecordAndPositions().size(), coverage);
+                Assert.assertEquals(li.getRecordAndOffsets().size(), coverage);
                 Assert.assertEquals(li.size(), coverage);
 
                 // make sure that we are not accumulating deletions
@@ -278,7 +278,7 @@ public class SamLocusIteratorTest extends AbstractLocusIteratorTestTemplate {
             for (final SamLocusIterator.LocusInfo li : sli) {
                 Assert.assertEquals(li.getPosition(), pos);
                 // accumulation of coverage
-                Assert.assertEquals(li.getRecordAndPositions().size(), (indelPosition) ? 0 : coverage);
+                Assert.assertEquals(li.getRecordAndOffsets().size(), (indelPosition) ? 0 : coverage);
                 Assert.assertEquals(li.size(), (indelPosition) ? 0 : coverage);
 
                 // no accumulation of deletions
@@ -319,7 +319,7 @@ public class SamLocusIteratorTest extends AbstractLocusIteratorTestTemplate {
             for (final SamLocusIterator.LocusInfo li : sli) {
                 Assert.assertEquals(li.getPosition(), pos);
                 // accumulation of coverage
-                Assert.assertEquals(li.getRecordAndPositions().size(), (indelPosition) ? 0 : coverage);
+                Assert.assertEquals(li.getRecordAndOffsets().size(), (indelPosition) ? 0 : coverage);
                 Assert.assertEquals(li.size(), (indelPosition) ? 0 : coverage);
                 // no accumulation of deletions
                 Assert.assertEquals(li.getDeletedInRecord().size(), 0);
@@ -364,7 +364,7 @@ public class SamLocusIteratorTest extends AbstractLocusIteratorTestTemplate {
                 }
                 Assert.assertEquals(li.getPosition(), pos);
                 // accumulation of coverage
-                Assert.assertEquals(li.getRecordAndPositions().size(), (pos == endN) ? 0 : coverage);
+                Assert.assertEquals(li.getRecordAndOffsets().size(), (pos == endN) ? 0 : coverage);
                 Assert.assertEquals(li.size(), (pos == endN) ? 0 : coverage);
                 // no accumulation of deletions
                 Assert.assertEquals(li.getDeletedInRecord().size(), 0);
@@ -416,7 +416,7 @@ public class SamLocusIteratorTest extends AbstractLocusIteratorTestTemplate {
                 final boolean insideDeletion = incIndels && (pos >= startDel && pos <= endDel);
                 Assert.assertEquals(li.getPosition(), pos);
                 // accumulation of coverage
-                Assert.assertEquals(li.getRecordAndPositions().size(), (insideDeletion) ? 0 : coverage);
+                Assert.assertEquals(li.getRecordAndOffsets().size(), (insideDeletion) ? 0 : coverage);
                 Assert.assertEquals(li.size(), coverage); // either will be all deletions, or all non-deletions, but always of size `coverage`.
                 // accumulation of deletions
                 Assert.assertEquals(li.getDeletedInRecord().size(), (insideDeletion) ? coverage : 0);
@@ -506,18 +506,18 @@ public class SamLocusIteratorTest extends AbstractLocusIteratorTestTemplate {
                 if (inDelRange) {
                     // check the coverage for insertion and normal records
                     Assert.assertEquals(li.getDeletedInRecord().size(), coverage);
-                    Assert.assertEquals(li.getRecordAndPositions().size(), 0);
+                    Assert.assertEquals(li.getRecordAndOffsets().size(), 0);
                     Assert.assertEquals(li.size(), coverage); // includes deletions
                     // check the offset for the deletion
                     Assert.assertEquals(li.getDeletedInRecord().get(0).getOffset(), expectedReadOffsets[i]);
                     Assert.assertEquals(li.getDeletedInRecord().get(1).getOffset(), expectedReadOffsets[i]);
                 } else {
                     // if it is not a deletion, perform the same test as before
-                    Assert.assertEquals(li.getRecordAndPositions().size(), coverage);
+                    Assert.assertEquals(li.getRecordAndOffsets().size(), coverage);
                     Assert.assertEquals(li.size(), coverage);
                     // Assert.assertEquals(li.getDeletedInRecord().size(), 0);
-                    Assert.assertEquals(li.getRecordAndPositions().get(0).getOffset(), expectedReadOffsets[i]);
-                    Assert.assertEquals(li.getRecordAndPositions().get(1).getOffset(), expectedReadOffsets[i]);
+                    Assert.assertEquals(li.getRecordAndOffsets().get(0).getOffset(), expectedReadOffsets[i]);
+                    Assert.assertEquals(li.getRecordAndOffsets().get(1).getOffset(), expectedReadOffsets[i]);
                 }
                 ++i;
             }
@@ -576,12 +576,12 @@ public class SamLocusIteratorTest extends AbstractLocusIteratorTestTemplate {
 
         i = 0;
         for (final SamLocusIterator.LocusInfo li : sli) {
-            Assert.assertEquals(li.getRecordAndPositions().size(), expectedDepths[i]);
+            Assert.assertEquals(li.getRecordAndOffsets().size(), expectedDepths[i]);
             Assert.assertEquals(li.size(), expectedDepths[i]);
             Assert.assertEquals(li.getPosition(), expectedReferencePositions[i]);
-            Assert.assertEquals(li.getRecordAndPositions().size(), expectedReadOffsets[i].length);
+            Assert.assertEquals(li.getRecordAndOffsets().size(), expectedReadOffsets[i].length);
             for (int j = 0; j < expectedReadOffsets[i].length; ++j) {
-                Assert.assertEquals(li.getRecordAndPositions().get(j).getOffset(), expectedReadOffsets[i][j]);
+                Assert.assertEquals(li.getRecordAndOffsets().get(j).getOffset(), expectedReadOffsets[i][j]);
             }
             // make sure that we are not accumulating indels
             Assert.assertEquals(li.getDeletedInRecord().size(), 0);
@@ -652,12 +652,12 @@ public class SamLocusIteratorTest extends AbstractLocusIteratorTestTemplate {
         i = 0;
         for (final SamLocusIterator.LocusInfo li : sli) {
             // checking the same as without indels
-            Assert.assertEquals(li.getRecordAndPositions().size(), expectedDepths[i]);
+            Assert.assertEquals(li.getRecordAndOffsets().size(), expectedDepths[i]);
             Assert.assertEquals(li.size(), expectedDepths[i] + expectedDelDepths[i]); // include deletions
             Assert.assertEquals(li.getPosition(), expectedReferencePositions[i]);
-            Assert.assertEquals(li.getRecordAndPositions().size(), expectedReadOffsets[i].length);
+            Assert.assertEquals(li.getRecordAndOffsets().size(), expectedReadOffsets[i].length);
             for (int j = 0; j < expectedReadOffsets[i].length; ++j) {
-                Assert.assertEquals(li.getRecordAndPositions().get(j).getOffset(), expectedReadOffsets[i][j]);
+                Assert.assertEquals(li.getRecordAndOffsets().get(j).getOffset(), expectedReadOffsets[i][j]);
             }
             // check the deletions
             Assert.assertEquals(li.getDeletedInRecord().size(), expectedDelDepths[i]);
@@ -670,4 +670,4 @@ public class SamLocusIteratorTest extends AbstractLocusIteratorTestTemplate {
         }
     }
 
-}
\ No newline at end of file
+}
diff --git a/src/test/java/htsjdk/samtools/util/SequenceUtilTest.java b/src/test/java/htsjdk/samtools/util/SequenceUtilTest.java
index 008cca5..ce2233b 100644
--- a/src/test/java/htsjdk/samtools/util/SequenceUtilTest.java
+++ b/src/test/java/htsjdk/samtools/util/SequenceUtilTest.java
@@ -23,8 +23,8 @@
  */
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.*;
-import htsjdk.samtools.reference.ReferenceSequence;
 import htsjdk.samtools.reference.ReferenceSequenceFile;
 import htsjdk.samtools.reference.ReferenceSequenceFileFactory;
 import org.testng.Assert;
@@ -32,14 +32,12 @@ import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
 import java.io.File;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.Set;
+import java.util.*;
 
 /**
  * @author alecw at broadinstitute.org
  */
-public class SequenceUtilTest {
+public class SequenceUtilTest extends HtsjdkTest {
     private static final String HEADER = "@HD\tVN:1.0\tSO:unsorted\n";
     private static final String SEQUENCE_NAME=
         "@SQ\tSN:phix174.seq\tLN:5386\tUR:/seq/references/PhiX174/v0/PhiX174.fasta\tAS:PhiX174\tM5:3332ed720ac7eaa9b3655c06f6b9e196";
@@ -61,7 +59,7 @@ public class SequenceUtilTest {
 
         return new Object[][]{ {makeSequenceDictionary(5386, "/seq/references/PhiX174/v0/PhiX174.fasta",
                 "3332ed720ac7eaa9b3655c06f6b9e196"),
-                new SAMTextHeaderCodec().decode(new StringLineReader(s), null).getSequenceDictionary()}};
+                new SAMTextHeaderCodec().decode(BufferedLineReader.fromString(s), null).getSequenceDictionary()}};
     }
 
     @Test(dataProvider = "compatibleNonEqualLists")
@@ -105,7 +103,7 @@ public class SequenceUtilTest {
     private SAMSequenceDictionary makeSequenceDictionary(final int length, final String ur, final String m5) {
         final String s = HEADER +
                 String.format("@SQ\tSN:phix174.seq\tLN:%d\tUR:%s\tAS:PhiX174\tM5:%s\n", length, ur, m5);
-        return new SAMTextHeaderCodec().decode(new StringLineReader(s), null).getSequenceDictionary();
+        return new SAMTextHeaderCodec().decode(BufferedLineReader.fromString(s), null).getSequenceDictionary();
     }
 
     @Test(dataProvider = "makeReferenceFromAlignment")
@@ -144,6 +142,11 @@ public class SequenceUtilTest {
         final SAMRecord rec = new SAMRecord(null);
         rec.setReadName("test");
         rec.setReadString(readString);
+        final byte[] byteArray = new byte[readString.length()];
+
+        Arrays.fill(byteArray, (byte)33);
+
+        rec.setBaseQualities(byteArray);
         rec.setCigarString(cigar);
 
         final byte[] refBases = StringUtil.stringToBytes(reference);
@@ -151,6 +154,9 @@ public class SequenceUtilTest {
         final int nExact = SequenceUtil.countMismatches(rec, refBases, -1, false, false);
         Assert.assertEquals(nExact, expectedMismatchesExact);
 
+        final int sumMismatchesQualityExact = SequenceUtil.sumQualitiesOfMismatches(rec, refBases, -1, false);
+        Assert.assertEquals(sumMismatchesQualityExact, expectedMismatchesExact * 33);
+
         final int nAmbiguous = SequenceUtil.countMismatches(rec, refBases, -1, false, true);
         Assert.assertEquals(nAmbiguous, expectedMismatchesAmbiguous);
     }
@@ -175,6 +181,58 @@ public class SequenceUtilTest {
         };
     }
 
+    @DataProvider(name="mismatchBisulfiteCountsDataProvider")
+    public Object[][] mismatchBisulfiteCountsDataProvider() {
+
+        List<Object[]> tests = new ArrayList<>();
+        final List<String> bases = Arrays.asList("A","C","T","G");
+
+        for (final String base : bases) {
+            for (final String ref : bases) {
+                for (final Boolean strand : Arrays.asList(true, false)) {
+
+                    final Integer count;
+
+                    if (base.equals(ref)) count = 0;
+                    else if (base.equals("A") && ref.equals("G") && !strand) count = 0;
+                    else if (base.equals("T") && ref.equals("C") &&  strand) count = 0;
+                    else count = 1;
+
+                    tests.add(new Object[]{base, "1M", ref, strand, count});
+
+                }
+            }
+        }
+        return tests.toArray(new Object[1][]);
+    }
+
+
+    @Test(dataProvider = "mismatchBisulfiteCountsDataProvider")
+    public void testMismatchBisulfiteCounts(final String readString, final String cigar, final String reference,
+                                            final boolean positiveStrand, final int expectedMismatches) {
+
+        final byte baseQuality = 30;
+        final SAMRecord rec = new SAMRecord(null);
+        rec.setReadName("test");
+        rec.setReadString(readString);
+        rec.setReadNegativeStrandFlag(!positiveStrand);
+        final byte[] byteArray = new byte[readString.length()];
+
+        Arrays.fill(byteArray,baseQuality);
+
+        rec.setBaseQualities(byteArray);
+        rec.setCigarString(cigar);
+
+        final byte[] refBases = StringUtil.stringToBytes(reference);
+
+        final int nExact = SequenceUtil.countMismatches(rec, refBases, -1, true, false);
+        Assert.assertEquals(nExact, expectedMismatches);
+
+        final int sumMismatchesQualityExact = SequenceUtil.sumQualitiesOfMismatches(rec, refBases, -1, true);
+        Assert.assertEquals(sumMismatchesQualityExact, expectedMismatches * baseQuality);
+
+    }
+
     @Test(dataProvider = "countInsertedAndDeletedBasesTestCases")
     public void testCountInsertedAndDeletedBases(final String cigarString, final int insertedBases, final int deletedBases) {
         final Cigar cigar = TextCigarCodec.decode(cigarString);
@@ -204,11 +262,11 @@ public class SequenceUtilTest {
 
     @Test(dataProvider = "testKmerGenerationTestCases")
     public void testKmerGeneration(final int length, final String[] expectedKmers) {
-        final Set<String> actualSet = new HashSet<String>();
+        final Set<String> actualSet = new HashSet<>();
         for (final byte[] kmer : SequenceUtil.generateAllKmers(length)) {
             actualSet.add(StringUtil.bytesToString(kmer));
         }
-        final Set<String> expectedSet = new HashSet<String>(Arrays.asList(expectedKmers));
+        final Set<String> expectedSet = new HashSet<>(Arrays.asList(expectedKmers));
         Assert.assertTrue(actualSet.equals(expectedSet));
     }
 
@@ -450,4 +508,115 @@ public class SequenceUtilTest {
             }
         });
     }
+
+    @DataProvider(name = "testNmFromCigarProvider")
+    Object[][] testNmFromCigar() {
+        return new Object[][]{
+                {"1M", 0},
+                {"1S1D", 1},
+                {"1H3X", 3},
+                {"1H5=3M2X", 2},
+                {"5P5M", 0},
+                {"5S8I", 8}
+        };
+    }
+
+    @Test(dataProvider = "testNmFromCigarProvider")
+    public void testNmTagFromCigar(final String cigarString, final int expectedNmValue) {
+        final SAMRecord rec = new SAMRecord(null);
+        rec.setReadName("test");
+        rec.setCigarString(cigarString);
+
+        Assert.assertEquals(SequenceUtil.calculateSamNmTagFromCigar(rec),expectedNmValue);
+    }
+
+    @Test
+    public void testReverseComplement() {
+        Assert.assertEquals(SequenceUtil.reverseComplement("ABCDEFGHIJKLMNOPQRSTUVWXYZ"),"ZYXWVUASRQPONMLKJIHCFEDGBT");
+        Assert.assertEquals(SequenceUtil.reverseComplement("abcdefghijklmnopqrstuvwxy"),"yxwvuasrqponmlkjihcfedgbt"); //missing "z" on purpose so that we test both even-lengthed and odd-lengthed strings
+    }
+
+    @Test
+    public void testUpperCase() {
+        Assert.assertEquals(SequenceUtil.upperCase(StringUtil.stringToBytes("ABCDEFGHIJKLMNOPQRSTUVWXYZ")), StringUtil.stringToBytes("ABCDEFGHIJKLMNOPQRSTUVWXYZ"));
+        Assert.assertEquals(SequenceUtil.upperCase(StringUtil.stringToBytes("abcdefghijklmnopqrstuvwxyz")), StringUtil.stringToBytes("ABCDEFGHIJKLMNOPQRSTUVWXYZ"));
+        Assert.assertEquals(SequenceUtil.upperCase(StringUtil.stringToBytes("1234567890!@#$%^&*()")), StringUtil.stringToBytes("1234567890!@#$%^&*()"));
+    }
+
+    @Test
+    public void testReverseQualities() {
+
+        final byte[] qualities1 = new byte[] {10, 20, 30, 40};
+        SequenceUtil.reverseQualities(qualities1);
+        assertEquals(qualities1, new byte[] {40, 30, 20, 10});
+
+        final byte[] qualities2 = {10, 20, 30};
+        SequenceUtil.reverseQualities(qualities2);
+        assertEquals(qualities2, new byte[]{30, 20, 10});
+    }
+
+    private void assertEquals(final byte[] actual, final byte[] expected) {
+        Assert.assertEquals(actual.length, expected.length, "Arrays do not have equal lengths");
+
+        for (int i = 0; i < actual.length; ++i) {
+            Assert.assertEquals(actual[i], expected[i], "Array differ at position " + i);
+        }
+    }
+
+    @Test
+    public void testIsACGTN() {
+        for (byte base = Byte.MIN_VALUE; base < Byte.MAX_VALUE; base++) {
+            if (base == 'A' || base == 'C' || base == 'G' || base == 'T' || base == 'N') {
+                Assert.assertTrue(SequenceUtil.isUpperACGTN(base));
+            } else {
+                Assert.assertFalse(SequenceUtil.isUpperACGTN(base));
+            }
+        }
+    }
+
+    @Test
+    public void testIsIUPAC() {
+        final String iupacString = ".aAbBcCdDgGhHkKmMnNrRsStTvVwWyY";
+        for (byte code=0; code<Byte.MAX_VALUE; code++) {
+            if (iupacString.contains(new String (new char[]{(char) code}))) {
+                Assert.assertTrue(SequenceUtil.isIUPAC(code));
+            } else {
+                Assert.assertFalse(SequenceUtil.isIUPAC(code));
+            }
+        }
+    }
+
+    @Test
+    public void testIUPAC_CODES_STRING() {
+        for (final byte code: SequenceUtil.getIUPACCodesString().getBytes()) {
+            Assert.assertTrue(SequenceUtil.isIUPAC(code));
+        }
+    }
+
+    @Test
+    public void testIsBamReadBase() {
+        final String iupacUpperCasedWithoutDot = "=" + SequenceUtil.getIUPACCodesString().toUpperCase().replaceAll("\\.", "N");
+
+        for (byte code = 0; code < Byte.MAX_VALUE; code++) {
+            if (iupacUpperCasedWithoutDot.contains(new String(new char[]{(char) code}))) {
+                Assert.assertTrue(SequenceUtil.isBamReadBase(code));
+            } else {
+                Assert.assertFalse(SequenceUtil.isBamReadBase(code), "" + code);
+            }
+        }
+        Assert.assertTrue(SequenceUtil.isBamReadBase((byte) '='));
+    }
+
+    @Test
+    public void testToBamReadBases() {
+        final String testInput = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_.-=";
+
+        /**
+         * This can be obtained by :
+         * echo 'blah' | tr a-z A-Z | tr -c '=ABCDGHKMNRSTVWY' N
+         */
+        final String expected = "ABCDNNGHNNKNMNNNNRSTNVWNYNABCDNNGHNNKNMNNNNRSTNVWNYNNNN=";
+
+        Assert.assertEquals(SequenceUtil.toBamReadBasesInPlace(testInput.getBytes()), expected.getBytes());
+    }
 }
diff --git a/src/test/java/htsjdk/samtools/util/SnappyLoaderUnitTest.java b/src/test/java/htsjdk/samtools/util/SnappyLoaderUnitTest.java
new file mode 100644
index 0000000..cad4132
--- /dev/null
+++ b/src/test/java/htsjdk/samtools/util/SnappyLoaderUnitTest.java
@@ -0,0 +1,49 @@
+package htsjdk.samtools.util;
+
+import htsjdk.HtsjdkTest;
+import htsjdk.samtools.SAMException;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.*;
+
+public class SnappyLoaderUnitTest extends HtsjdkTest{
+
+    @Test
+    public void testCanLoadAndRoundTripWithSnappy() throws IOException {
+        final SnappyLoader snappyLoader = new SnappyLoader(false);
+        Assert.assertTrue(snappyLoader.isSnappyAvailable());
+        final File tempFile = File.createTempFile("snappyOutput", ".txt");
+        tempFile.deleteOnExit();
+
+        final String toWrite = "Hello Filesystem";
+        try(Writer out = new OutputStreamWriter(snappyLoader.wrapOutputStream(new FileOutputStream(tempFile)))) {
+            out.write(toWrite);
+        }
+        
+        try(LineReader in = new BufferedLineReader(snappyLoader.wrapInputStream(new FileInputStream(tempFile)))){
+            final String recoveredString = in.readLine();
+            Assert.assertEquals(recoveredString, toWrite);
+        }
+    }
+
+    @Test
+    public void testCanDisableSnappy(){
+        final SnappyLoader snappyLoader = new SnappyLoader(true);
+        Assert.assertFalse(snappyLoader.isSnappyAvailable());
+    }
+
+    @Test(expectedExceptions = SAMException.class)
+    public void disabledSnappyCantCreateInputWrappers(){
+        final SnappyLoader snappyLoader = new SnappyLoader(true);
+        Assert.assertFalse(snappyLoader.isSnappyAvailable());
+        snappyLoader.wrapInputStream(new ByteArrayInputStream(new byte[]{ 0,0,0}));
+    }
+
+    @Test(expectedExceptions = SAMException.class)
+    public void disabledSnappyCantCreateOutputWrappers(){
+        final SnappyLoader snappyLoader = new SnappyLoader(true);
+        Assert.assertFalse(snappyLoader.isSnappyAvailable());
+        snappyLoader.wrapOutputStream(new ByteArrayOutputStream(10));
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/htsjdk/samtools/util/SolexaQualityConverterTest.java b/src/test/java/htsjdk/samtools/util/SolexaQualityConverterTest.java
index 09cc829..1e4e146 100644
--- a/src/test/java/htsjdk/samtools/util/SolexaQualityConverterTest.java
+++ b/src/test/java/htsjdk/samtools/util/SolexaQualityConverterTest.java
@@ -1,12 +1,13 @@
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
 import java.util.Arrays;
 
-public class SolexaQualityConverterTest {
+public class SolexaQualityConverterTest extends HtsjdkTest {
     //declared as a staic variable because we reuse it in IlluminaUtilTest
     public static Object[][] SOLEXA_QUALS_TO_PHRED_SCORE = new Object[][] {
                 new Object[]{new byte[]{}, new byte[]{}},
diff --git a/src/test/java/htsjdk/samtools/util/SortingCollectionTest.java b/src/test/java/htsjdk/samtools/util/SortingCollectionTest.java
index 1ec928d..29f0120 100644
--- a/src/test/java/htsjdk/samtools/util/SortingCollectionTest.java
+++ b/src/test/java/htsjdk/samtools/util/SortingCollectionTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.AfterMethod;
 import org.testng.annotations.AfterTest;
@@ -41,7 +42,7 @@ import java.util.Comparator;
 import java.util.Iterator;
 import java.util.Random;
 
-public class SortingCollectionTest {
+public class SortingCollectionTest extends HtsjdkTest {
     // Create a separate directory for files so it is possible to confirm that the directory is emptied
     protected File tmpDir() {
         return new File(System.getProperty("java.io.tmpdir") + "/" + System.getProperty("user.name"), getClass().getSimpleName());
@@ -130,19 +131,23 @@ public class SortingCollectionTest {
             this.numElementsToGenerate = numElementsToGenerate;
         }
 
+        @Override
         public Iterator<String> iterator() {
             return this;
         }
 
+        @Override
         public boolean hasNext() {
             return numElementsGenerated < numElementsToGenerate;
         }
 
+        @Override
         public String next() {
             ++numElementsGenerated;
             return Integer.toString(random.nextInt());
         }
 
+        @Override
         public void remove() {
             throw new UnsupportedOperationException();
         }
@@ -150,6 +155,7 @@ public class SortingCollectionTest {
 
     static class StringComparator implements Comparator<String> {
 
+        @Override
         public int compare(final String s, final String s1) {
             return s.compareTo(s1);
         }
@@ -160,6 +166,7 @@ public class SortingCollectionTest {
         OutputStream os;
         InputStream is;
 
+        @Override
         public SortingCollection.Codec<String> clone() {
             return new StringCodec();
         }
@@ -169,6 +176,7 @@ public class SortingCollectionTest {
          *
          * @param os
          */
+        @Override
         public void setOutputStream(final OutputStream os) {
             this.os = os;
         }
@@ -178,6 +186,7 @@ public class SortingCollectionTest {
          *
          * @param is
          */
+        @Override
         public void setInputStream(final InputStream is) {
             this.is = is;
         }
@@ -187,6 +196,7 @@ public class SortingCollectionTest {
          *
          * @param val what to write
          */
+        @Override
         public void encode(final String val) {
             try {
                 byteBuffer.clear();
@@ -204,6 +214,7 @@ public class SortingCollectionTest {
          * @return null if no more records.  Should throw exception if EOF is encountered in the middle of
          *         a record.
          */
+        @Override
         public String decode() {
             try {
                 byteBuffer.clear();
diff --git a/src/test/java/htsjdk/samtools/util/SortingLongCollectionTest.java b/src/test/java/htsjdk/samtools/util/SortingLongCollectionTest.java
index 4817ef5..bcfa77e 100644
--- a/src/test/java/htsjdk/samtools/util/SortingLongCollectionTest.java
+++ b/src/test/java/htsjdk/samtools/util/SortingLongCollectionTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.AfterTest;
 import org.testng.annotations.BeforeTest;
@@ -36,7 +37,7 @@ import java.util.Random;
 /**
  * @author alecw at broadinstitute.org
  */
-public class SortingLongCollectionTest {
+public class SortingLongCollectionTest extends HtsjdkTest {
     // Create a separate directory for files so it is possible to confirm that the directory is emptied
     private final File tmpDir = new File(System.getProperty("java.io.tmpdir") + "/" + System.getProperty("user.name"),
             "SortingCollectionTest");
diff --git a/src/test/java/htsjdk/samtools/util/StringUtilTest.java b/src/test/java/htsjdk/samtools/util/StringUtilTest.java
deleted file mode 100644
index dbb2a07..0000000
--- a/src/test/java/htsjdk/samtools/util/StringUtilTest.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2009 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package htsjdk.samtools.util;
-
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-/**
- * @author alecw at broadinstitute.org
- */
-public class StringUtilTest {
-    @Test(dataProvider = "provider")
-    public void testSplit(final String input, final String[] expectedResult, final boolean concatenateExcess) {
-        String[] ret = new String[expectedResult.length];
-        int tokensExpected;
-        for (tokensExpected = 0; tokensExpected < expectedResult.length && expectedResult[tokensExpected] != null;
-             ++tokensExpected) {
-        }
-        final int tokensFound;
-        if (concatenateExcess) {
-            tokensFound = StringUtil.splitConcatenateExcessTokens(input, ret, ':');
-        } else {
-           tokensFound = StringUtil.split(input, ret, ':');
-        }
-        Assert.assertEquals(tokensFound, tokensExpected);
-        Assert.assertEquals(ret, expectedResult);
-    }
-
-    @DataProvider(name="provider")
-    public Object[][] splitScenarios() {
-        return new Object[][] {
-                {"A:BB:C", new String[]{"A", "BB", "C"}, false},
-                {"A:BB:C", new String[]{"A", "BB", "C"}, true},
-                {"A:BB", new String[]{"A", "BB", null}, false},
-                {"A:BB", new String[]{"A", "BB", null}, true},
-                {"A:BB:", new String[]{"A", "BB", null}, false},
-                {"A:BB:", new String[]{"A", "BB", null}, true},
-                {"A:BB:C:DDD", new String[]{"A", "BB", "C"}, false},
-                {"A:BB:C:DDD", new String[]{"A", "BB", "C:DDD"}, true},
-                {"A:", new String[]{"A", null, null}, false},
-                {"A:", new String[]{"A", null, null}, true},
-                {"A", new String[]{"A", null, null}, false},
-                {"A", new String[]{"A", null, null}, true},
-                {"A:BB:C", new String[]{"A", "BB", "C"}, false},
-                {"A:BB:C:", new String[]{"A", "BB", "C:"}, true}, 
-        };
-    }
-
-    @DataProvider(name="withinHammingDistanceProvider")
-    public Object[][] isWithinHammingDistanceProvider() {
-        return new Object[][] {
-                {"ATAC", "GCAT", 3, true},
-                {"ATAC", "GCAT", 2, false},
-                {"ATAC", "GCAT", 1, false},
-                {"ATAC", "GCAT", 0, false}
-        };
-    }
-
-    @Test(dataProvider = "withinHammingDistanceProvider")
-    public void testIsWithinHammingDistance(final String s1, final String s2, final int maxHammingDistance, final boolean expectedResult) {
-        Assert.assertEquals(StringUtil.isWithinHammingDistance(s1, s2, maxHammingDistance), expectedResult);
-    }
-
-    @DataProvider(name="withinHammingDistanceExceptionProvider")
-    public Object[][] isWithinHammingDistanceException() {
-        return new Object[][] {
-                {"ATAC", "GCT" , 3},
-                {"ATAC", "AT"  , 2},
-                {"ATAC", "T"   , 1},
-                {""    , "GCAT", 0}
-        };
-    }
-
-    @Test(dataProvider = "withinHammingDistanceExceptionProvider", expectedExceptions = IllegalArgumentException.class)
-    public void testIsWithinHammingDistanceExceptions(final String s1, final String s2, final int maxHammingDistance) {
-        StringUtil.isWithinHammingDistance(s1, s2, maxHammingDistance);
-    }
-
-    @Test(dataProvider = "withinHammingDistanceExceptionProvider", expectedExceptions = IllegalArgumentException.class)
-    public void testHammingDistanceExceptions(final String s1, final String s2, final int maxHammingDistance) {
-        StringUtil.hammingDistance(s1, s2);
-    }
-
-    @DataProvider(name="hammingDistanceProvider")
-    public Object[][] hammingDistance() {
-        return new Object[][] {
-                {"ATAC" , "GCAT" , 3},
-                {"ATAGC", "ATAGC", 0},
-                {"ATAC" , "atac" , 4}, // Hamming distance is case sensitive.
-                {""     , ""     , 0}, // Two empty strings should have Hamming distance of 0.
-                {"nAGTN", "nAGTN", 0} // Ensure that matching Ns are not counted as mismatches.
-        };
-    }
-
-    @Test(dataProvider = "hammingDistanceProvider")
-    public void testHammingDistance(final String s1, final String s2, final int expectedResult) {
-        Assert.assertEquals(StringUtil.hammingDistance(s1, s2), expectedResult);
-    }
-
-}
diff --git a/src/test/java/htsjdk/samtools/util/TrimmingUtilTest.java b/src/test/java/htsjdk/samtools/util/TrimmingUtilTest.java
index 12cffc6..8110839 100644
--- a/src/test/java/htsjdk/samtools/util/TrimmingUtilTest.java
+++ b/src/test/java/htsjdk/samtools/util/TrimmingUtilTest.java
@@ -23,13 +23,14 @@
  */
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
 /**
  * Tests for a simple phred-style quality trimming algorithm.
  */
-public class TrimmingUtilTest {
+public class TrimmingUtilTest extends HtsjdkTest {
     @Test
     public void testEasyCases() {
         Assert.assertEquals(TrimmingUtil.findQualityTrimPoint(byteArray(30,30,30,30,30, 2, 2, 2, 2, 2), 15), 5);
diff --git a/src/test/java/htsjdk/samtools/util/TupleTest.java b/src/test/java/htsjdk/samtools/util/TupleTest.java
index bed4550..431466d 100644
--- a/src/test/java/htsjdk/samtools/util/TupleTest.java
+++ b/src/test/java/htsjdk/samtools/util/TupleTest.java
@@ -1,12 +1,13 @@
 package htsjdk.samtools.util;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
 /**
  * Created by farjoun on 1/29/16.
  */
-public class TupleTest {
+public class TupleTest extends HtsjdkTest {
 
     @Test
     public void testEquals() throws Exception {
@@ -59,4 +60,4 @@ public class TupleTest {
         Assert.assertEquals(new Tuple<>(null, null).toString(), "[null, null]");
 
     }
-}
\ No newline at end of file
+}
diff --git a/src/test/java/htsjdk/tribble/AbstractFeatureReaderTest.java b/src/test/java/htsjdk/tribble/AbstractFeatureReaderTest.java
index 6d65e9d..6d950c6 100644
--- a/src/test/java/htsjdk/tribble/AbstractFeatureReaderTest.java
+++ b/src/test/java/htsjdk/tribble/AbstractFeatureReaderTest.java
@@ -1,5 +1,9 @@
 package htsjdk.tribble;
 
+import com.google.common.jimfs.Configuration;
+import com.google.common.jimfs.Jimfs;
+import htsjdk.HtsjdkTest;
+import htsjdk.samtools.FileTruncatedException;
 import htsjdk.samtools.util.TestUtil;
 import htsjdk.tribble.bed.BEDCodec;
 import htsjdk.tribble.bed.BEDFeature;
@@ -15,6 +19,10 @@ import java.io.File;
 import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.nio.ByteBuffer;
+import java.nio.channels.SeekableByteChannel;
+import java.nio.file.*;
+import java.util.function.Function;
 
 import static org.testng.Assert.*;
 
@@ -22,11 +30,26 @@ import static org.testng.Assert.*;
  * @author jacob
  * @date 2013-Apr-10
  */
-public class AbstractFeatureReaderTest {
+public class AbstractFeatureReaderTest extends HtsjdkTest {
 
     final static String HTTP_INDEXED_VCF_PATH = TestUtil.BASE_URL_FOR_HTTP_TESTS + "ex2.vcf";
     final static String LOCAL_MIRROR_HTTP_INDEXED_VCF_PATH = VariantBaseTest.variantTestDataRoot + "ex2.vcf";
 
+    //the "mangled" versions of the files have an extra byte added to the front of the file that makes them invalid
+    private static final String TEST_PATH = "src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/";
+    private static final String MANGLED_VCF = TEST_PATH + "mangledBaseVariants.vcf";
+    private static final String MANGLED_VCF_INDEX = TEST_PATH + "mangledBaseVariants.vcf.idx";
+    private static final String VCF = TEST_PATH + "baseVariants.vcf";
+    private static final String VCF_INDEX = TEST_PATH + "baseVariants.vcf.idx";
+    private static final String VCF_TABIX_BLOCK_GZIPPED = TEST_PATH + "baseVariants.vcf.gz";
+    private static final String VCF_TABIX_INDEX = TEST_PATH + "baseVariants.vcf.gz.tbi";
+    private static final String MANGLED_VCF_TABIX_BLOCK_GZIPPED = TEST_PATH + "baseVariants.mangled.vcf.gz";
+    private static final String MANGLED_VCF_TABIX_INDEX = TEST_PATH + "baseVariants.mangled.vcf.gz.tbi";
+    private static final String CORRUPTED_VCF_INDEX = TEST_PATH + "corruptedBaseVariants.vcf.idx";
+
+    //wrapper which skips the first byte of a file and leaves the rest unchanged
+    private static final Function<SeekableByteChannel, SeekableByteChannel> WRAPPER = SkippingByteChannel::new;
+
     /**
      * Asserts readability and correctness of VCF over HTTP.  The VCF is indexed and requires and index.
      */
@@ -65,12 +88,12 @@ public class AbstractFeatureReaderTest {
         };
     }
 
-    @Test(enabled = true, dataProvider = "blockCompressedExtensionExtensionStrings")
+    @Test(dataProvider = "blockCompressedExtensionExtensionStrings")
     public void testBlockCompressionExtensionString(final String testString, final boolean expected) {
         Assert.assertEquals(AbstractFeatureReader.hasBlockCompressedExtension(testString), expected);
     }
 
-    @Test(enabled = true, dataProvider = "blockCompressedExtensionExtensionStrings")
+    @Test(dataProvider = "blockCompressedExtensionExtensionStrings")
     public void testBlockCompressionExtensionFile(final String testString, final boolean expected) {
         Assert.assertEquals(AbstractFeatureReader.hasBlockCompressedExtension(new File(testString)), expected);
     }
@@ -103,10 +126,148 @@ public class AbstractFeatureReaderTest {
         };
     }
 
-    @Test(enabled = true, dataProvider = "blockCompressedExtensionExtensionURIStrings")
+    @Test(dataProvider = "blockCompressedExtensionExtensionURIStrings")
     public void testBlockCompressionExtension(final String testURIString, final boolean expected) throws URISyntaxException {
         URI testURI = URI.create(testURIString);
         Assert.assertEquals(AbstractFeatureReader.hasBlockCompressedExtension(testURI), expected);
     }
 
+
+    @DataProvider(name = "vcfFileAndWrapperCombinations")
+    private static Object[][] vcfFileAndWrapperCombinations(){
+        return new Object[][] {
+                {VCF, VCF_INDEX, null, null},
+                {MANGLED_VCF, MANGLED_VCF_INDEX, WRAPPER, WRAPPER},
+                {VCF, MANGLED_VCF_INDEX, null, WRAPPER},
+                {MANGLED_VCF, VCF_INDEX, WRAPPER, null},
+                {MANGLED_VCF_TABIX_BLOCK_GZIPPED, MANGLED_VCF_TABIX_INDEX, WRAPPER, WRAPPER},
+                {VCF_TABIX_BLOCK_GZIPPED, MANGLED_VCF_TABIX_INDEX, null, WRAPPER},
+                {MANGLED_VCF_TABIX_BLOCK_GZIPPED, VCF_TABIX_INDEX, WRAPPER, null},
+                {VCF_TABIX_BLOCK_GZIPPED, VCF_TABIX_INDEX, null, null},
+        };
+    }
+
+    @Test(dataProvider = "vcfFileAndWrapperCombinations")
+    public void testGetFeatureReaderWithPathAndWrappers(String file, String index,
+                                                        Function<SeekableByteChannel, SeekableByteChannel> wrapper,
+                                                        Function<SeekableByteChannel, SeekableByteChannel> indexWrapper) throws IOException, URISyntaxException {
+        try(FileSystem fs = Jimfs.newFileSystem("test", Configuration.unix());
+            final AbstractFeatureReader<VariantContext, ?> featureReader = getFeatureReader(file, index, wrapper,
+                                                                                            indexWrapper,
+                                                                                            new VCFCodec(),
+                                                                                            fs)){
+            Assert.assertTrue(featureReader.hasIndex());
+            Assert.assertEquals(featureReader.iterator().toList().size(), 26);
+            Assert.assertEquals(featureReader.query("1", 190, 210).toList().size(), 3);
+            Assert.assertEquals(featureReader.query("2", 190, 210).toList().size(), 1);
+        }
+    }
+
+    @DataProvider(name = "failsWithoutWrappers")
+    private static Object[][] failsWithoutWrappers(){
+        return new Object[][] {
+                {MANGLED_VCF, MANGLED_VCF_INDEX},
+                {VCF, CORRUPTED_VCF_INDEX},
+                {VCF, MANGLED_VCF_INDEX},
+                {MANGLED_VCF, VCF_INDEX},
+                {MANGLED_VCF_TABIX_BLOCK_GZIPPED, MANGLED_VCF_TABIX_INDEX},
+                {VCF_TABIX_BLOCK_GZIPPED, MANGLED_VCF_TABIX_INDEX},
+                {MANGLED_VCF_TABIX_BLOCK_GZIPPED, VCF_TABIX_INDEX},
+        };
+    }
+
+    @Test(dataProvider = "failsWithoutWrappers", expectedExceptions = {TribbleException.class, FileTruncatedException.class})
+    public void testFailureIfNoWrapper(String file, String index) throws IOException, URISyntaxException {
+        try(final FileSystem fs = Jimfs.newFileSystem("test", Configuration.unix());
+            final FeatureReader<?> reader = getFeatureReader(file, index, null, null, new VCFCodec(), fs)){
+            // should have exploded by now
+        }
+    }
+
+    private static <T extends Feature> AbstractFeatureReader<T, ?> getFeatureReader(String vcf, String index,
+                                                                                    Function<SeekableByteChannel, SeekableByteChannel> wrapper,
+                                                                                    Function<SeekableByteChannel, SeekableByteChannel> indexWrapper,
+                                                                                    FeatureCodec<T, ?> codec,
+                                                                                    FileSystem fileSystem) throws IOException, URISyntaxException {
+        final Path vcfInJimfs = getTribbleFileInJimfs(vcf, index, fileSystem);
+        return AbstractFeatureReader.getFeatureReader(
+                vcfInJimfs.toUri().toString(),
+                null,
+                codec,
+                true,
+                wrapper,
+                indexWrapper);
+    }
+
+    /**
+     * skip the first byte of a SeekableByteChannel
+     */
+    private static class SkippingByteChannel implements SeekableByteChannel{
+        private final int toSkip;
+        private final SeekableByteChannel input;
+
+       private SkippingByteChannel(SeekableByteChannel input) {
+           this.toSkip = 1;
+           try {
+               this.input = input;
+               input.position(toSkip);
+           } catch (final IOException e){
+               throw new RuntimeException(e);
+           }
+       }
+
+       @Override
+        public boolean isOpen() {
+            return input.isOpen();
+        }
+
+        @Override
+        public void close() throws IOException {
+            input.close();
+        }
+
+        @Override
+        public int read(ByteBuffer dst) throws IOException {
+           return input.read(dst);
+        }
+
+        @Override
+        public int write(ByteBuffer src) throws IOException {
+            throw new UnsupportedOperationException("Read only");
+        }
+
+        @Override
+        public long position() throws IOException {
+            return input.position() - toSkip;
+        }
+
+        @Override
+        public SeekableByteChannel position(long newPosition) throws IOException {
+            if (newPosition < 0 ){
+                throw new RuntimeException("negative position not allowed");
+            }
+            return input.position( newPosition + toSkip);
+        }
+
+        @Override
+        public long size() throws IOException {
+            return input.size() - toSkip;
+        }
+
+        @Override
+        public SeekableByteChannel truncate(long size) throws IOException {
+            return input.truncate(size + toSkip);
+        }
+    };
+
+    private static Path getTribbleFileInJimfs(String vcf, String index, FileSystem fileSystem) throws IOException, URISyntaxException {
+        final FileSystem fs = fileSystem;
+        final Path root = fs.getPath("/");
+        final Path vcfPath = Paths.get(vcf);
+        final Path idxPath = Paths.get(index);
+        final Path idxDestination = Paths.get(AbstractFeatureReader.isTabix(vcf, index) ? Tribble.tabixIndexFile(vcf) : Tribble.indexFile(vcf));
+        Files.copy(idxPath, root.resolve(idxDestination.getFileName().toString()));
+        return Files.copy(vcfPath, root.resolve(vcfPath.getFileName().toString()));
+    }
+
 }
diff --git a/src/test/java/htsjdk/tribble/AsciiFeatureCodecTest.java b/src/test/java/htsjdk/tribble/AsciiFeatureCodecTest.java
new file mode 100644
index 0000000..b62e02e
--- /dev/null
+++ b/src/test/java/htsjdk/tribble/AsciiFeatureCodecTest.java
@@ -0,0 +1,35 @@
+package htsjdk.tribble;
+
+import htsjdk.samtools.util.LocationAware;
+import htsjdk.tribble.readers.LineIterator;
+import htsjdk.variant.variantcontext.VariantContext;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.ByteArrayInputStream;
+
+public class AsciiFeatureCodecTest {
+
+    @Test
+    public void testMakeIndexableSourceFromUnknownStream() {
+        // test the case where we try to create a codec using a stream that is neither a
+        // BlockCompressedInputStream nor a PositionalBufferedStream
+        final ByteArrayInputStream is = new ByteArrayInputStream(new byte[10]);
+        LocationAware locationAware = new AsciiFeatureCodec<VariantContext>(VariantContext.class) {
+            public Object readActualHeader(final LineIterator reader) {
+                return new Object();
+            }
+
+            @Override
+            public VariantContext decode(String s) {
+                return null;
+            }
+
+            @Override
+            public boolean canDecode(String path) {
+                return false;
+            }
+        }.makeIndexableSourceFromStream(is);
+        Assert.assertEquals(locationAware.getPosition(), 0);
+    }
+}
diff --git a/src/test/java/htsjdk/tribble/BinaryFeaturesTest.java b/src/test/java/htsjdk/tribble/BinaryFeaturesTest.java
index eff8939..eac1974 100644
--- a/src/test/java/htsjdk/tribble/BinaryFeaturesTest.java
+++ b/src/test/java/htsjdk/tribble/BinaryFeaturesTest.java
@@ -1,5 +1,6 @@
 package htsjdk.tribble;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.tribble.bed.BEDCodec;
 import htsjdk.tribble.example.ExampleBinaryCodec;
 import htsjdk.tribble.readers.LineIterator;
@@ -13,7 +14,7 @@ import java.util.Iterator;
 import java.util.List;
 
 
-public class BinaryFeaturesTest {
+public class BinaryFeaturesTest extends HtsjdkTest {
     @DataProvider(name = "BinaryFeatureSources")
     public Object[][] createData1() {
         return new Object[][] {
diff --git a/src/test/java/htsjdk/tribble/FeatureReaderTest.java b/src/test/java/htsjdk/tribble/FeatureReaderTest.java
index d62693c..f43b5b1 100644
--- a/src/test/java/htsjdk/tribble/FeatureReaderTest.java
+++ b/src/test/java/htsjdk/tribble/FeatureReaderTest.java
@@ -1,5 +1,6 @@
 package htsjdk.tribble;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.seekablestream.SeekableFileStream;
 import htsjdk.samtools.util.CloserUtil;
 import htsjdk.samtools.util.LocationAware;
@@ -22,7 +23,7 @@ import java.util.Iterator;
 import java.util.List;
 
 
-public class FeatureReaderTest {
+public class FeatureReaderTest extends HtsjdkTest {
     private final static File asciiBedFile = new File(TestUtils.DATA_DIR + "test.bed");
     private File binaryBedFile;
     private final static File tabixBedFile = new File(TestUtils.DATA_DIR + "test.tabix.bed.gz");
diff --git a/src/test/java/htsjdk/tribble/TribbleIndexFeatureReaderTest.java b/src/test/java/htsjdk/tribble/TribbleIndexFeatureReaderTest.java
index afdd827..37a5295 100644
--- a/src/test/java/htsjdk/tribble/TribbleIndexFeatureReaderTest.java
+++ b/src/test/java/htsjdk/tribble/TribbleIndexFeatureReaderTest.java
@@ -1,7 +1,7 @@
 package htsjdk.tribble;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.tribble.readers.LineIterator;
-import htsjdk.tribble.TestUtils;
 import htsjdk.variant.variantcontext.VariantContext;
 import htsjdk.variant.vcf.VCFCodec;
 import org.testng.Assert;
@@ -9,56 +9,9 @@ import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
 import java.io.IOException;
-import java.net.URISyntaxException;
 
-import static org.testng.Assert.assertEquals;
 
-
-public class TribbleIndexFeatureReaderTest {
-
-    @DataProvider(name = "extensionURIStrings")
-    public Object[][] createBlockCompressedExtensionURIs() {
-        return new Object[][]{
-                {"testzip.gz", true},
-                {"testzip.GZ", true},
-                {"testzip.gZ", true},
-                {"testzip.Gz", true},
-
-                {"test", false},
-                {"test.gzip", false},
-                {"test.bgz", false},
-                {"test.bgzf", false},
-                {"test.bzip2", false},
-
-                {"file://testzip.gz", true},
-                {"file://apath/testzip.gz", true},
-
-                {"https://www.googleapis.com/download/storage/v1/b/deflaux-public-test/o/NA12877.vcf.gz", true},
-                {"https://www.googleapis.com/download/storage/v1/b/deflaux-public-test/o/NA12877.vcf.GZ", true},
-                {"https://www.googleapis.com/download/storage/v1/b/deflaux-public-test/o/NA12877.vcf.gzip", false},
-                {"https://www.googleapis.com/download/storage/v1/b/deflaux-public-test/o/NA12877.vcf.bgz", false},
-                {"https://www.googleapis.com/download/storage/v1/b/deflaux-public-test/o/NA12877.vcf.bgzf", false},
-                {"https://www.googleapis.com/download/storage/v1/b/deflaux-public-test/o/NA12877.vcf.bzip2", false},
-                {"https://www.googleapis.com/download/storage/v1/b/deflaux-public-test/o/NA12877", false},
-
-                {"https://www.googleapis.com/download/storage/v1/b/deflaux-public-test/o/NA12877.vcf.gz?alt=media", true},
-                {"https://www.googleapis.com/download/storage/v1/b/deflaux-public-test/o/NA12877.vcf.GZ?alt=media", true},
-                {"https://www.googleapis.com/download/storage/v1/b/deflaux-public-test/o/NA12877.vcf.gzip?alt=media", false},
-                {"https://www.googleapis.com/download/storage/v1/b/deflaux-public-test/o/NA12877.vcf.bgz?alt=media", false},
-                {"https://www.googleapis.com/download/storage/v1/b/deflaux-public-test/o/NA12877.vcf.bgzf?alt=media", false},
-                {"https://www.googleapis.com/download/storage/v1/b/deflaux-public-test/o/NA12877.vcf.bzip2?alt=media", false},
-
-                {"ftp://ftp.broadinstitute.org/distribution/igv/TEST/cpgIslands.hg18.gz", true},
-                {"ftp://ftp.broadinstitute.org/distribution/igv/TEST/cpgIslands.hg18.bed", false},
-
-                {"https://www.googleapis.com/download/storage/v1/b/deflaux-public-test/o/NA12877.vcf.gz", true},
-        };
-    }
-
-    @Test(enabled = true, dataProvider = "extensionURIStrings")
-    public void testGZExtension(final String testString, final boolean expected) throws URISyntaxException {
-        Assert.assertEquals(TribbleIndexedFeatureReader.isGZIPPath(testString), expected);
-    }
+public class TribbleIndexFeatureReaderTest extends HtsjdkTest {
 
     @DataProvider(name = "featureFileStrings")
     public Object[][] createFeatureFileStrings() {
@@ -74,7 +27,7 @@ public class TribbleIndexFeatureReaderTest {
     public void testIndexedGZIPVCF(final String testPath, final int expectedCount) throws IOException {
         final VCFCodec codec = new VCFCodec();
         try (final TribbleIndexedFeatureReader<VariantContext, LineIterator> featureReader =
-                new TribbleIndexedFeatureReader(testPath, codec, false)) {
+                new TribbleIndexedFeatureReader<>(testPath, codec, false)) {
             final CloseableTribbleIterator<VariantContext> localIterator = featureReader.iterator();
             int count = 0;
             for (final Feature feat : featureReader.iterator()) {
diff --git a/src/test/java/htsjdk/tribble/TribbleTest.java b/src/test/java/htsjdk/tribble/TribbleTest.java
index e8366c4..3874c7f 100644
--- a/src/test/java/htsjdk/tribble/TribbleTest.java
+++ b/src/test/java/htsjdk/tribble/TribbleTest.java
@@ -1,5 +1,6 @@
 package htsjdk.tribble;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.tribble.util.TabixUtils;
 import org.testng.Assert;
 import org.testng.annotations.Test;
@@ -7,7 +8,7 @@ import org.testng.annotations.Test;
 import java.io.File;
 
 
-public class TribbleTest {
+public class TribbleTest extends HtsjdkTest {
 
     @Test
     public void testStandardIndex() {
diff --git a/src/test/java/htsjdk/tribble/bed/BEDCodecTest.java b/src/test/java/htsjdk/tribble/bed/BEDCodecTest.java
index dbf23a0..df8a95e 100644
--- a/src/test/java/htsjdk/tribble/bed/BEDCodecTest.java
+++ b/src/test/java/htsjdk/tribble/bed/BEDCodecTest.java
@@ -24,26 +24,63 @@
 
 package htsjdk.tribble.bed;
 
+import htsjdk.HtsjdkTest;
+import htsjdk.samtools.util.BlockCompressedFilePointerUtil;
+import htsjdk.samtools.util.BlockCompressedInputStream;
 import htsjdk.tribble.AbstractFeatureReader;
 import htsjdk.tribble.Feature;
+import htsjdk.tribble.FeatureReader;
 import htsjdk.tribble.TestUtils;
 import htsjdk.tribble.annotation.Strand;
 import htsjdk.tribble.bed.FullBEDFeature.Exon;
-import htsjdk.tribble.index.IndexFactory;
-import htsjdk.tribble.index.linear.LinearIndex;
 import htsjdk.tribble.index.tabix.TabixFormat;
-import htsjdk.tribble.util.LittleEndianOutputStream;
+import htsjdk.tribble.readers.AsciiLineReaderIterator;
+import htsjdk.tribble.readers.LineIterator;
+import htsjdk.tribble.readers.PositionalBufferedStream;
+import htsjdk.tribble.util.ParsingUtils;
 import org.testng.Assert;
+import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
 import java.awt.*;
-import java.io.BufferedOutputStream;
 import java.io.File;
-import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.InputStream;
 import java.util.List;
 
-public class BEDCodecTest {
+public class BEDCodecTest extends HtsjdkTest {
+
+    @DataProvider(name = "gzippedBedTestData")
+    public Object[][] getBedTestData(){
+        return new Object[][] {
+                {
+                    // BGZP BED file with no header, 2 features
+                    new File(TestUtils.DATA_DIR, "bed/2featuresNoHeader.bed.gz"), 0  // header has length 0
+                },
+                {
+                    // BGZP BED file with one line header, 2 features
+                    new File(TestUtils.DATA_DIR, "bed/2featuresWithHeader.bed.gz"), 10 // header has length 10
+
+                }
+        };
+    }
+
+    @Test(dataProvider = "gzippedBedTestData")
+    public void testReadActualHeader(final File gzippedBedFile, final int firstFeatureOffset) throws IOException {
+        // Given an indexable SOURCE on a BED file, test that readActualHeader retains the correct offset
+        // of the first feature, whether there is a header or not
+        BEDCodec bedCodec = new BEDCodec();
+        try (final InputStream is = ParsingUtils.openInputStream(gzippedBedFile.getPath());
+             final BlockCompressedInputStream bcis = new BlockCompressedInputStream(is))
+        {
+            AsciiLineReaderIterator it = (AsciiLineReaderIterator) bedCodec.makeIndexableSourceFromStream(bcis);
+            Object header = bedCodec.readActualHeader(it);
+            // BEDCodec doesn't model or return the BED header, even when there is one!
+            Assert.assertNull(header);
+            Assert.assertEquals(BlockCompressedFilePointerUtil.getBlockAddress(it.getPosition()), 0);
+            Assert.assertEquals(BlockCompressedFilePointerUtil.getBlockOffset(it.getPosition()), firstFeatureOffset);
+        }
+    }
 
     @Test
     public void testSimpleDecode() {
@@ -204,24 +241,6 @@ public class BEDCodecTest {
         reader.close();
     }
 
-    private void createIndex(File testFile, File idxFile) throws IOException {
-        // Create an index if missing
-        if (idxFile.exists()) {
-            idxFile.delete();
-        }
-        LinearIndex idx = (LinearIndex) IndexFactory.createLinearIndex(testFile, new BEDCodec());
-
-        LittleEndianOutputStream stream = null;
-        try {
-            stream = new LittleEndianOutputStream(new BufferedOutputStream(new FileOutputStream(idxFile)));
-            idx.write(stream);
-        } finally {
-            if (stream != null) {
-                stream.close();
-            }
-        }
-    }
-
     @Test
     public void testGetTabixFormat() {
         Assert.assertEquals(new BEDCodec().getTabixFormat(), TabixFormat.BED);
diff --git a/src/test/java/htsjdk/tribble/gelitext/GeliTextTest.java b/src/test/java/htsjdk/tribble/gelitext/GeliTextTest.java
deleted file mode 100644
index c670bf1..0000000
--- a/src/test/java/htsjdk/tribble/gelitext/GeliTextTest.java
+++ /dev/null
@@ -1,100 +0,0 @@
-package htsjdk.tribble.gelitext;
-
-import htsjdk.tribble.AbstractFeatureReader;
-import htsjdk.tribble.FeatureReader;
-import htsjdk.tribble.TestUtils;
-import htsjdk.tribble.index.Index;
-import htsjdk.tribble.index.IndexFactory;
-import org.testng.Assert;
-import org.testng.annotations.BeforeSuite;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Iterator;
-
-
-/**
- * @author aaron
- *         <p/>
- *         Class GeliTextTest
- *         <p/>
- *         test out the geli text source codec and feature
- */
- at Deprecated
-public class GeliTextTest {
-    public static final File testFile = new File(TestUtils.DATA_DIR + "testGeliText.txt");
-    public static Index index;
-    private FeatureReader<GeliTextFeature> source;
-
-    // setup a new source before each class
-
-    @BeforeSuite
-    public void beforeTest() {
-        index = IndexFactory.createLinearIndex(testFile, new GeliTextCodec());
-        source = AbstractFeatureReader.getFeatureReader(testFile.getAbsolutePath(), new GeliTextCodec(), index);
-    }
-
-    @Test
-    public void testReadAllLines() {
-        // Query
-        try {
-            Iterator<GeliTextFeature> iter = source.query("22", 14438070, 14592250);
-            int count = 0;
-            while (iter.hasNext()) {
-                GeliTextFeature feat = iter.next();
-                count++;
-            }
-            Assert.assertEquals(count, 50);
-        } catch (IOException e) {
-            Assert.fail("failed to generate iterator from feature source");
-        }
-    }
-
-    @Test
-    public void testGetSubRegion() {
-        // Query
-        try {
-            Iterator<GeliTextFeature> iter = source.query("22", 14438070, 14539060); // should be the first 41 records
-            int count = 0;
-            while (iter.hasNext()) {
-                GeliTextFeature feat = iter.next();
-                count++;
-            }
-            Assert.assertEquals(count, 41);
-        } catch (IOException e) {
-            Assert.fail("failed to generate iterator from feature source");
-        }
-    }
-
-    @Test
-    public void testFirstRecord() {
-        // Query
-        try {
-            Iterator<GeliTextFeature> iter = source.query("22", 14438070, 14592250);
-            int count = 0;
-
-            GeliTextFeature feat = iter.next();
-            // check the first records contents
-            // 22 14438070 A   0 0     GG      33.2618 33.2618 0       0       0       0     0 0       0       33.2618 0       0
-            Assert.assertTrue("22".equals(feat.getContig()));
-            Assert.assertEquals(feat.getStart(), 14438070);
-            Assert.assertEquals('A', feat.getRefBase());
-            Assert.assertEquals(feat.getDepthOfCoverage(), 0.0, 0.0001);
-            Assert.assertEquals(feat.getMaximumMappingQual(), 0.0, 0.0001);
-            Assert.assertTrue(DiploidGenotype.GG.equals(feat.getGenotype()));
-            Assert.assertEquals(feat.getDepthOfCoverage(), 0.0, 0.0001);
-            Assert.assertEquals(feat.getLODBestToReference(), 33.2618, 0.0001);
-            Assert.assertEquals(feat.getLODBestToNext(), 33.2618, 0.0001);
-            for (int x = 0; x < feat.getLikelihoods().length; x++) {
-                if (x == DiploidGenotype.GG.ordinal())
-                    Assert.assertEquals(feat.getLikelihoods()[x], 33.2618, 0.0001);
-                else
-                    Assert.assertEquals(feat.getLikelihoods()[x], 0, 0.0001);
-            }
-
-        } catch (IOException e) {
-            Assert.fail("failed to generate iterator from feature source");
-        }
-    }
-}
diff --git a/src/test/java/htsjdk/tribble/index/IndexFactoryTest.java b/src/test/java/htsjdk/tribble/index/IndexFactoryTest.java
index 016049f..59038cf 100644
--- a/src/test/java/htsjdk/tribble/index/IndexFactoryTest.java
+++ b/src/test/java/htsjdk/tribble/index/IndexFactoryTest.java
@@ -23,16 +23,20 @@
  */
 package htsjdk.tribble.index;
 
+import com.google.common.io.Files;
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SAMSequenceDictionary;
 import htsjdk.samtools.SAMSequenceRecord;
 import htsjdk.samtools.util.IOUtil;
+import htsjdk.samtools.util.Interval;
 import htsjdk.tribble.TestUtils;
+import htsjdk.tribble.Tribble;
 import htsjdk.tribble.TribbleException;
 import htsjdk.tribble.bed.BEDCodec;
-import htsjdk.tribble.index.linear.LinearIndex;
 import htsjdk.tribble.index.tabix.TabixFormat;
 import htsjdk.tribble.index.tabix.TabixIndex;
-import htsjdk.tribble.util.LittleEndianOutputStream;
+import htsjdk.variant.bcf2.BCF2Codec;
+import htsjdk.variant.variantcontext.VariantContext;
 import htsjdk.variant.vcf.VCFCodec;
 import htsjdk.variant.vcf.VCFFileReader;
 import org.testng.Assert;
@@ -41,23 +45,26 @@ import org.testng.annotations.Test;
 
 import java.io.File;
 import java.io.IOException;
-import java.io.OutputStream;
+import java.util.Iterator;
 import java.util.List;
 
 /**
  * User: jacob
  * Date: 2012-Aug-23
  */
-public class IndexFactoryTest {
+public class IndexFactoryTest extends HtsjdkTest {
 
-    final File sortedBedFile = new File(TestUtils.DATA_DIR + "bed/Unigene.sample.bed");
-    final File unsortedBedFile = new File(TestUtils.DATA_DIR + "bed/unsorted.bed");
-    final File discontinuousFile = new File(TestUtils.DATA_DIR + "bed/disconcontigs.bed");
-    final BEDCodec bedCodec = new BEDCodec();
+    @DataProvider(name = "bedDataProvider")
+    public Object[][] getLinearIndexFactoryTypes() {
+        return new Object[][] {
+                { new File(TestUtils.DATA_DIR, "bed/Unigene.sample.bed") },
+                { new File(TestUtils.DATA_DIR, "bed/Unigene.sample.bed.gz") }
+        };
+    }
 
-    @Test
-    public void testCreateLinearIndex() throws Exception {
-        Index index = IndexFactory.createLinearIndex(sortedBedFile, bedCodec);
+    @Test(dataProvider = "bedDataProvider")
+    public void testCreateLinearIndexFromBED(final File inputBEDFIle) throws Exception {
+        Index index = IndexFactory.createLinearIndex(inputBEDFIle, new BEDCodec());
         String chr = "chr2";
 
         Assert.assertTrue(index.getSequenceNames().contains(chr));
@@ -72,17 +79,20 @@ public class IndexFactoryTest {
 
     @Test(expectedExceptions = TribbleException.MalformedFeatureFile.class, dataProvider = "indexFactoryProvider")
     public void testCreateIndexUnsorted(IndexFactory.IndexType type) throws Exception{
-        Index index = IndexFactory.createIndex(unsortedBedFile, bedCodec, type);
+        final File unsortedBedFile = new File(TestUtils.DATA_DIR, "bed/unsorted.bed");
+        IndexFactory.createIndex(unsortedBedFile, new BEDCodec(), type);
     }
 
     @Test(expectedExceptions = TribbleException.MalformedFeatureFile.class, dataProvider = "indexFactoryProvider")
     public void testCreateIndexDiscontinuousContigs(IndexFactory.IndexType type) throws Exception{
-        Index index = IndexFactory.createIndex(discontinuousFile, bedCodec, type);
+        final File discontinuousFile = new File(TestUtils.DATA_DIR,"bed/disconcontigs.bed");
+        IndexFactory.createIndex(discontinuousFile, new BEDCodec(), type);
     }
 
     @DataProvider(name = "indexFactoryProvider")
     public Object[][] getIndexFactoryTypes(){
         return new Object[][] {
+                new Object[] { IndexFactory.IndexType.TABIX },
                 new Object[] { IndexFactory.IndexType.LINEAR },
                 new Object[] { IndexFactory.IndexType.INTERVAL_TREE }
         };
@@ -96,7 +106,7 @@ public class IndexFactoryTest {
         final SAMSequenceDictionary vcfDict = readerVcf.getFileHeader().getSequenceDictionary();
         final TabixIndex tabixIndexVcf =
                 IndexFactory.createTabixIndex(inputFileVcf, new VCFCodec(), TabixFormat.VCF,
-                vcfDict);
+                        vcfDict);
 
         // index the same bgzipped VCF
         final File inputFileVcfGz = new File("src/test/resources/htsjdk/tribble/tabix/testTabixIndex.vcf.gz");
@@ -116,5 +126,98 @@ public class IndexFactoryTest {
                     tabixIndexVcfGz.containsChromosome(samSequenceRecord.getSequenceName()),
                     "Tabix indexed (bgzipped) VCF does not contain sequence: " + samSequenceRecord.getSequenceName());
         }
+
+    }
+
+    @DataProvider(name = "vcfDataProvider")
+    public Object[][] getVCFIndexData(){
+        return new Object[][] {
+                new Object[] {
+                        new File(TestUtils.DATA_DIR, "tabix/4featuresHG38Header.vcf.gz"),
+                        new Interval("chr6", 33414233, 118314029)
+                },
+                new Object[] {
+                        new File(TestUtils.DATA_DIR, "tabix/4featuresHG38Header.vcf"),
+                        new Interval("chr6", 33414233, 118314029)
+                },
+        };
+    }
+
+    @Test(dataProvider = "vcfDataProvider")
+    public void testCreateTabixIndexFromVCF(
+            final File inputVCF,
+            final Interval queryInterval) throws IOException {
+        // copy the original file and create the index for the copy
+        final File tempDir = IOUtil.createTempDir("testCreateTabixIndexFromVCF", null);
+        tempDir.deleteOnExit();
+        final File tmpVCF = new File(tempDir, inputVCF.getName());
+        Files.copy(inputVCF, tmpVCF);
+        tmpVCF.deleteOnExit();
+
+        // this test creates a TABIX index (.tbi)
+        final TabixIndex tabixIndexGz = IndexFactory.createTabixIndex(tmpVCF, new VCFCodec(), null);
+        tabixIndexGz.writeBasedOnFeatureFile(tmpVCF);
+        final File tmpIndex = Tribble.tabixIndexFile(tmpVCF);
+        tmpIndex.deleteOnExit();
+
+        try (final VCFFileReader originalReader = new VCFFileReader(inputVCF,false);
+            final VCFFileReader tmpReader = new VCFFileReader(tmpVCF, tmpIndex,true)) {
+            Iterator<VariantContext> originalIt = originalReader.iterator();
+            Iterator<VariantContext> tmpIt = tmpReader.query(queryInterval.getContig(), queryInterval.getStart(), queryInterval.getEnd());
+            while (originalIt.hasNext()) {
+                Assert.assertTrue(tmpIt.hasNext(), "variants missing from gzip query");
+                VariantContext vcTmp = tmpIt.next();
+                VariantContext vcOrig = originalIt.next();
+                Assert.assertEquals(vcOrig.getContig(), vcTmp.getContig());
+                Assert.assertEquals(vcOrig.getStart(), vcTmp.getStart());
+                Assert.assertEquals(vcOrig.getEnd(), vcTmp.getEnd());
+            }
+        }
+    }
+
+    @DataProvider(name = "bcfDataFactory")
+    public Object[][] getBCFData(){
+        return new Object[][] {
+                //TODO: this needs more test cases, including block compressed and indexed, but bcftools can't
+                // generate indices for BCF2.1 files, which is all HTSJDK can read, and htsjdk also can't read/write
+                // block compressed BCFs (https://github.com/samtools/htsjdk/issues/946)
+                new Object[] {
+                        new File("src/test/resources/htsjdk/variant/serialization_test.bcf")
+                }
+        };
+    }
+
+    @Test(dataProvider = "bcfDataFactory")
+    public void testCreateLinearIndexFromBCF(final File inputBCF) throws IOException {
+        // copy the original file and create the index for the copy
+        final File tempDir = IOUtil.createTempDir("testCreateIndexFromBCF", null);
+        tempDir.deleteOnExit();
+        final File tmpBCF = new File(tempDir, inputBCF.getName());
+        Files.copy(inputBCF, tmpBCF);
+        tmpBCF.deleteOnExit();
+
+        // NOTE: this test creates a LINEAR index (.idx)
+        final Index index = IndexFactory.createIndex(tmpBCF, new BCF2Codec(), IndexFactory.IndexType.LINEAR);
+        index.writeBasedOnFeatureFile(tmpBCF);
+        final File tempIndex = Tribble.indexFile(tmpBCF);
+        tempIndex.deleteOnExit();
+
+        try (final VCFFileReader originalReader = new VCFFileReader(inputBCF,false);
+            final VCFFileReader tmpReader = new VCFFileReader(tmpBCF, tempIndex,true)) {
+            final Iterator<VariantContext> originalIt = originalReader.iterator();
+            while (originalIt.hasNext()) {
+                // we don't have an externally generated index file for the original input, so iterate through each variant
+                // and use the generated index to query for the same variant in the indexed copy of the input
+                final VariantContext vcOrig = originalIt.next();
+                final Interval queryInterval = new Interval(vcOrig.getContig(), vcOrig.getStart(), vcOrig.getEnd());
+                final Iterator<VariantContext> tmpIt = tmpReader.query(queryInterval.getContig(), queryInterval.getStart(), queryInterval.getEnd());
+                Assert.assertTrue(tmpIt.hasNext(), "Variant not returned from indexed file");
+                final VariantContext vcTmp = tmpIt.next();
+                Assert.assertEquals(vcOrig.getContig(), vcTmp.getContig());
+                Assert.assertEquals(vcOrig.getStart(), vcTmp.getStart());
+                Assert.assertEquals(vcOrig.getEnd(), vcTmp.getEnd());
+                Assert.assertFalse(tmpIt.hasNext()); // make sure there is only one matching variant
+            }
+        }
     }
 }
diff --git a/src/test/java/htsjdk/tribble/index/IndexTest.java b/src/test/java/htsjdk/tribble/index/IndexTest.java
index aa179a9..2764f28 100644
--- a/src/test/java/htsjdk/tribble/index/IndexTest.java
+++ b/src/test/java/htsjdk/tribble/index/IndexTest.java
@@ -1,12 +1,15 @@
 package htsjdk.tribble.index;
 
+import com.google.common.jimfs.Configuration;
+import com.google.common.jimfs.Jimfs;
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.IOUtil;
 import htsjdk.tribble.FeatureCodec;
 import htsjdk.tribble.TestUtils;
 import htsjdk.tribble.Tribble;
 import htsjdk.tribble.bed.BEDCodec;
+import htsjdk.tribble.index.interval.IntervalTreeIndex;
 import htsjdk.tribble.index.linear.LinearIndex;
-import htsjdk.tribble.index.tabix.TabixFormat;
 import htsjdk.tribble.index.tabix.TabixIndex;
 import htsjdk.tribble.util.LittleEndianOutputStream;
 import htsjdk.tribble.util.TabixUtils;
@@ -18,21 +21,19 @@ import org.testng.annotations.Test;
 import java.io.File;
 import java.io.IOException;
 import java.io.OutputStream;
+import java.nio.file.FileSystem;
+import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.List;
 
 
-public class IndexTest {
+public class IndexTest extends HtsjdkTest {
     private final static String CHR = "1";
     private final static File MassiveIndexFile = new File(TestUtils.DATA_DIR + "Tb.vcf.idx");
 
     @DataProvider(name = "StartProvider")
     public Object[][] makeStartProvider() {
-        List<Object[]> tests = new ArrayList<Object[]>();
-
-//        for ( int mid = 0; mid <= end; mid += 1000000 ) {
-//            tests.add(new Object[]{0, mid, mid+1000000, end});
-//        }
+        List<Object[]> tests = new ArrayList<>();
 
         tests.add(new Object[]{1226943, 1226943, 1226943, 2000000});
 
@@ -87,11 +88,44 @@ public class IndexTest {
         Assert.assertTrue(tempIndex.exists());
         // load the generated index
         final Index loadedIndex = IndexFactory.loadIndex(tempIndex.getAbsolutePath());
-        // tess that the sequences and properties are the same
+        //TODO: This is just a smoke test; it can pass even if the generated index is unusable for queries.
+        // test that the sequences and properties are the same
         Assert.assertEquals(loadedIndex.getSequenceNames(), index.getSequenceNames());
         Assert.assertEquals(loadedIndex.getProperties(), index.getProperties());
         // test that write to a stream does not blows ip
         index.write(new LittleEndianOutputStream(nullOutputStrem));
     }
 
+    @Test(dataProvider = "writeIndexData")
+    public void testWritePathIndex(final File inputFile, final IndexFactory.IndexType type, final  FeatureCodec codec) throws Exception {
+        try (final FileSystem fs = Jimfs.newFileSystem("test", Configuration.unix())) {
+            // create the index
+            final Index index = IndexFactory.createIndex(inputFile, codec, type);
+            final Path path = fs.getPath(inputFile.getName() + ".index");
+            // write the index to a file
+            index.write(path);
+
+            // test if the index does not blow up with the path constructor
+            switch (type) {
+                case TABIX:
+                    new TabixIndex(path);
+                    break;
+                case LINEAR:
+                    new LinearIndex(path);
+                    break;
+                case INTERVAL_TREE:
+                    new IntervalTreeIndex(path);
+                    break;
+            }
+        }
+    }
+
+    @Test(dataProvider = "writeIndexData")
+    public void testWriteBasedOnNonRegularFeatureFile(final File inputFile, final IndexFactory.IndexType type, final  FeatureCodec codec) throws Exception {
+        final File tmpFolder = IOUtil.createTempDir("NonRegultarFeatureFile", null);
+        // create the index
+        final Index index = IndexFactory.createIndex(inputFile, codec, type);
+        // try to write based on the tmpFolder
+        Assert.assertThrows(IOException.class, () -> index.writeBasedOnFeatureFile(tmpFolder));
+    }
 }
diff --git a/src/test/java/htsjdk/tribble/index/interval/IntervalTreeTest.java b/src/test/java/htsjdk/tribble/index/interval/IntervalTreeTest.java
index ca47089..9a8a0a6 100644
--- a/src/test/java/htsjdk/tribble/index/interval/IntervalTreeTest.java
+++ b/src/test/java/htsjdk/tribble/index/interval/IntervalTreeTest.java
@@ -18,6 +18,7 @@
 
 package htsjdk.tribble.index.interval;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.tribble.AbstractFeatureReader;
 import htsjdk.tribble.CloseableTribbleIterator;
 import htsjdk.tribble.FeatureReader;
@@ -42,7 +43,7 @@ import java.util.Set;
  * User: jrobinso
  * Date: Mar 24, 2010
  */
-public class IntervalTreeTest {
+public class IntervalTreeTest extends HtsjdkTest {
 
     static IntervalTree tree;
 
diff --git a/src/test/java/htsjdk/tribble/index/linear/LinearIndexTest.java b/src/test/java/htsjdk/tribble/index/linear/LinearIndexTest.java
index 09f920e..e20dc15 100644
--- a/src/test/java/htsjdk/tribble/index/linear/LinearIndexTest.java
+++ b/src/test/java/htsjdk/tribble/index/linear/LinearIndexTest.java
@@ -18,6 +18,7 @@
 
 package htsjdk.tribble.index.linear;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.tribble.AbstractFeatureReader;
 import htsjdk.tribble.CloseableTribbleIterator;
 import htsjdk.tribble.FeatureReader;
@@ -38,7 +39,7 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
-public class LinearIndexTest {
+public class LinearIndexTest extends HtsjdkTest {
     private static final File RANDOM_FILE = new File("notMeaningful");
 
     private final static Block CHR1_B1 = new Block(1, 10);
diff --git a/src/test/java/htsjdk/tribble/index/tabix/TabixIndexTest.java b/src/test/java/htsjdk/tribble/index/tabix/TabixIndexTest.java
index 6981b87..077f79d 100644
--- a/src/test/java/htsjdk/tribble/index/tabix/TabixIndexTest.java
+++ b/src/test/java/htsjdk/tribble/index/tabix/TabixIndexTest.java
@@ -23,7 +23,17 @@
  */
 package htsjdk.tribble.index.tabix;
 
+import htsjdk.HtsjdkTest;
+import com.google.common.io.Files;
 import htsjdk.samtools.util.BlockCompressedOutputStream;
+import htsjdk.samtools.util.IOUtil;
+import htsjdk.samtools.util.Interval;
+import htsjdk.tribble.AbstractFeatureReader;
+import htsjdk.tribble.FeatureReader;
+import htsjdk.tribble.TestUtils;
+import htsjdk.tribble.Tribble;
+import htsjdk.tribble.bed.BEDCodec;
+import htsjdk.tribble.bed.BEDFeature;
 import htsjdk.tribble.index.IndexFactory;
 import htsjdk.tribble.util.LittleEndianOutputStream;
 import htsjdk.tribble.util.TabixUtils;
@@ -38,11 +48,13 @@ import org.testng.annotations.Test;
 
 import java.io.File;
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.Iterator;
+import java.util.List;
 
-public class TabixIndexTest {
-    private static final File SMALL_TABIX_FILE = new File("src/test/resources/htsjdk/tribble/tabix/trioDup.vcf.gz.tbi");
-    private static final File BIGGER_TABIX_FILE = new File("src/test/resources/htsjdk/tribble/tabix/bigger.vcf.gz.tbi");
+public class TabixIndexTest extends HtsjdkTest {
+    private static final File SMALL_TABIX_FILE = new File(TestUtils.DATA_DIR, "tabix/trioDup.vcf.gz.tbi");
+    private static final File BIGGER_TABIX_FILE = new File(TestUtils.DATA_DIR, "tabix/bigger.vcf.gz.tbi");
 
     /**
      * Read an existing index from disk, write it to a temp file, read that in, and assert that both in-memory
@@ -71,6 +83,8 @@ public class TabixIndexTest {
         };
     }
 
+    //TODO: This test reads an existing .tbi on a .gz, but only writes a .tbi index for a plain vcf, which
+    // tabix doesn't appear to even allow.
     @Test
     public void testQueryProvidedItemsAmount() throws IOException {
         final String VCF = "src/test/resources/htsjdk/tribble/tabix/YRI.trio.2010_07.indel.sites.vcf";
@@ -101,6 +115,8 @@ public class TabixIndexTest {
         ) // create TabixIndex straight from plaintext VCF
                 .write(plainTextVcfIndexFile); // write it
 
+        //TODO: you can pass in a .tbi file as the index for a plain .vcf, but if you *don't* pass in the file name and
+        //just require an index, VCFFleREader will only look for a .idx on a plain vcf
         final VCFFileReader plainTextVcfReader = new VCFFileReader(plainTextVcfInputFile, plainTextVcfIndexFile);
         // Now we have both plaintext and compressed VCFs with provided TabixIndex-es and could test their "queryability"
 
@@ -133,7 +149,79 @@ public class TabixIndexTest {
         }
     }
 
-    private static int countIteratedElements(Iterator iterator) {
+    @DataProvider(name = "bedTabixIndexTestData")
+    public Object[][] getBedIndexFactory(){
+        // These files have accompanying .tbi files created with tabix.
+        return new Object[][] {
+                new Object[] {
+                        // BGZP BED file with no header, 2 features
+                        new File(TestUtils.DATA_DIR, "bed/2featuresNoHeader.bed.gz"),
+                        Arrays.asList(
+                                new Interval("chr1", 1, 10),
+                                new Interval("chr1", 100, 1000000))
+                },
+                new Object[] {
+                        // BGZP BED file with no header, 3 features; one feature falls in between the query intervals
+                        new File(TestUtils.DATA_DIR, "bed/3featuresNoHeader.bed.gz"),
+                        Arrays.asList(
+                                new Interval("chr1", 1, 10),
+                                new Interval("chr1", 100, 1000000))
+                },
+                new Object[] {
+                        // same file as above (BGZP BED file with no header, 3 features), but change the query to return
+                        // only the single interval for the feature that falls in between the other features
+                        new File(TestUtils.DATA_DIR, "bed/3featuresNoHeader.bed.gz"),
+                        Arrays.asList(
+                                new Interval("chr1", 15, 20))
+                },
+                new Object[] {
+                        // BGZP BED file with one line header, 2 features
+                        new File(TestUtils.DATA_DIR, "bed/2featuresWithHeader.bed.gz"),
+                        Arrays.asList(
+                                new Interval("chr1", 1, 10),
+                                new Interval("chr1", 100, 1000000))
+                },
+        };
+    }
+
+    @Test(dataProvider = "bedTabixIndexTestData")
+    public void testBedTabixIndex(
+            final File inputBed,
+            final List<Interval> queryIntervals
+    ) throws Exception {
+        // copy the input file and create an index for the copy
+        final File tempDir = IOUtil.createTempDir("testBedTabixIndex", null);
+        tempDir.deleteOnExit();
+        final File tmpBed = new File(tempDir, inputBed.getName());
+        Files.copy(inputBed, tmpBed);
+        tmpBed.deleteOnExit();
+        final TabixIndex tabixIndexGz = IndexFactory.createTabixIndex(tmpBed, new BEDCodec(), null);
+        tabixIndexGz.writeBasedOnFeatureFile(tmpBed);
+        final File tmpIndex = Tribble.tabixIndexFile(tmpBed);
+        tmpIndex.deleteOnExit();
+
+        // iterate over the query intervals and validate the query results
+        try(final FeatureReader<BEDFeature> originalReader =
+                    AbstractFeatureReader.getFeatureReader(inputBed.getAbsolutePath(), new BEDCodec());
+            final FeatureReader<BEDFeature> createdReader =
+                    AbstractFeatureReader.getFeatureReader(tmpBed.getAbsolutePath(), new BEDCodec()))
+        {
+            for (final Interval interval: queryIntervals) {
+                final Iterator<BEDFeature> originalIt = originalReader.query(interval.getContig(), interval.getStart(), interval.getEnd());
+                final Iterator<BEDFeature> createdIt = createdReader.query(interval.getContig(), interval.getStart(), interval.getEnd());
+                while(originalIt.hasNext()) {
+                    Assert.assertTrue(createdIt.hasNext(), "some features not returned from query");
+                    BEDFeature bedOrig = originalIt.next();
+                    BEDFeature bedTmp = createdIt.next();
+                    Assert.assertEquals(bedOrig.getContig(), bedTmp.getContig());
+                    Assert.assertEquals(bedOrig.getStart(), bedTmp.getStart());
+                    Assert.assertEquals(bedOrig.getEnd(), bedTmp.getEnd());
+                }
+            }
+        }
+    }
+
+    private static int countIteratedElements(final Iterator iterator) {
         int counter = 0;
         while (iterator.hasNext()) {
             iterator.next();
diff --git a/src/test/java/htsjdk/tribble/readers/AsciiLineReaderTest.java b/src/test/java/htsjdk/tribble/readers/AsciiLineReaderTest.java
index 822f6cf..8583801 100644
--- a/src/test/java/htsjdk/tribble/readers/AsciiLineReaderTest.java
+++ b/src/test/java/htsjdk/tribble/readers/AsciiLineReaderTest.java
@@ -1,10 +1,13 @@
 package htsjdk.tribble.readers;
 
+import htsjdk.HtsjdkTest;
+import htsjdk.samtools.util.BlockCompressedInputStream;
 import htsjdk.tribble.TestUtils;
-import org.testng.annotations.AfterMethod;
-import org.testng.annotations.BeforeMethod;
+import org.testng.Assert;
+import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
+import java.io.ByteArrayInputStream;
 import java.io.FileInputStream;
 import java.io.InputStream;
 
@@ -15,17 +18,7 @@ import static org.testng.Assert.assertTrue;
  * User: jacob
  * Date: 2012/05/09
  */
-public class AsciiLineReaderTest {
-    @BeforeMethod
-    public void setUp() throws Exception {
-
-    }
-
-    @AfterMethod
-    public void tearDown() throws Exception {
-
-    }
-
+public class AsciiLineReaderTest extends HtsjdkTest {
     /**
      * Test that we read the correct number of lines
      * from a file
@@ -35,7 +28,7 @@ public class AsciiLineReaderTest {
     public void testReadLines() throws Exception {
         String filePath = TestUtils.DATA_DIR + "gwas/smallp.gwas";
         InputStream is = new FileInputStream(filePath);
-        AsciiLineReader reader = new AsciiLineReader(is);
+        AsciiLineReader reader = AsciiLineReader.from(is);
         int actualLines = 0;
         int expectedNumber = 20;
         String nextLine = "";
@@ -49,4 +42,48 @@ public class AsciiLineReaderTest {
         assertEquals(expectedNumber, actualLines);
 
     }
+
+    @Test public void voidTestLineEndingLength() throws Exception {
+        final String input = "Hello\nThis\rIs A Silly Test\r\nSo There";
+        final InputStream is = new ByteArrayInputStream(input.getBytes());
+        final AsciiLineReader in = AsciiLineReader.from(is);
+
+        Assert.assertEquals(in.getLineTerminatorLength(), -1);
+        Assert.assertEquals(in.readLine(), "Hello");
+        Assert.assertEquals(in.getLineTerminatorLength(), 1);
+        Assert.assertEquals(in.readLine(), "This");
+        Assert.assertEquals(in.getLineTerminatorLength(), 1);
+        Assert.assertEquals(in.readLine(), "Is A Silly Test");
+        Assert.assertEquals(in.getLineTerminatorLength(), 2);
+        Assert.assertEquals(in.readLine(), "So There");
+        Assert.assertEquals(in.getLineTerminatorLength(), 0);
+    }
+
+    @Test public void voidTestLineEndingLengthAtEof() throws Exception {
+        final String input = "Hello\nWorld\r\n";
+        final InputStream is = new ByteArrayInputStream(input.getBytes());
+        final AsciiLineReader in = AsciiLineReader.from(is);
+
+        Assert.assertEquals(in.getLineTerminatorLength(), -1);
+        Assert.assertEquals(in.readLine(), "Hello");
+        Assert.assertEquals(in.getLineTerminatorLength(), 1);
+        Assert.assertEquals(in.readLine(), "World");
+        Assert.assertEquals(in.getLineTerminatorLength(), 2);
+    }
+
+    @DataProvider(name = "fromStream")
+    public Object[][] getFromStreamData() {
+        return new Object[][]{
+                { new BlockCompressedInputStream(new ByteArrayInputStream(new byte[10])), BlockCompressedAsciiLineReader.class },
+                { new PositionalBufferedStream(new ByteArrayInputStream(new byte[10])), AsciiLineReader.class },
+                { new ByteArrayInputStream(new byte[10]), AsciiLineReader.class }
+        };
+    }
+
+    @Test(dataProvider="fromStream")
+    public void testFromStream(final InputStream inStream, final Class expectedClass) {
+        AsciiLineReader alr = AsciiLineReader.from(inStream);
+        Assert.assertEquals(alr.getClass(), expectedClass);
+    }
+
 }
diff --git a/src/test/java/htsjdk/tribble/readers/BlockCompressedAsciiLineReaderTest.java b/src/test/java/htsjdk/tribble/readers/BlockCompressedAsciiLineReaderTest.java
new file mode 100644
index 0000000..a55c8a0
--- /dev/null
+++ b/src/test/java/htsjdk/tribble/readers/BlockCompressedAsciiLineReaderTest.java
@@ -0,0 +1,76 @@
+package htsjdk.tribble.readers;
+
+import htsjdk.HtsjdkTest;
+import htsjdk.samtools.util.BlockCompressedFilePointerUtil;
+import htsjdk.samtools.util.BlockCompressedInputStream;
+import htsjdk.samtools.util.BlockCompressedOutputStream;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.IOException;
+
+public class BlockCompressedAsciiLineReaderTest extends HtsjdkTest {
+
+    private static final String sentinelLine = "Sentinel line";
+
+    @Test
+    public void testLineReaderPosition() throws IOException {
+        final File multiBlockFile = File.createTempFile("BlockCompressedAsciiLineReaderTest", ".gz");
+        multiBlockFile.deleteOnExit();
+
+        // write a file that has more than a single compressed block
+        final long expectedFinalLineOffset = populateMultiBlockCompressedFile(multiBlockFile);
+
+        try (final BlockCompressedInputStream bcis = new BlockCompressedInputStream(multiBlockFile);
+            final BlockCompressedAsciiLineReader asciiLineReader = new BlockCompressedAsciiLineReader(bcis))
+        {
+            String line = null;
+            long actualFinalLineOffset = -1;
+
+            do {
+                actualFinalLineOffset = asciiLineReader.getPosition();
+                line = asciiLineReader.readLine();
+            } while (line != null && !line.equals(sentinelLine));
+
+            // test that we read the sentinel line; its at the expected offset, and that offset
+            // represents a virtual file pointer
+            Assert.assertNotNull(line);
+            Assert.assertEquals(line, sentinelLine);
+            Assert.assertEquals(expectedFinalLineOffset, actualFinalLineOffset);
+            Assert.assertTrue(BlockCompressedFilePointerUtil.getBlockAddress(actualFinalLineOffset) != 0);
+        }
+    }
+
+    @Test(expectedExceptions = UnsupportedOperationException.class)
+    public void testRejectPositionalInputStream() throws IOException {
+        final File multiBlockFile = File.createTempFile("BlockCompressedAsciiLineReaderTest", ".gz");
+        multiBlockFile.deleteOnExit();
+        populateMultiBlockCompressedFile(multiBlockFile);
+
+        try (final BlockCompressedInputStream bcis = new BlockCompressedInputStream(multiBlockFile);
+             final BlockCompressedAsciiLineReader asciiLineReader = new BlockCompressedAsciiLineReader(bcis)) {
+            asciiLineReader.readLine(new PositionalBufferedStream(new ByteArrayInputStream(new byte[1100])));
+        }
+    }
+
+    // Populate a block compressed file so that has more than a single compressed block
+    private long populateMultiBlockCompressedFile(final File tempBlockCompressedFile) throws IOException {
+        long sentinelLineOffset = -1;
+
+        try (BlockCompressedOutputStream bcos = new BlockCompressedOutputStream(tempBlockCompressedFile)) {
+            // write lines until we exceed the size of the first block (block address != 0)
+            do {
+                bcos.write("Write this line enough times to exceed the size or a compressed block\n".getBytes());
+            } while (BlockCompressedFilePointerUtil.getBlockAddress(bcos.getFilePointer()) == 0);
+
+            sentinelLineOffset = bcos.getFilePointer();
+
+            // write a terminating line that is guaranteed to not be in the first block
+            bcos.write(sentinelLine.getBytes());
+        }
+
+        return sentinelLineOffset;
+    }
+}
diff --git a/src/test/java/htsjdk/tribble/readers/LongLineBufferedReaderTest.java b/src/test/java/htsjdk/tribble/readers/LongLineBufferedReaderTest.java
index 6c4c946..3e498e1 100644
--- a/src/test/java/htsjdk/tribble/readers/LongLineBufferedReaderTest.java
+++ b/src/test/java/htsjdk/tribble/readers/LongLineBufferedReaderTest.java
@@ -1,5 +1,6 @@
 package htsjdk.tribble.readers;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.tribble.TestUtils;
 import org.testng.Assert;
 import org.testng.annotations.Test;
@@ -11,7 +12,7 @@ import java.io.InputStreamReader;
 /**
  * @author mccowan
  */
-public class LongLineBufferedReaderTest {
+public class LongLineBufferedReaderTest extends HtsjdkTest {
 
     /**
      * Test that we read the correct number of lines
diff --git a/src/test/java/htsjdk/tribble/readers/PositionalBufferedStreamTest.java b/src/test/java/htsjdk/tribble/readers/PositionalBufferedStreamTest.java
index 3dd7cf3..8d9db2a 100644
--- a/src/test/java/htsjdk/tribble/readers/PositionalBufferedStreamTest.java
+++ b/src/test/java/htsjdk/tribble/readers/PositionalBufferedStreamTest.java
@@ -1,5 +1,6 @@
 package htsjdk.tribble.readers;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.tribble.TestUtils;
 import org.testng.Assert;
 import org.testng.annotations.AfterMethod;
@@ -20,7 +21,7 @@ import java.util.List;
  * User: jacob
  * Date: 2012/05/09
  */
-public class PositionalBufferedStreamTest {
+public class PositionalBufferedStreamTest extends HtsjdkTest {
 
     InputStream FileIs;
     long expectedBytes;
diff --git a/src/test/java/htsjdk/tribble/readers/ReaderTest.java b/src/test/java/htsjdk/tribble/readers/ReaderTest.java
index d700e04..7ac1d57 100644
--- a/src/test/java/htsjdk/tribble/readers/ReaderTest.java
+++ b/src/test/java/htsjdk/tribble/readers/ReaderTest.java
@@ -1,6 +1,7 @@
 package htsjdk.tribble.readers;
 
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.AfterClass;
 import org.testng.annotations.BeforeClass;
@@ -18,7 +19,7 @@ import java.util.List;
 /**
  * Tests for streams and readers
  */
-public class ReaderTest {
+public class ReaderTest extends HtsjdkTest {
     @BeforeClass
     public void setup() throws IOException {
     }
diff --git a/src/test/java/htsjdk/tribble/readers/SynchronousLineReaderUnitTest.java b/src/test/java/htsjdk/tribble/readers/SynchronousLineReaderUnitTest.java
index fbb5d18..0c0deab 100644
--- a/src/test/java/htsjdk/tribble/readers/SynchronousLineReaderUnitTest.java
+++ b/src/test/java/htsjdk/tribble/readers/SynchronousLineReaderUnitTest.java
@@ -1,5 +1,6 @@
 package htsjdk.tribble.readers;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.tribble.TestUtils;
 import org.testng.Assert;
 import org.testng.annotations.Test;
@@ -12,7 +13,7 @@ import java.io.InputStreamReader;
 /**
  * @author mccowan
  */
-public class SynchronousLineReaderUnitTest {
+public class SynchronousLineReaderUnitTest extends HtsjdkTest {
     @Test
     public void testLineReaderIterator_streamConstructor() throws Exception {
         final File filePath = new File(TestUtils.DATA_DIR + "gwas/smallp.gwas");
diff --git a/src/test/java/htsjdk/tribble/readers/TabixReaderTest.java b/src/test/java/htsjdk/tribble/readers/TabixReaderTest.java
index d7b36df..b8ec677 100644
--- a/src/test/java/htsjdk/tribble/readers/TabixReaderTest.java
+++ b/src/test/java/htsjdk/tribble/readers/TabixReaderTest.java
@@ -1,6 +1,7 @@
 package htsjdk.tribble.readers;
 
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.TestUtil;
 import htsjdk.tribble.TestUtils;
 import org.testng.Assert;
@@ -13,9 +14,6 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;
 
-import static org.testng.AssertJUnit.assertTrue;
-
-
 /**
  * Created by IntelliJ IDEA.
  * User: jrobinso
@@ -23,7 +21,7 @@ import static org.testng.AssertJUnit.assertTrue;
  * Time: 8:57:40 PM
  * To change this template use File | Settings | File Templates.
  */
-public class TabixReaderTest {
+public class TabixReaderTest extends HtsjdkTest {
 
     static String tabixFile = TestUtils.DATA_DIR + "tabix/trioDup.vcf.gz";
     static TabixReader tabixReader;
@@ -128,10 +126,10 @@ public class TabixReaderTest {
         int nRecords = 0;
         String nextLine;
         while ((nextLine = lineReader.readLine()) != null) {
-            assertTrue(nextLine.startsWith("4"));
+            Assert.assertTrue(nextLine.startsWith("4"));
             nRecords++;
         }
-        assertTrue(nRecords > 0);
+        Assert.assertTrue(nRecords > 0);
 
 
     }
@@ -153,10 +151,22 @@ public class TabixReaderTest {
         int nRecords = 0;
         String nextLine;
         while ((nextLine = lineReader.readLine()) != null) {
-            assertTrue(nextLine.startsWith("4"));
+            Assert.assertTrue(nextLine.startsWith("4"));
             nRecords++;
         }
-        assertTrue(nRecords > 0);
+        Assert.assertTrue(nRecords > 0);
 
     }
+    
+    /**
+     * Test TabixReader.readLine
+     *
+     * @throws java.io.IOException
+     */
+    @Test
+    public void testTabixReaderReadLine() throws IOException {
+        TabixReader tabixReader = new TabixReader(tabixFile);
+        Assert.assertNotNull(tabixReader.readLine());
+        tabixReader.close();
+    }
 }
diff --git a/src/test/java/htsjdk/tribble/util/ParsingUtilsTest.java b/src/test/java/htsjdk/tribble/util/ParsingUtilsTest.java
index 1c3ad1f..c974790 100644
--- a/src/test/java/htsjdk/tribble/util/ParsingUtilsTest.java
+++ b/src/test/java/htsjdk/tribble/util/ParsingUtilsTest.java
@@ -1,12 +1,17 @@
 package htsjdk.tribble.util;
 
 
+import com.google.common.jimfs.Configuration;
+import com.google.common.jimfs.Jimfs;
+import htsjdk.HtsjdkTest;
+import htsjdk.samtools.util.IOUtil;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
+import java.io.*;
+import java.nio.file.FileSystem;
+import java.nio.file.Files;
+import java.nio.file.Path;
 import java.util.Arrays;
 import java.util.List;
 
@@ -14,7 +19,7 @@ import java.util.List;
 /**
  * Parsing utils tests
  */
-public class ParsingUtilsTest {
+public class ParsingUtilsTest extends HtsjdkTest {
 
     static final String AVAILABLE_FTP_URL = "ftp://ftp.broadinstitute.org/pub/igv/TEST/test.txt";
     static final String UNAVAILABLE_FTP_URL = "ftp://www.example.com/file.txt";
@@ -118,6 +123,37 @@ public class ParsingUtilsTest {
     }
 
     @Test
+    public void testFileDoesExist() throws IOException{
+        File tempFile = File.createTempFile(getClass().getSimpleName(), ".tmp");
+        tempFile.deleteOnExit();
+        tstExists(tempFile.getAbsolutePath(), true);
+        tstExists(tempFile.toURI().toString(), true);
+    }
+
+    @Test
+    public void testFileDoesNotExist() throws IOException{
+        File tempFile = File.createTempFile(getClass().getSimpleName(), ".tmp");
+        tempFile.delete();
+        tstExists(tempFile.getAbsolutePath(), false);
+        tstExists(tempFile.toURI().toString(), false);
+    }
+
+    @Test
+    public void testInMemoryNioFileDoesExist() throws IOException{
+        FileSystem fs = Jimfs.newFileSystem(Configuration.unix());
+        Path file = fs.getPath("/file");
+        Files.createFile(file);
+        tstExists(file.toUri().toString(), true);
+    }
+
+    @Test
+    public void testInMemoryNioFileDoesNotExist() throws IOException{
+        FileSystem fs = Jimfs.newFileSystem(Configuration.unix());
+        Path file = fs.getPath("/file");
+        tstExists(file.toUri().toString(), false);
+    }
+
+    @Test
     public void testFTPDoesExist() throws IOException{
         tstExists(AVAILABLE_FTP_URL, true);
     }
@@ -143,6 +179,26 @@ public class ParsingUtilsTest {
     }
 
     @Test
+    public void testFileOpenInputStream() throws IOException{
+        File tempFile = File.createTempFile(getClass().getSimpleName(), ".tmp");
+        tempFile.deleteOnExit();
+        OutputStream os = IOUtil.openFileForWriting(tempFile);
+        BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(os));
+        writer.write("hello");
+        writer.close();
+        tstStream(tempFile.getAbsolutePath());
+        tstStream(tempFile.toURI().toString());
+    }
+
+    @Test
+    public void testInMemoryNioFileOpenInputStream() throws IOException{
+        FileSystem fs = Jimfs.newFileSystem(Configuration.unix());
+        Path file = fs.getPath("/file");
+        Files.write(file, "hello".getBytes("UTF-8"));
+        tstStream(file.toUri().toString());
+    }
+
+    @Test
     public void testFTPOpenInputStream() throws IOException{
         tstStream(AVAILABLE_FTP_URL);
     }
diff --git a/src/test/java/htsjdk/tribble/util/ftp/FTPClientTest.java b/src/test/java/htsjdk/tribble/util/ftp/FTPClientTest.java
index 3979b08..6b77f91 100644
--- a/src/test/java/htsjdk/tribble/util/ftp/FTPClientTest.java
+++ b/src/test/java/htsjdk/tribble/util/ftp/FTPClientTest.java
@@ -1,5 +1,6 @@
 package htsjdk.tribble.util.ftp;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.ftp.FTPClient;
 import htsjdk.samtools.util.ftp.FTPReply;
 import org.testng.Assert;
@@ -15,7 +16,7 @@ import java.net.UnknownHostException;
 * @author Jim Robinson
 * @since 10/3/11
 */
-public class FTPClientTest {
+public class FTPClientTest extends HtsjdkTest {
 
     static String host = "ftp.broadinstitute.org";
     static String file = "/pub/igv/TEST/test.txt";
diff --git a/src/test/java/htsjdk/tribble/util/ftp/FTPUtilsTest.java b/src/test/java/htsjdk/tribble/util/ftp/FTPUtilsTest.java
index a5f3b0e..87000ee 100644
--- a/src/test/java/htsjdk/tribble/util/ftp/FTPUtilsTest.java
+++ b/src/test/java/htsjdk/tribble/util/ftp/FTPUtilsTest.java
@@ -1,5 +1,6 @@
 package htsjdk.tribble.util.ftp;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.ftp.FTPUtils;
 import org.testng.annotations.Test;
 
@@ -12,7 +13,7 @@ import static org.testng.Assert.assertTrue;
 * @author Jim Robinson
 * @since 10/4/11
 */
-public class FTPUtilsTest {
+public class FTPUtilsTest extends HtsjdkTest {
 
     @Test
     public void testResourceAvailable() throws Exception {
diff --git a/src/test/java/htsjdk/tribble/util/popgen/HardyWeinbergCalculationTest.java b/src/test/java/htsjdk/tribble/util/popgen/HardyWeinbergCalculationTest.java
index fcf1bea..d2b5455 100644
--- a/src/test/java/htsjdk/tribble/util/popgen/HardyWeinbergCalculationTest.java
+++ b/src/test/java/htsjdk/tribble/util/popgen/HardyWeinbergCalculationTest.java
@@ -1,5 +1,6 @@
 package htsjdk.tribble.util.popgen;
 
+import htsjdk.HtsjdkTest;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
@@ -7,7 +8,7 @@ import org.testng.annotations.Test;
 /**
  * Created by farjoun on 7/18/14.
  */
-public class HardyWeinbergCalculationTest {
+public class HardyWeinbergCalculationTest extends HtsjdkTest {
 
     @DataProvider
     public Object[][] testHwCalculateData() {
diff --git a/src/test/java/htsjdk/variant/PrintVariantsExampleTest.java b/src/test/java/htsjdk/variant/PrintVariantsExampleTest.java
index c82f2db..9f273a9 100644
--- a/src/test/java/htsjdk/variant/PrintVariantsExampleTest.java
+++ b/src/test/java/htsjdk/variant/PrintVariantsExampleTest.java
@@ -25,20 +25,19 @@
 
 package htsjdk.variant;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.util.IOUtil;
 import htsjdk.variant.example.PrintVariantsExample;
 import org.testng.Assert;
-import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.List;
-import java.util.OptionalInt;
 import java.util.stream.IntStream;
 
-public class PrintVariantsExampleTest {
+public class PrintVariantsExampleTest extends HtsjdkTest {
     @Test
     public void testExampleWriteFile() throws IOException {
         final File tempFile = File.createTempFile("example", ".vcf");
diff --git a/src/test/java/htsjdk/variant/VariantBaseTest.java b/src/test/java/htsjdk/variant/VariantBaseTest.java
index 87345a0..7a3417b 100644
--- a/src/test/java/htsjdk/variant/VariantBaseTest.java
+++ b/src/test/java/htsjdk/variant/VariantBaseTest.java
@@ -25,6 +25,7 @@
 
 package htsjdk.variant;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SAMSequenceDictionary;
 import htsjdk.samtools.SAMSequenceRecord;
 import htsjdk.variant.variantcontext.Genotype;
@@ -44,7 +45,7 @@ import java.util.Set;
 /**
  * Base class for test classes within org.broadinstitute.variant
  */
-public class VariantBaseTest {
+public class VariantBaseTest extends HtsjdkTest {
 
     public static final String variantTestDataRoot = new File("src/test/resources/htsjdk/variant/").getAbsolutePath() + "/";
 
diff --git a/src/test/java/htsjdk/variant/bcf2/BCF2WriterUnitTest.java b/src/test/java/htsjdk/variant/bcf2/BCF2WriterUnitTest.java
new file mode 100644
index 0000000..31d62dd
--- /dev/null
+++ b/src/test/java/htsjdk/variant/bcf2/BCF2WriterUnitTest.java
@@ -0,0 +1,282 @@
+/*
+* Copyright (c) 2017 The Broad Institute
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package htsjdk.variant.bcf2;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.util.TestUtil;
+import htsjdk.tribble.Tribble;
+import htsjdk.tribble.readers.PositionalBufferedStream;
+import htsjdk.variant.VariantBaseTest;
+import htsjdk.variant.variantcontext.Allele;
+import htsjdk.variant.variantcontext.Genotype;
+import htsjdk.variant.variantcontext.GenotypeBuilder;
+import htsjdk.variant.variantcontext.GenotypesContext;
+import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.variantcontext.VariantContextBuilder;
+import htsjdk.variant.variantcontext.VariantContextTestProvider;
+import htsjdk.variant.variantcontext.writer.Options;
+import htsjdk.variant.variantcontext.writer.VariantContextWriter;
+import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder;
+import htsjdk.variant.vcf.VCFFormatHeaderLine;
+import htsjdk.variant.vcf.VCFHeader;
+import htsjdk.variant.vcf.VCFHeaderLine;
+import htsjdk.variant.vcf.VCFHeaderLineType;
+import htsjdk.variant.vcf.VCFInfoHeaderLine;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * @author amila
+ *         <p/>
+ *         Class BCF2WriterUnitTest
+ *         <p/>
+ *         This class tests out the ability of the BCF writer to correctly write BCF files
+ */
+public class BCF2WriterUnitTest extends VariantBaseTest {
+
+    private File tempDir;
+
+    /**
+     * create a fake header of known quantity
+     *
+     * @return a fake VCF header
+     */
+    private static VCFHeader createFakeHeader() {
+        final SAMSequenceDictionary sequenceDict = createArtificialSequenceDictionary();
+        final Set<VCFHeaderLine> metaData = new HashSet<>();
+        final Set<String> additionalColumns = new HashSet<>();
+        metaData.add(new VCFHeaderLine("two", "2"));
+        additionalColumns.add("extra1");
+        additionalColumns.add("extra2");
+        final VCFHeader header = new VCFHeader(metaData, additionalColumns);
+        header.addMetaDataLine(new VCFInfoHeaderLine("DP", 1, VCFHeaderLineType.String, "x"));
+        header.addMetaDataLine(new VCFFormatHeaderLine("GT", 1, VCFHeaderLineType.String, "x"));
+        header.addMetaDataLine(new VCFFormatHeaderLine("BB", 1, VCFHeaderLineType.String, "x"));
+        header.addMetaDataLine(new VCFFormatHeaderLine("GQ", 1, VCFHeaderLineType.String, "x"));
+        header.setSequenceDictionary(sequenceDict);
+        return header;
+    }
+
+    @BeforeClass
+    private void createTemporaryDirectory() {
+        tempDir = TestUtil.getTempDirectory("BCFWriter", "StaleIndex");
+        tempDir.deleteOnExit();
+    }
+
+
+    /**
+     * test, using the writer and reader, that we can output and input BCF without problems
+     */
+    @Test
+    public void testWriteAndReadBCF() throws IOException {
+        final File bcfOutputFile = File.createTempFile("testWriteAndReadVCF.", ".bcf", tempDir);
+        bcfOutputFile.deleteOnExit();
+        final VCFHeader header = createFakeHeader();
+        try (final VariantContextWriter writer = new VariantContextWriterBuilder()
+                .setOutputFile(bcfOutputFile).setReferenceDictionary(header.getSequenceDictionary())
+                .unsetOption(Options.INDEX_ON_THE_FLY)
+                .build()) {
+            writer.writeHeader(header);
+            writer.add(createVC(header));
+            writer.add(createVC(header));
+        }
+        VariantContextTestProvider.VariantContextContainer container = VariantContextTestProvider
+                .readAllVCs(bcfOutputFile, new BCF2Codec());
+        int counter = 0;
+        final Iterator<VariantContext> it = container.getVCs().iterator();
+        while (it.hasNext()) {
+            it.next();
+            counter++;
+        }
+        Assert.assertEquals(counter, 2);
+
+    }
+
+
+    /**
+     * test, with index-on-the-fly option, that we can output and input BCF without problems
+     */
+    @Test
+    public void testWriteAndReadBCFWithIndex() throws IOException {
+        final File bcfOutputFile = File.createTempFile("testWriteAndReadVCF.", ".bcf", tempDir);
+        bcfOutputFile.deleteOnExit();
+        Tribble.indexFile(bcfOutputFile).deleteOnExit();
+        final VCFHeader header = createFakeHeader();
+        try (final VariantContextWriter writer = new VariantContextWriterBuilder()
+                .setOutputFile(bcfOutputFile).setReferenceDictionary(header.getSequenceDictionary())
+                .setOptions(EnumSet.of(Options.INDEX_ON_THE_FLY))
+                .build()) {
+            writer.writeHeader(header);
+            writer.add(createVC(header));
+            writer.add(createVC(header));
+        }
+        VariantContextTestProvider.VariantContextContainer container = VariantContextTestProvider
+                .readAllVCs(bcfOutputFile, new BCF2Codec());
+        int counter = 0;
+        final Iterator<VariantContext> it = container.getVCs().iterator();
+        while (it.hasNext()) {
+            it.next();
+            counter++;
+        }
+        Assert.assertEquals(counter, 2);
+    }
+
+    /**
+     * test, using the writer and reader, that we can output and input a BCF body without header
+     */
+    @Test
+    public void testWriteAndReadBCFHeaderless() throws IOException {
+        final File bcfOutputFile = File.createTempFile("testWriteAndReadBCFWithHeader.", ".bcf", tempDir);
+        bcfOutputFile.deleteOnExit();
+        final File bcfOutputHeaderlessFile = File.createTempFile("testWriteAndReadBCFHeaderless.", ".bcf", tempDir);
+        bcfOutputHeaderlessFile.deleteOnExit();
+
+        final VCFHeader header = createFakeHeader();
+        // we write two files, bcfOutputFile with the header, and bcfOutputHeaderlessFile with just the body
+        try (final VariantContextWriter fakeBCFFileWriter = new VariantContextWriterBuilder()
+                .setOutputFile(bcfOutputFile).setReferenceDictionary(header.getSequenceDictionary())
+                .unsetOption(Options.INDEX_ON_THE_FLY)
+                .build()) {
+            fakeBCFFileWriter.writeHeader(header); // writes header
+        }
+
+        try (final VariantContextWriter fakeBCFBodyFileWriter = new VariantContextWriterBuilder()
+                .setOutputFile(bcfOutputHeaderlessFile).setReferenceDictionary(header.getSequenceDictionary())
+                .unsetOption(Options.INDEX_ON_THE_FLY)
+                .build()) {
+            fakeBCFBodyFileWriter.setHeader(header); // does not write header
+            fakeBCFBodyFileWriter.add(createVC(header));
+            fakeBCFBodyFileWriter.add(createVC(header));
+        }
+
+        VariantContextTestProvider.VariantContextContainer container;
+
+        try (final PositionalBufferedStream headerPbs = new PositionalBufferedStream(new FileInputStream(bcfOutputFile));
+        final PositionalBufferedStream bodyPbs = new PositionalBufferedStream(new FileInputStream(bcfOutputHeaderlessFile))) {
+
+            BCF2Codec codec = new BCF2Codec();
+            codec.readHeader(headerPbs);
+            // we use the header information read from identical file with header+body to read just the body of second file
+
+            int counter = 0;
+            while (!bodyPbs.isDone()) {
+                VariantContext vc = codec.decode(bodyPbs);
+                counter++;
+            }
+            Assert.assertEquals(counter, 2);
+        }
+
+    }
+
+    @Test(expectedExceptions = IllegalStateException.class)
+    public void testWriteHeaderTwice() throws IOException {
+        final File bcfOutputFile = File.createTempFile("testWriteAndReadVCF.", ".bcf", tempDir);
+        bcfOutputFile.deleteOnExit();
+
+        final VCFHeader header = createFakeHeader();
+        // prevent writing header twice
+        try (final VariantContextWriter writer = new VariantContextWriterBuilder()
+                .setOutputFile(bcfOutputFile).setReferenceDictionary(header.getSequenceDictionary())
+                .unsetOption(Options.INDEX_ON_THE_FLY)
+                .build()) {
+            writer.writeHeader(header);
+            writer.writeHeader(header);
+        }
+    }
+
+    @Test(expectedExceptions = IllegalStateException.class)
+    public void testChangeHeaderAfterWritingHeader() throws IOException {
+        final File bcfOutputFile = File.createTempFile("testWriteAndReadVCF.", ".bcf", tempDir);
+        bcfOutputFile.deleteOnExit();
+
+        final VCFHeader header = createFakeHeader();
+        // prevent changing header if it's already written
+        try (final VariantContextWriter writer = new VariantContextWriterBuilder()
+                .setOutputFile(bcfOutputFile).setReferenceDictionary(header.getSequenceDictionary())
+                .unsetOption(Options.INDEX_ON_THE_FLY)
+                .build()) {
+            writer.writeHeader(header);
+            writer.setHeader(header);
+        }
+    }
+
+    @Test(expectedExceptions = IllegalStateException.class)
+    public void testChangeHeaderAfterWritingBody() throws IOException {
+        final File bcfOutputFile = File.createTempFile("testWriteAndReadVCF.", ".bcf", tempDir);
+        bcfOutputFile.deleteOnExit();
+
+        final VCFHeader header = createFakeHeader();
+        // prevent changing header if part of body is already written
+        try (final VariantContextWriter writer = new VariantContextWriterBuilder()
+                .setOutputFile(bcfOutputFile).setReferenceDictionary(header.getSequenceDictionary())
+                .unsetOption(Options.INDEX_ON_THE_FLY)
+                .build()) {
+            writer.setHeader(header);
+            writer.add(createVC(header));
+            writer.setHeader(header);
+        }
+    }
+
+    /**
+     * create a fake VCF record
+     *
+     * @param header the VCF header
+     * @return a VCFRecord
+     */
+    private VariantContext createVC(final VCFHeader header) {
+        final List<Allele> alleles = new ArrayList<>();
+        final Map<String, Object> attributes = new HashMap<>();
+        final GenotypesContext genotypes = GenotypesContext.create(header.getGenotypeSamples().size());
+
+        alleles.add(Allele.create("A", true));
+        alleles.add(Allele.create("ACC", false));
+
+        attributes.put("DP", "50");
+        for (final String name : header.getGenotypeSamples()) {
+            final Genotype gt = new GenotypeBuilder(name, alleles.subList(1, 2)).GQ(0).attribute("BB", "1").phased(true)
+                    .make();
+            genotypes.add(gt);
+        }
+        return new VariantContextBuilder("RANDOM", "1", 1, 1, alleles)
+                .genotypes(genotypes).attributes(attributes).make();
+    }
+
+
+}
+
diff --git a/src/test/java/htsjdk/variant/utils/SAMSequenceDictionaryExtractorTest.java b/src/test/java/htsjdk/variant/utils/SAMSequenceDictionaryExtractorTest.java
index 9fb13e8..af32411 100644
--- a/src/test/java/htsjdk/variant/utils/SAMSequenceDictionaryExtractorTest.java
+++ b/src/test/java/htsjdk/variant/utils/SAMSequenceDictionaryExtractorTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.variant.utils;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.samtools.SAMSequenceDictionary;
 import htsjdk.samtools.util.SequenceUtil;
 import org.testng.annotations.DataProvider;
@@ -34,7 +35,7 @@ import java.io.File;
 /**
  * @author farjoun on 4/9/14.
  */
-public class SAMSequenceDictionaryExtractorTest {
+public class SAMSequenceDictionaryExtractorTest extends HtsjdkTest {
     String path = "src/test/resources/htsjdk/variant/utils/SamSequenceDictionaryExtractor/";
 
     @DataProvider(name = "testExtractDictionaries")
diff --git a/src/test/java/htsjdk/variant/variantcontext/VariantContextTestProvider.java b/src/test/java/htsjdk/variant/variantcontext/VariantContextTestProvider.java
index 613dec5..b847659 100644
--- a/src/test/java/htsjdk/variant/variantcontext/VariantContextTestProvider.java
+++ b/src/test/java/htsjdk/variant/variantcontext/VariantContextTestProvider.java
@@ -25,6 +25,7 @@
 
 package htsjdk.variant.variantcontext;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.tribble.FeatureCodec;
 import htsjdk.tribble.FeatureCodecHeader;
 import htsjdk.tribble.Tribble;
@@ -69,7 +70,7 @@ import java.util.TreeSet;
  * @author Your Name
  * @since Date created
  */
-public class VariantContextTestProvider {
+public class VariantContextTestProvider extends HtsjdkTest {
     final private static boolean ENABLE_GENOTYPE_TESTS = true;
     final private static boolean ENABLE_A_AND_G_TESTS = true;
     final private static boolean ENABLE_VARARRAY_TESTS = true;
@@ -1011,4 +1012,4 @@ public class VariantContextTestProvider {
             throw new RuntimeException(e);
         }
     }
-}
\ No newline at end of file
+}
diff --git a/src/test/java/htsjdk/variant/variantcontext/VariantContextUnitTest.java b/src/test/java/htsjdk/variant/variantcontext/VariantContextUnitTest.java
index 14056f8..3d68515 100644
--- a/src/test/java/htsjdk/variant/variantcontext/VariantContextUnitTest.java
+++ b/src/test/java/htsjdk/variant/variantcontext/VariantContextUnitTest.java
@@ -178,8 +178,8 @@ public class VariantContextUnitTest extends VariantBaseTest {
         final List<Allele> allelesUnnaturalOrder = Arrays.asList(Aref, T, C);
         VariantContext naturalVC = snpBuilder.alleles(allelesNaturalOrder).make();
         VariantContext unnaturalVC = snpBuilder.alleles(allelesUnnaturalOrder).make();
-        Assert.assertEquals(new ArrayList<Allele>(naturalVC.getAlleles()), allelesNaturalOrder);
-        Assert.assertEquals(new ArrayList<Allele>(unnaturalVC.getAlleles()), allelesUnnaturalOrder);
+        Assert.assertEquals(new ArrayList<>(naturalVC.getAlleles()), allelesNaturalOrder);
+        Assert.assertEquals(new ArrayList<>(unnaturalVC.getAlleles()), allelesUnnaturalOrder);
     }
 
     @Test
@@ -371,7 +371,7 @@ public class VariantContextUnitTest extends VariantBaseTest {
 
     @Test (expectedExceptions = Throwable.class)
     public void testBadConstructorArgs4() {
-        new VariantContextBuilder("test", insLoc, insLocStart, insLocStop, Collections.<Allele>emptyList()).make();
+        new VariantContextBuilder("test", insLoc, insLocStart, insLocStop, Collections.emptyList()).make();
     }
 
     @Test (expectedExceptions = Exception.class)
@@ -528,7 +528,7 @@ public class VariantContextUnitTest extends VariantBaseTest {
         Assert.assertTrue(vc.filtersWereApplied());
         Assert.assertNotNull(vc.getFiltersMaybeNull());
 
-        Set<String> filters = new HashSet<String>(Arrays.asList("BAD_SNP_BAD!", "REALLY_BAD_SNP", "CHRIST_THIS_IS_TERRIBLE"));
+        Set<String> filters = new HashSet<>(Arrays.asList("BAD_SNP_BAD!", "REALLY_BAD_SNP", "CHRIST_THIS_IS_TERRIBLE"));
         vc = new VariantContextBuilder(vc).filters(filters).make();
 
         Assert.assertFalse(vc.isNotFiltered());
@@ -570,12 +570,16 @@ public class VariantContextUnitTest extends VariantBaseTest {
         Genotype g5 = GenotypeBuilder.create("AC", Arrays.asList(Aref, C));
         VariantContext vc = new VariantContextBuilder("genotypes", snpLoc, snpLocStart, snpLocStop, alleles).genotypes(g1,g2,g3,g4,g5).make();
 
-        VariantContext vc12 = vc.subContextFromSamples(new HashSet<String>(Arrays.asList(g1.getSampleName(), g2.getSampleName())), true);
-        VariantContext vc1 = vc.subContextFromSamples(new HashSet<String>(Arrays.asList(g1.getSampleName())), true);
-        VariantContext vc23 = vc.subContextFromSamples(new HashSet<String>(Arrays.asList(g2.getSampleName(), g3.getSampleName())), true);
-        VariantContext vc4 = vc.subContextFromSamples(new HashSet<String>(Arrays.asList(g4.getSampleName())), true);
-        VariantContext vc14 = vc.subContextFromSamples(new HashSet<String>(Arrays.asList(g1.getSampleName(), g4.getSampleName())), true);
-        VariantContext vc125 = vc.subContextFromSamples(new HashSet<String>(Arrays.asList(g1.getSampleName(), g2.getSampleName(), g5.getSampleName())), true);
+        VariantContext vc12 = vc.subContextFromSamples(
+                new HashSet<>(Arrays.asList(g1.getSampleName(), g2.getSampleName())), true);
+        VariantContext vc1 = vc.subContextFromSamples(new HashSet<>(Arrays.asList(g1.getSampleName())), true);
+        VariantContext vc23 = vc.subContextFromSamples(
+                new HashSet<>(Arrays.asList(g2.getSampleName(), g3.getSampleName())), true);
+        VariantContext vc4 = vc.subContextFromSamples(new HashSet<>(Arrays.asList(g4.getSampleName())), true);
+        VariantContext vc14 = vc.subContextFromSamples(
+                new HashSet<>(Arrays.asList(g1.getSampleName(), g4.getSampleName())), true);
+        VariantContext vc125 = vc.subContextFromSamples(
+                new HashSet<>(Arrays.asList(g1.getSampleName(), g2.getSampleName(), g5.getSampleName())), true);
 
         Assert.assertTrue(vc12.isPolymorphicInSamples());
         Assert.assertTrue(vc23.isPolymorphicInSamples());
@@ -676,7 +680,7 @@ public class VariantContextUnitTest extends VariantBaseTest {
 
     @DataProvider(name = "getAlleles")
     public Object[][] mergeAllelesData() {
-        List<Object[]> tests = new ArrayList<Object[]>();
+        List<Object[]> tests = new ArrayList<>();
 
         tests.add(new Object[]{new GetAllelesTest("A*",   Aref)});
         tests.add(new Object[]{new GetAllelesTest("A*/C", Aref, C)});
@@ -747,7 +751,7 @@ public class VariantContextUnitTest extends VariantBaseTest {
         VariantContext sites = new VariantContextBuilder("sites", snpLoc, snpLocStart, snpLocStop, Arrays.asList(Aref, T)).make();
         VariantContext genotypes = new VariantContextBuilder(sites).source("genotypes").genotypes(g1, g2, g3).make();
 
-        List<Object[]> tests = new ArrayList<Object[]>();
+        List<Object[]> tests = new ArrayList<>();
 
         tests.add(new Object[]{new SitesAndGenotypesVC("sites", sites)});
         tests.add(new Object[]{new SitesAndGenotypesVC("genotypes", genotypes)});
@@ -822,7 +826,7 @@ public class VariantContextUnitTest extends VariantBaseTest {
         boolean updateAlleles;
 
         private SubContextTest(Collection<String> samples, boolean updateAlleles) {
-            this.samples = new HashSet<String>(samples);
+            this.samples = new HashSet<>(samples);
             this.updateAlleles = updateAlleles;
         }
 
@@ -833,10 +837,10 @@ public class VariantContextUnitTest extends VariantBaseTest {
 
     @DataProvider(name = "SubContextTest")
     public Object[][] MakeSubContextTest() {
-        List<Object[]> tests = new ArrayList<Object[]>();
+        List<Object[]> tests = new ArrayList<>();
 
         for ( boolean updateAlleles : Arrays.asList(true, false)) {
-            tests.add(new Object[]{new SubContextTest(Collections.<String>emptySet(), updateAlleles)});
+            tests.add(new Object[]{new SubContextTest(Collections.emptySet(), updateAlleles)});
             tests.add(new Object[]{new SubContextTest(Collections.singleton("MISSING"), updateAlleles)});
             tests.add(new Object[]{new SubContextTest(Collections.singleton("AA"), updateAlleles)});
             tests.add(new Object[]{new SubContextTest(Collections.singleton("AT"), updateAlleles)});
@@ -871,7 +875,7 @@ public class VariantContextUnitTest extends VariantBaseTest {
         Assert.assertEquals(sub.getID(), vc.getID());
         Assert.assertEquals(sub.getAttributes(), vc.getAttributes());
 
-        Set<Genotype> expectedGenotypes = new HashSet<Genotype>();
+        Set<Genotype> expectedGenotypes = new HashSet<>();
         if ( cfg.samples.contains(g1.getSampleName()) ) expectedGenotypes.add(g1);
         if ( cfg.samples.contains(g2.getSampleName()) ) expectedGenotypes.add(g2);
         if ( cfg.samples.contains(g3.getSampleName()) ) expectedGenotypes.add(g3);
@@ -881,10 +885,10 @@ public class VariantContextUnitTest extends VariantBaseTest {
         // these values depend on the results of sub
         if ( cfg.updateAlleles ) {
             // do the work to see what alleles should be here, and which not
-            List<Allele> expectedAlleles = new ArrayList<Allele>();
+            List<Allele> expectedAlleles = new ArrayList<>();
             expectedAlleles.add(Aref);
 
-            Set<Allele> genotypeAlleles = new HashSet<Allele>();
+            Set<Allele> genotypeAlleles = new HashSet<>();
             for ( final Genotype g : expectedGC )
                 genotypeAlleles.addAll(g.getAlleles());
             genotypeAlleles.remove(Aref);
@@ -925,7 +929,7 @@ public class VariantContextUnitTest extends VariantBaseTest {
 
     @DataProvider(name = "SampleNamesTest")
     public Object[][] MakeSampleNamesTest() {
-        List<Object[]> tests = new ArrayList<Object[]>();
+        List<Object[]> tests = new ArrayList<>();
 
         tests.add(new Object[]{new SampleNamesTest(Arrays.asList("1"), Arrays.asList("1"))});
         tests.add(new Object[]{new SampleNamesTest(Arrays.asList("2", "1"), Arrays.asList("1", "2"))});
@@ -959,7 +963,7 @@ public class VariantContextUnitTest extends VariantBaseTest {
         VariantContext vc = new VariantContextBuilder("genotypes", snpLoc, snpLocStart, snpLocStop, Arrays.asList(Aref, T)).genotypes(gc).make();
 
         // same sample names => success
-        Assert.assertTrue(vc.getSampleNames().equals(new HashSet<String>(cfg.sampleNames)), "vc.getSampleNames() = " + vc.getSampleNames());
+        Assert.assertTrue(vc.getSampleNames().equals(new HashSet<>(cfg.sampleNames)), "vc.getSampleNames() = " + vc.getSampleNames());
         Assert.assertEquals(vc.getSampleNamesOrderedByName(), cfg.sampleNamesInOrder, "vc.getSampleNamesOrderedByName() = " + vc.getSampleNamesOrderedByName());
 
         assertGenotypesAreInOrder(vc.getGenotypesOrderedByName(), cfg.sampleNamesInOrder);
@@ -1147,7 +1151,7 @@ public class VariantContextUnitTest extends VariantBaseTest {
                 fullyDecoded, toValidate);
     }
     private Set<String> makeRsIDsSet(final String... rsIds) {
-        return new HashSet<String>(Arrays.asList(rsIds));
+        return new HashSet<>(Arrays.asList(rsIds));
     }
 
 
@@ -1226,14 +1230,14 @@ public class VariantContextUnitTest extends VariantBaseTest {
 
         /** AN : total number of alleles in called genotypes **/
         // with AN set and hom-ref, we expect AN to be 2 for Aref/Aref
-        final Map<String, Object> attributesAN = new HashMap<String, Object>();
+        final Map<String, Object> attributesAN = new HashMap<>();
         attributesAN.put(VCFConstants.ALLELE_NUMBER_KEY, "2");
         final VariantContext vcANSet =
                 createValidateChromosomeCountsContext(Arrays.asList(Aref), attributesAN, homRef);
 
         // with AN set, one no-call (no-calls get ignored by getCalledChrCount() in VariantContext)
         // we expect AN to be 1 for Aref/no-call
-        final Map<String, Object> attributesANNoCall = new HashMap<String, Object>();
+        final Map<String, Object> attributesANNoCall = new HashMap<>();
         attributesANNoCall.put(VCFConstants.ALLELE_NUMBER_KEY, "1");
         final VariantContext vcANSetNoCall =
                 createValidateChromosomeCountsContext(Arrays.asList(Aref), attributesANNoCall, homRefNoCall);
@@ -1241,42 +1245,42 @@ public class VariantContextUnitTest extends VariantBaseTest {
 
         /** AC : allele count in genotypes, for each ALT allele, in the same order as listed **/
         // with AC set, and T/T, we expect AC to be 2 (for 2 counts of ALT T)
-        final Map<String, Object> attributesAC = new HashMap<String, Object>();
+        final Map<String, Object> attributesAC = new HashMap<>();
         attributesAC.put(VCFConstants.ALLELE_COUNT_KEY, "2");
         final VariantContext vcACSet =
                 createValidateChromosomeCountsContext(Arrays.asList(Aref, T), attributesAC, homVarT);
 
         // with AC set and no ALT (GT is 0/0), we expect AC count to be 0
-        final Map<String, Object> attributesACNoAlts = new HashMap<String, Object>();
+        final Map<String, Object> attributesACNoAlts = new HashMap<>();
         attributesACNoAlts.put(VCFConstants.ALLELE_COUNT_KEY, "0");
         final VariantContext vcACSetNoAlts =
                 createValidateChromosomeCountsContext(Arrays.asList(Aref), attributesACNoAlts, homRef);
 
         // with AC set, and two different ALTs (T and C), with GT of 1/2, we expect a count of 1 for each.
         // With two ALTs, a list is expected, so we set the attribute as a list of 1,1
-        final Map<String, Object> attributesACTwoAlts = new HashMap<String, Object>();
+        final Map<String, Object> attributesACTwoAlts = new HashMap<>();
         attributesACTwoAlts.put(VCFConstants.ALLELE_COUNT_KEY, Arrays.asList("1", "1"));
         final VariantContext vcACSetTwoAlts =
                 createValidateChromosomeCountsContext(Arrays.asList(Aref, T, C), attributesACTwoAlts, hetVarTC);
 
         // with AC set, and two different ALTs (T and C), with no GT, we expect a 2 count values.
-        final Map<String, Object> attributesACNoGtTwoAlts = new HashMap<String, Object>();
+        final Map<String, Object> attributesACNoGtTwoAlts = new HashMap<>();
         attributesACNoGtTwoAlts.put(VCFConstants.ALLELE_COUNT_KEY, Arrays.asList("1", "1"));
         final VariantContext vcACNoGtSetTwoAlts =
-                createValidateChromosomeCountsContext(Arrays.asList(Aref, T, C), attributesACNoGtTwoAlts, null);
+                createValidateChromosomeCountsContext(Arrays.asList(Aref, T, C), attributesACNoGtTwoAlts, (Genotype[]) null);
 
         // with AF set, and two different ALTs (T and C), with GT of 1/2, we expect two frequncy values.
         // With two ALTs, a list is expected, so we set the attribute as a list of 0.5,0.5
-        final Map<String, Object> attributesAFTwoAlts = new HashMap<String, Object>();
+        final Map<String, Object> attributesAFTwoAlts = new HashMap<>();
         attributesAFTwoAlts.put(VCFConstants.ALLELE_FREQUENCY_KEY, Arrays.asList("0.5", "0.5"));
         final VariantContext vcAFSetTwoAlts =
                 createValidateChromosomeCountsContext(Arrays.asList(Aref, T, C), attributesAFTwoAlts, hetVarTC);
 
         // with AF set, and two different ALTs (T and C), with no GT, we expect two frequency values.
-        final Map<String, Object> attributesAFNoGtTwoAlts = new HashMap<String, Object>();
+        final Map<String, Object> attributesAFNoGtTwoAlts = new HashMap<>();
         attributesAFNoGtTwoAlts.put(VCFConstants.ALLELE_FREQUENCY_KEY, Arrays.asList("0.5", "0.5"));
         final VariantContext vcAFNoGtSetTwoAlts =
-                createValidateChromosomeCountsContext(Arrays.asList(Aref, T, C), attributesAFNoGtTwoAlts, null);
+                createValidateChromosomeCountsContext(Arrays.asList(Aref, T, C), attributesAFNoGtTwoAlts, (Genotype[]) null);
 
         return new Object[][]{
                 {vcNoGenotypes},
@@ -1284,6 +1288,7 @@ public class VariantContextUnitTest extends VariantBaseTest {
                 {vcANSetNoCall},
                 {vcACSet},
                 {vcACSetNoAlts},
+                {vcACSetTwoAlts},
                 {vcACNoGtSetTwoAlts},
                 {vcAFSetTwoAlts},
                 {vcAFNoGtSetTwoAlts}
@@ -1303,60 +1308,60 @@ public class VariantContextUnitTest extends VariantBaseTest {
 
         /** AN : total number of alleles in called genotypes **/
         // with AN set and hom-ref, we expect AN to be 2 for Aref/Aref, so 3 will fail
-        final Map<String, Object> attributesAN = new HashMap<String, Object>();
+        final Map<String, Object> attributesAN = new HashMap<>();
         attributesAN.put(VCFConstants.ALLELE_NUMBER_KEY, "3");
         final VariantContext vcANSet =
                 createValidateChromosomeCountsContext(Arrays.asList(Aref), attributesAN, homRef);
 
         // with AN set, one no-call (no-calls get ignored by getCalledChrCount() in VariantContext)
         // we expect AN to be 1 for Aref/no-call, so 2 will fail
-        final Map<String, Object> attributesANNoCall = new HashMap<String, Object>();
+        final Map<String, Object> attributesANNoCall = new HashMap<>();
         attributesANNoCall.put(VCFConstants.ALLELE_NUMBER_KEY, "2");
         final VariantContext vcANSetNoCall =
                 createValidateChromosomeCountsContext(Arrays.asList(Aref), attributesANNoCall, homRefNoCall);
 
         /** AC : allele count in genotypes, for each ALT allele, in the same order as listed **/
         // with AC set but no ALTs, we expect a count of 0, so the wrong count will fail here
-        final Map<String, Object> attributesACWrongCount = new HashMap<String, Object>();
+        final Map<String, Object> attributesACWrongCount = new HashMap<>();
         attributesACWrongCount.put(VCFConstants.ALLELE_COUNT_KEY, "2");
         final VariantContext vcACWrongCount =
                 createValidateChromosomeCountsContext(Arrays.asList(Aref), attributesACWrongCount, homRef);
 
         // with AC set, two ALTs, but AC is not a list with count for each ALT
-        final Map<String, Object> attributesACTwoAlts = new HashMap<String, Object>();
+        final Map<String, Object> attributesACTwoAlts = new HashMap<>();
         attributesACTwoAlts.put(VCFConstants.ALLELE_COUNT_KEY, "1");
         final VariantContext vcACSetTwoAlts =
                 createValidateChromosomeCountsContext(Arrays.asList(Aref, T, C), attributesACTwoAlts, hetVarTC);
 
         // with AC set, two ALTs, and a list is correctly used, but wrong counts (we expect counts to be 1,1)
-        final Map<String, Object> attributesACTwoAltsWrongCount = new HashMap<String, Object>();
+        final Map<String, Object> attributesACTwoAltsWrongCount = new HashMap<>();
         attributesACTwoAltsWrongCount.put(VCFConstants.ALLELE_COUNT_KEY, Arrays.asList("1", "2"));
         final VariantContext vcACSetTwoAltsWrongCount =
                 createValidateChromosomeCountsContext(Arrays.asList(Aref, T, C), attributesACTwoAltsWrongCount, hetVarTC);
 
         // with AC set, two ALTs, but only count for one ALT (we expect two items in the list: 1,1)
-        final Map<String, Object> attributesACTwoAltsOneAltCount = new HashMap<String, Object>();
+        final Map<String, Object> attributesACTwoAltsOneAltCount = new HashMap<>();
         attributesACTwoAltsOneAltCount.put(VCFConstants.ALLELE_COUNT_KEY, Arrays.asList("1"));
         final VariantContext vcACSetTwoAltsOneAltCount =
                 createValidateChromosomeCountsContext(Arrays.asList(Aref, T, C), attributesACTwoAltsOneAltCount, hetVarTC);
 
         // with AC set, no GT, two ALTs, but only count for one ALT (we expect two items in the list: 1,1)
-        final Map<String, Object> attributesACNoGtTwoAltsOneAltCount = new HashMap<String, Object>();
+        final Map<String, Object> attributesACNoGtTwoAltsOneAltCount = new HashMap<>();
         attributesACNoGtTwoAltsOneAltCount.put(VCFConstants.ALLELE_COUNT_KEY, Arrays.asList("1"));
         final VariantContext vcACNoGtSetTwoAltsOneAltCount =
-                createValidateChromosomeCountsContext(Arrays.asList(Aref, T, C), attributesACNoGtTwoAltsOneAltCount, null);
+                createValidateChromosomeCountsContext(Arrays.asList(Aref, T, C), attributesACNoGtTwoAltsOneAltCount, (Genotype[])null);
 
         // with AF set, two ALTs, but only frequency for one ALT (we expect two items in the list
-        final Map<String, Object> attributesAFTwoAltsWrongFreq = new HashMap<String, Object>();
+        final Map<String, Object> attributesAFTwoAltsWrongFreq = new HashMap<>();
         attributesAFTwoAltsWrongFreq.put(VCFConstants.ALLELE_FREQUENCY_KEY, Arrays.asList("0.5"));
         final VariantContext vcAFSetTwoAltsWrongFreq =
                 createValidateChromosomeCountsContext(Arrays.asList(Aref, T, C), attributesAFTwoAltsWrongFreq, hetVarTC);
 
         // with AF set, no GT, two ALTs, but only frequency for one ALT (we expect two items in the list
-        final Map<String, Object> attributesAFNoGtTwoAltsWrongCount = new HashMap<String, Object>();
+        final Map<String, Object> attributesAFNoGtTwoAltsWrongCount = new HashMap<>();
         attributesAFNoGtTwoAltsWrongCount.put(VCFConstants.ALLELE_FREQUENCY_KEY, Arrays.asList("0.5"));
         final VariantContext vcAFNoGtSetTwoAltsWrongFreq =
-                createValidateChromosomeCountsContext(Arrays.asList(Aref, T, C), attributesAFNoGtTwoAltsWrongCount, null);
+                createValidateChromosomeCountsContext(Arrays.asList(Aref, T, C), attributesAFNoGtTwoAltsWrongCount, (Genotype[])null);
 
         return new Object[][]{
                 {vcANSet},
diff --git a/src/test/java/htsjdk/variant/variantcontext/filter/CompoundFilterTest.java b/src/test/java/htsjdk/variant/variantcontext/filter/CompoundFilterTest.java
index 0a49853..efa788e 100644
--- a/src/test/java/htsjdk/variant/variantcontext/filter/CompoundFilterTest.java
+++ b/src/test/java/htsjdk/variant/variantcontext/filter/CompoundFilterTest.java
@@ -1,5 +1,6 @@
 package htsjdk.variant.variantcontext.filter;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.variant.variantcontext.Allele;
 import htsjdk.variant.variantcontext.VariantContext;
 import htsjdk.variant.variantcontext.VariantContextBuilder;
@@ -15,7 +16,7 @@ import java.util.List;
 /**
  * Created by farjoun on 9/9/15.
  */
-public class CompoundFilterTest {
+public class CompoundFilterTest extends HtsjdkTest {
 
     static AllPassFilter pass = new AllPassFilter();
     static AllFailFilter fail = new AllFailFilter();
@@ -75,4 +76,4 @@ public class CompoundFilterTest {
     public void testCompoundFilter(final VariantContextFilter filter, final boolean shouldPass) {
         Assert.assertEquals(filter.test(vc), shouldPass, filter.toString());
     }
-}
\ No newline at end of file
+}
diff --git a/src/test/java/htsjdk/variant/variantcontext/filter/FilteringVariantContextIteratorTest.java b/src/test/java/htsjdk/variant/variantcontext/filter/FilteringVariantContextIteratorTest.java
index d8decfd..eeb2213 100644
--- a/src/test/java/htsjdk/variant/variantcontext/filter/FilteringVariantContextIteratorTest.java
+++ b/src/test/java/htsjdk/variant/variantcontext/filter/FilteringVariantContextIteratorTest.java
@@ -24,6 +24,7 @@
 
 package htsjdk.variant.variantcontext.filter;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.variant.variantcontext.VariantContext;
 import htsjdk.variant.vcf.VCFFileReader;
 import org.testng.Assert;
@@ -36,7 +37,7 @@ import java.io.File;
  * Tests for testing the (VariantContext)FilteringVariantContextIterator, and the HeterozygosityFilter
  */
 
-public class FilteringVariantContextIteratorTest {
+public class FilteringVariantContextIteratorTest extends HtsjdkTest {
     final File testDir = new File("src/test/resources/htsjdk/variant");
 
     @DataProvider
diff --git a/src/test/java/htsjdk/variant/variantcontext/filter/GenotypeQualityFilterTest.java b/src/test/java/htsjdk/variant/variantcontext/filter/GenotypeQualityFilterTest.java
index 809133f..a615f81 100644
--- a/src/test/java/htsjdk/variant/variantcontext/filter/GenotypeQualityFilterTest.java
+++ b/src/test/java/htsjdk/variant/variantcontext/filter/GenotypeQualityFilterTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.variant.variantcontext.filter;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.variant.variantcontext.Allele;
 import htsjdk.variant.variantcontext.GenotypeBuilder;
 import htsjdk.variant.variantcontext.VariantContext;
@@ -37,7 +38,7 @@ import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 
-public class GenotypeQualityFilterTest {
+public class GenotypeQualityFilterTest extends HtsjdkTest {
 
     Allele refA = Allele.create("A", true);
     Allele G = Allele.create("G", false);
diff --git a/src/test/java/htsjdk/variant/variantcontext/filter/HeterozygosityFilterTest.java b/src/test/java/htsjdk/variant/variantcontext/filter/HeterozygosityFilterTest.java
index b4cd3a8..e2e9881 100644
--- a/src/test/java/htsjdk/variant/variantcontext/filter/HeterozygosityFilterTest.java
+++ b/src/test/java/htsjdk/variant/variantcontext/filter/HeterozygosityFilterTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.variant.variantcontext.filter;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.variant.variantcontext.Allele;
 import htsjdk.variant.variantcontext.GenotypeBuilder;
 import htsjdk.variant.variantcontext.VariantContext;
@@ -37,7 +38,7 @@ import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 
-public class HeterozygosityFilterTest {
+public class HeterozygosityFilterTest extends HtsjdkTest {
 
     Allele refA = Allele.create("A", true);
     Allele G = Allele.create("G", false);
diff --git a/src/test/java/htsjdk/variant/variantcontext/filter/JavascriptVariantFilterTest.java b/src/test/java/htsjdk/variant/variantcontext/filter/JavascriptVariantFilterTest.java
index 3993b79..7fb98c3 100644
--- a/src/test/java/htsjdk/variant/variantcontext/filter/JavascriptVariantFilterTest.java
+++ b/src/test/java/htsjdk/variant/variantcontext/filter/JavascriptVariantFilterTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.variant.variantcontext.filter;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.variant.vcf.VCFFileReader;
 
 import org.testng.Assert;
@@ -36,7 +37,7 @@ import java.io.IOException;
  * @author Pierre Lindenbaum PhD Institut du Thorax - INSERM - Nantes - France
  */
 
-public class JavascriptVariantFilterTest {
+public class JavascriptVariantFilterTest extends HtsjdkTest {
     final File testDir = new File("src/test/resources/htsjdk/variant");
 
     @DataProvider
diff --git a/src/test/java/htsjdk/variant/variantcontext/filter/PassingVariantFilterTest.java b/src/test/java/htsjdk/variant/variantcontext/filter/PassingVariantFilterTest.java
index 3cbb60c..da28264 100644
--- a/src/test/java/htsjdk/variant/variantcontext/filter/PassingVariantFilterTest.java
+++ b/src/test/java/htsjdk/variant/variantcontext/filter/PassingVariantFilterTest.java
@@ -1,5 +1,6 @@
 package htsjdk.variant.variantcontext.filter;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.variant.variantcontext.Allele;
 import htsjdk.variant.variantcontext.VariantContext;
 import htsjdk.variant.variantcontext.VariantContextBuilder;
@@ -16,7 +17,7 @@ import java.util.List;
 /**
  * Created by farjoun on 9/10/15.
  */
-public class PassingVariantFilterTest {
+public class PassingVariantFilterTest extends HtsjdkTest {
     Allele refA = Allele.create("A", true);
     Allele G = Allele.create("G", false);
 
@@ -43,4 +44,4 @@ public class PassingVariantFilterTest {
 
         Assert.assertEquals(passingVariantFilter.test(vc), shouldPass, vc.toString());
     }
-}
\ No newline at end of file
+}
diff --git a/src/test/java/htsjdk/variant/variantcontext/filter/SnpFilterTest.java b/src/test/java/htsjdk/variant/variantcontext/filter/SnpFilterTest.java
index 74f1bb5..e091ca0 100644
--- a/src/test/java/htsjdk/variant/variantcontext/filter/SnpFilterTest.java
+++ b/src/test/java/htsjdk/variant/variantcontext/filter/SnpFilterTest.java
@@ -1,5 +1,6 @@
 package htsjdk.variant.variantcontext.filter;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.variant.variantcontext.Allele;
 import htsjdk.variant.variantcontext.VariantContext;
 import htsjdk.variant.variantcontext.VariantContextBuilder;
@@ -16,7 +17,7 @@ import java.util.List;
 /**
  * Created by farjoun on 9/9/15.
  */
-public class SnpFilterTest {
+public class SnpFilterTest extends HtsjdkTest {
     Allele refA = Allele.create("A", true);
     Allele refAG = Allele.create("AG", true);
 
diff --git a/src/test/java/htsjdk/variant/variantcontext/writer/AsyncVariantContextWriterUnitTest.java b/src/test/java/htsjdk/variant/variantcontext/writer/AsyncVariantContextWriterUnitTest.java
new file mode 100644
index 0000000..eddb49c
--- /dev/null
+++ b/src/test/java/htsjdk/variant/variantcontext/writer/AsyncVariantContextWriterUnitTest.java
@@ -0,0 +1,146 @@
+/*
+* Copyright (c) 2017 The Broad Institute
+* 
+* Permission is hereby granted, free of charge, to any person
+* obtaining a copy of this software and associated documentation
+* files (the "Software"), to deal in the Software without
+* restriction, including without limitation the rights to use,
+* copy, modify, merge, publish, distribute, sublicense, and/or sell
+* copies of the Software, and to permit persons to whom the
+* Software is furnished to do so, subject to the following
+* conditions:
+* 
+* The above copyright notice and this permission notice shall be
+* included in all copies or substantial portions of the Software.
+* 
+* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
+* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+
+package htsjdk.variant.variantcontext.writer;
+
+import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.util.TestUtil;
+import htsjdk.tribble.Tribble;
+import htsjdk.tribble.readers.AsciiLineReader;
+import htsjdk.tribble.readers.AsciiLineReaderIterator;
+import htsjdk.variant.VariantBaseTest;
+import htsjdk.variant.variantcontext.Allele;
+import htsjdk.variant.variantcontext.Genotype;
+import htsjdk.variant.variantcontext.GenotypeBuilder;
+import htsjdk.variant.variantcontext.GenotypesContext;
+import htsjdk.variant.variantcontext.VariantContext;
+import htsjdk.variant.variantcontext.VariantContextBuilder;
+import htsjdk.variant.vcf.VCFCodec;
+import htsjdk.variant.vcf.VCFHeader;
+import htsjdk.variant.vcf.VCFHeaderLine;
+import htsjdk.variant.vcf.VCFHeaderVersion;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * @author amila
+ *         <p/>
+ *         Class AsyncVariantContextWriterUnitTest
+ *         <p/>
+ *         This class tests out the ability of the VCF writer to correctly write VCF files with Asynchronous IO
+ */
+public class AsyncVariantContextWriterUnitTest extends VariantBaseTest {
+
+    @BeforeClass
+    private void createTemporaryDirectory() {
+        File tempDir = TestUtil.getTempDirectory("VCFWriter", "StaleIndex");
+        tempDir.deleteOnExit();
+    }
+
+    /** test, using the writer and reader, that we can output and input a VCF body without problems */
+    @Test
+    public void testWriteAndReadAsyncVCFHeaderless() throws IOException {
+        final File fakeVCFFile = VariantBaseTest.createTempFile("testWriteAndReadAsyncVCFHeaderless.", ".vcf");
+        fakeVCFFile.deleteOnExit();
+
+        Tribble.indexFile(fakeVCFFile).deleteOnExit();
+        final Set<VCFHeaderLine> metaData = new HashSet<>();
+        final Set<String> additionalColumns = new HashSet<>();
+        final SAMSequenceDictionary sequenceDict = createArtificialSequenceDictionary();
+        final VCFHeader header = createFakeHeader(metaData, additionalColumns, sequenceDict);
+        try (final VariantContextWriter writer = new VariantContextWriterBuilder()
+                .setOutputFile(fakeVCFFile).setReferenceDictionary(sequenceDict)
+                .setOptions(EnumSet.of(Options.ALLOW_MISSING_FIELDS_IN_HEADER, Options.INDEX_ON_THE_FLY, Options.USE_ASYNC_IO))
+                .build()) {
+            writer.setHeader(header);
+            writer.add(createVC(header));
+            writer.add(createVC(header));
+        }
+        final VCFCodec codec = new VCFCodec();
+        codec.setVCFHeader(header, VCFHeaderVersion.VCF4_2);
+
+        try (final FileInputStream fis = new FileInputStream(fakeVCFFile)) {
+            final AsciiLineReaderIterator iterator = new AsciiLineReaderIterator(new AsciiLineReader(fis));
+            int counter = 0;
+            while (iterator.hasNext()) {
+                VariantContext context = codec.decode(iterator.next());
+                counter++;
+            }
+            Assert.assertEquals(counter, 2);
+        }
+    }
+
+    /**
+     * create a fake header of known quantity
+     * @param metaData           the header lines
+     * @param additionalColumns  the additional column names
+     * @return a fake VCF header
+     */
+    public static VCFHeader createFakeHeader(final Set<VCFHeaderLine> metaData, final Set<String> additionalColumns,
+                                             final SAMSequenceDictionary sequenceDict) {
+        metaData.add(new VCFHeaderLine("two", "2"));
+        additionalColumns.add("extra1");
+        additionalColumns.add("extra2");
+        final VCFHeader ret = new VCFHeader(metaData, additionalColumns);
+        ret.setSequenceDictionary(sequenceDict);
+        return ret;
+    }
+
+    /**
+     * create a fake VCF record
+     * @param header the VCF header
+     * @return a VCFRecord
+     */
+    private VariantContext createVC(final VCFHeader header) {
+
+        final List<Allele> alleles = new ArrayList<Allele>();
+        final Map<String, Object> attributes = new HashMap<String,Object>();
+        final GenotypesContext genotypes = GenotypesContext.create(header.getGenotypeSamples().size());
+
+        alleles.add(Allele.create("A",true));
+        alleles.add(Allele.create("ACC",false));
+
+        attributes.put("DP","50");
+        for (final String name : header.getGenotypeSamples()) {
+            final Genotype gt = new GenotypeBuilder(name,alleles.subList(1,2)).GQ(0).attribute("BB", "1").phased(true).make();
+            genotypes.add(gt);
+        }
+        return new VariantContextBuilder("RANDOM", "1", 1, 1, alleles)
+                .genotypes(genotypes).attributes(attributes).make();
+    }
+}
+
diff --git a/src/test/java/htsjdk/variant/variantcontext/writer/TabixOnTheFlyIndexCreationTest.java b/src/test/java/htsjdk/variant/variantcontext/writer/TabixOnTheFlyIndexCreationTest.java
index 2fd1520..f8c8fd1 100644
--- a/src/test/java/htsjdk/variant/variantcontext/writer/TabixOnTheFlyIndexCreationTest.java
+++ b/src/test/java/htsjdk/variant/variantcontext/writer/TabixOnTheFlyIndexCreationTest.java
@@ -23,6 +23,7 @@
  */
 package htsjdk.variant.variantcontext.writer;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.tribble.AbstractFeatureReader;
 import htsjdk.tribble.CloseableTribbleIterator;
 import htsjdk.tribble.FeatureReader;
@@ -36,7 +37,7 @@ import org.testng.annotations.Test;
 import java.io.File;
 import java.util.EnumSet;
 
-public class TabixOnTheFlyIndexCreationTest {
+public class TabixOnTheFlyIndexCreationTest extends HtsjdkTest {
     private static final File SMALL_VCF = new File("src/test/resources/htsjdk/tribble/tabix/trioDup.vcf.gz");
     @Test
     public void simpleTest() throws Exception {
diff --git a/src/test/java/htsjdk/variant/variantcontext/writer/VCFWriterUnitTest.java b/src/test/java/htsjdk/variant/variantcontext/writer/VCFWriterUnitTest.java
index eaf2f95..f705bb7 100644
--- a/src/test/java/htsjdk/variant/variantcontext/writer/VCFWriterUnitTest.java
+++ b/src/test/java/htsjdk/variant/variantcontext/writer/VCFWriterUnitTest.java
@@ -26,10 +26,13 @@
 package htsjdk.variant.variantcontext.writer;
 
 import htsjdk.samtools.SAMSequenceDictionary;
+import htsjdk.samtools.util.BlockCompressedInputStream;
 import htsjdk.samtools.util.TestUtil;
 import htsjdk.tribble.AbstractFeatureReader;
 import htsjdk.tribble.FeatureReader;
 import htsjdk.tribble.Tribble;
+import htsjdk.tribble.readers.AsciiLineReader;
+import htsjdk.tribble.readers.AsciiLineReaderIterator;
 import htsjdk.tribble.util.TabixUtils;
 import htsjdk.variant.VariantBaseTest;
 import htsjdk.variant.variantcontext.Allele;
@@ -45,6 +48,7 @@ import htsjdk.variant.vcf.VCFHeaderLine;
 import htsjdk.variant.vcf.VCFHeaderVersion;
 
 import java.io.File;
+import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -57,7 +61,6 @@ import java.util.Map;
 import java.util.Set;
 
 import org.testng.Assert;
-import org.testng.annotations.AfterClass;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
@@ -77,28 +80,22 @@ public class VCFWriterUnitTest extends VariantBaseTest {
     @BeforeClass
     private void createTemporaryDirectory() {
         tempDir = TestUtil.getTempDirectory("VCFWriter", "StaleIndex");
+        tempDir.deleteOnExit();
     }
 
-    @AfterClass
-    private void deleteTemporaryDirectory() {
-        for (File f : tempDir.listFiles()) {
-            f.delete();
-        }
-        tempDir.delete();
-    }
 
     /** test, using the writer and reader, that we can output and input a VCF file without problems */
     @Test(dataProvider = "vcfExtensionsDataProvider")
     public void testBasicWriteAndRead(final String extension) throws IOException {
-        final File fakeVCFFile = File.createTempFile("testBasicWriteAndRead.", extension);
+        final File fakeVCFFile = File.createTempFile("testBasicWriteAndRead.", extension, tempDir);
         fakeVCFFile.deleteOnExit();
         if (".vcf.gz".equals(extension)) {
-            new File(fakeVCFFile.getAbsolutePath() + ".tbi").deleteOnExit();
+            new File(fakeVCFFile.getAbsolutePath() + ".tbi");
         } else {
             Tribble.indexFile(fakeVCFFile).deleteOnExit();
         }
-        metaData = new HashSet<VCFHeaderLine>();
-        additionalColumns = new HashSet<String>();
+        metaData = new HashSet<>();
+        additionalColumns = new HashSet<>();
         final SAMSequenceDictionary sequenceDict = createArtificialSequenceDictionary();
         final VCFHeader header = createFakeHeader(metaData, additionalColumns, sequenceDict);
         final VariantContextWriter writer = new VariantContextWriterBuilder()
@@ -118,7 +115,7 @@ public class VCFWriterUnitTest extends VariantBaseTest {
 
         // validate what we're reading in
         validateHeader(headerFromFile, sequenceDict);
-        
+
         try {
             final Iterator<VariantContext> it = reader.iterator();
             while(it.hasNext()) {
@@ -133,13 +130,102 @@ public class VCFWriterUnitTest extends VariantBaseTest {
 
     }
 
+    /** test, using the writer and reader, that we can output and input a VCF body without problems */
+    @Test(dataProvider = "vcfExtensionsDataProvider")
+    public void testWriteAndReadVCFHeaderless(final String extension) throws IOException {
+        final File fakeVCFFile = File.createTempFile("testWriteAndReadVCFHeaderless.", extension, tempDir);
+        fakeVCFFile.deleteOnExit();
+        if (".vcf.gz".equals(extension)) {
+            new File(fakeVCFFile.getAbsolutePath() + ".tbi");
+        } else {
+            Tribble.indexFile(fakeVCFFile).deleteOnExit();
+        }
+        metaData = new HashSet<>();
+        additionalColumns = new HashSet<>();
+        final SAMSequenceDictionary sequenceDict = createArtificialSequenceDictionary();
+        final VCFHeader header = createFakeHeader(metaData, additionalColumns, sequenceDict);
+        try (final VariantContextWriter writer = new VariantContextWriterBuilder()
+                .setOutputFile(fakeVCFFile).setReferenceDictionary(sequenceDict)
+                .setOptions(EnumSet.of(Options.ALLOW_MISSING_FIELDS_IN_HEADER, Options.INDEX_ON_THE_FLY))
+                .build()) {
+            writer.setHeader(header);
+            writer.add(createVC(header));
+            writer.add(createVC(header));
+        }
+        final VCFCodec codec = new VCFCodec();
+        codec.setVCFHeader(header, VCFHeaderVersion.VCF4_2);
+
+        try (BlockCompressedInputStream bcis = new BlockCompressedInputStream(fakeVCFFile);
+                FileInputStream fis = new FileInputStream(fakeVCFFile)) {
+            AsciiLineReaderIterator iterator =
+                    new AsciiLineReaderIterator(new AsciiLineReader(".vcf.gz".equals(extension) ? bcis : fis));
+            int counter = 0;
+            while (iterator.hasNext()) {
+                VariantContext context = codec.decode(iterator.next());
+                counter++;
+            }
+            Assert.assertEquals(counter, 2);
+        }
+
+    }
+
+    @Test(expectedExceptions = IllegalStateException.class)
+    public void testWriteHeaderTwice() {
+        final File fakeVCFFile = VariantBaseTest.createTempFile("testBasicWriteAndRead.", ".vcf");
+        fakeVCFFile.deleteOnExit();
+        final SAMSequenceDictionary sequenceDict = createArtificialSequenceDictionary();
+        final VCFHeader header = createFakeHeader(metaData, additionalColumns, sequenceDict);
+        // prevent writing header twice
+        try (final VariantContextWriter writer1 = new VariantContextWriterBuilder()
+                .setOutputFile(fakeVCFFile)
+                .setReferenceDictionary(sequenceDict)
+                .build()) {
+            writer1.writeHeader(header);
+            writer1.writeHeader(header);
+        }
+    }
+
+
+    @Test(expectedExceptions = IllegalStateException.class)
+    public void testChangeHeaderAfterWritingHeader() {
+        final File fakeVCFFile = VariantBaseTest.createTempFile("testBasicWriteAndRead.", ".vcf");
+        fakeVCFFile.deleteOnExit();
+        final SAMSequenceDictionary sequenceDict = createArtificialSequenceDictionary();
+        final VCFHeader header = createFakeHeader(metaData, additionalColumns, sequenceDict);
+        // prevent changing header if it's already written
+        try (final VariantContextWriter writer2 = new VariantContextWriterBuilder()
+                .setOutputFile(fakeVCFFile)
+                .setReferenceDictionary(sequenceDict)
+                .build()) {
+            writer2.writeHeader(header);
+            writer2.setHeader(header);
+        }
+    }
+
+    @Test(expectedExceptions = IllegalStateException.class)
+    public void testChangeHeaderAfterWritingBody() {
+        final File fakeVCFFile = VariantBaseTest.createTempFile("testBasicWriteAndRead.", ".vcf");
+        fakeVCFFile.deleteOnExit();
+        final SAMSequenceDictionary sequenceDict = createArtificialSequenceDictionary();
+        final VCFHeader header = createFakeHeader(metaData, additionalColumns, sequenceDict);
+        // prevent changing header if part of body is already written
+        try (final VariantContextWriter writer3 = new VariantContextWriterBuilder()
+                .setOutputFile(fakeVCFFile)
+                .setReferenceDictionary(sequenceDict)
+                .build()) {
+            writer3.setHeader(header);
+            writer3.add(createVC(header));
+            writer3.setHeader(header);
+        }
+    }
+
     /**
      * create a fake header of known quantity
      * @param metaData           the header lines
      * @param additionalColumns  the additional column names
      * @return a fake VCF header
      */
-    public static VCFHeader createFakeHeader(final Set<VCFHeaderLine> metaData, final Set<String> additionalColumns,
+    private static VCFHeader createFakeHeader(final Set<VCFHeaderLine> metaData, final Set<String> additionalColumns,
                                              final SAMSequenceDictionary sequenceDict) {
         metaData.add(new VCFHeaderLine(VCFHeaderVersion.VCF4_0.getFormatString(), VCFHeaderVersion.VCF4_0.getVersionString()));
         metaData.add(new VCFHeaderLine("two", "2"));
@@ -182,7 +268,7 @@ public class VCFWriterUnitTest extends VariantBaseTest {
      * validate a VCF header
      * @param header the header to validate
      */
-    public void validateHeader(final VCFHeader header, final SAMSequenceDictionary sequenceDictionary) {
+    private void validateHeader(final VCFHeader header, final SAMSequenceDictionary sequenceDictionary) {
         // check the fields
         int index = 0;
         for (final VCFHeader.HEADER_FIELDS field : header.getHeaderFields()) {
diff --git a/src/test/java/htsjdk/variant/variantcontext/writer/VariantContextWriterBuilderUnitTest.java b/src/test/java/htsjdk/variant/variantcontext/writer/VariantContextWriterBuilderUnitTest.java
index 179c4cb..5e33e5c 100644
--- a/src/test/java/htsjdk/variant/variantcontext/writer/VariantContextWriterBuilderUnitTest.java
+++ b/src/test/java/htsjdk/variant/variantcontext/writer/VariantContextWriterBuilderUnitTest.java
@@ -396,4 +396,12 @@ public class VariantContextWriterBuilderUnitTest extends VariantBaseTest {
             Assert.assertFalse(builder.isOptionSet(option)); // has been unset
         }
     }
+
+    @Test
+    public void testStdOut() {
+        final VariantContextWriter writer = new VariantContextWriterBuilder().setOutputFile("/dev/stdout").clearOptions().build();
+        OutputStream s = ((VCFWriter) writer).getOutputStream();
+        Assert.assertNotNull(((VCFWriter) writer).getOutputStream());
+        Assert.assertNotEquals(((VCFWriter) writer).getStreamName(), IndexingVariantContextWriter.DEFAULT_READER_NAME);
+    }
 }
diff --git a/src/test/java/htsjdk/variant/vcf/VCFEncoderTest.java b/src/test/java/htsjdk/variant/vcf/VCFEncoderTest.java
index 2c4ff0f..6d4c23b 100644
--- a/src/test/java/htsjdk/variant/vcf/VCFEncoderTest.java
+++ b/src/test/java/htsjdk/variant/vcf/VCFEncoderTest.java
@@ -1,5 +1,6 @@
 package htsjdk.variant.vcf;
 
+import htsjdk.HtsjdkTest;
 import htsjdk.tribble.util.ParsingUtils;
 import htsjdk.variant.variantcontext.Allele;
 import htsjdk.variant.variantcontext.GenotypeBuilder;
@@ -18,7 +19,7 @@ import java.util.Map;
 import java.util.Set;
 import java.util.TreeSet;
 
-public class VCFEncoderTest {
+public class VCFEncoderTest extends HtsjdkTest {
 
 	@DataProvider(name = "VCFWriterDoubleFormatTestData")
 	public Object[][] makeVCFWriterDoubleFormatTestData() {
diff --git a/src/test/java/htsjdk/variant/vcf/VCFHeaderUnitTest.java b/src/test/java/htsjdk/variant/vcf/VCFHeaderUnitTest.java
index e9135cc..94da596 100644
--- a/src/test/java/htsjdk/variant/vcf/VCFHeaderUnitTest.java
+++ b/src/test/java/htsjdk/variant/vcf/VCFHeaderUnitTest.java
@@ -37,6 +37,7 @@ import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder;
 import org.testng.Assert;
 import org.testng.annotations.AfterClass;
 import org.testng.annotations.BeforeClass;
+import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
 import java.io.File;
@@ -104,7 +105,7 @@ public class VCFHeaderUnitTest extends VariantBaseTest {
     public void testVCFHeaderSampleRenamingSingleSampleVCF() throws Exception {
         final VCFCodec codec = new VCFCodec();
         codec.setRemappedSampleName("FOOSAMPLE");
-        final AsciiLineReaderIterator vcfIterator = new AsciiLineReaderIterator(new AsciiLineReader(new FileInputStream(variantTestDataRoot + "HiSeq.10000.vcf")));
+        final AsciiLineReaderIterator vcfIterator = new AsciiLineReaderIterator(AsciiLineReader.from(new FileInputStream(variantTestDataRoot + "HiSeq.10000.vcf")));
         final VCFHeader header = (VCFHeader) codec.readHeader(vcfIterator).getHeaderValue();
 
         Assert.assertEquals(header.getNGenotypeSamples(), 1, "Wrong number of samples in remapped header");
@@ -120,10 +121,18 @@ public class VCFHeaderUnitTest extends VariantBaseTest {
         }
     }
 
-    @Test
-    public void testVCFHeaderDictionaryMerging() {
-        VCFHeader headerOne = new VCFFileReader(new File(variantTestDataRoot + "dbsnp_135.b37.1000.vcf"), false).getFileHeader();
-        VCFHeader headerTwo = new VCFHeader(headerOne); // deep copy
+    @DataProvider
+    public Object[][] testVCFHeaderDictionaryMergingData() {
+        return new Object[][]{
+                {"diagnosis_targets_testfile.vcf"},  // numerically ordered contigs
+                {"dbsnp_135.b37.1000.vcf"}          // lexicographically ordered contigs
+        };
+    }
+
+    @Test(dataProvider = "testVCFHeaderDictionaryMergingData")
+    public void testVCFHeaderDictionaryMerging(final String vcfFileName) {
+        final VCFHeader headerOne = new VCFFileReader(new File(variantTestDataRoot + vcfFileName), false).getFileHeader();
+        final VCFHeader headerTwo = new VCFHeader(headerOne); // deep copy
         final List<String> sampleList = new ArrayList<String>();
         sampleList.addAll(headerOne.getSampleNamesInOrder());
 
@@ -141,7 +150,7 @@ public class VCFHeaderUnitTest extends VariantBaseTest {
     public void testVCFHeaderSampleRenamingMultiSampleVCF() throws Exception {
         final VCFCodec codec = new VCFCodec();
         codec.setRemappedSampleName("FOOSAMPLE");
-        final AsciiLineReaderIterator vcfIterator = new AsciiLineReaderIterator(new AsciiLineReader(new FileInputStream(variantTestDataRoot + "ex2.vcf")));
+        final AsciiLineReaderIterator vcfIterator = new AsciiLineReaderIterator(AsciiLineReader.from(new FileInputStream(variantTestDataRoot + "ex2.vcf")));
         final VCFHeader header = (VCFHeader) codec.readHeader(vcfIterator).getHeaderValue();
     }
 
@@ -149,7 +158,7 @@ public class VCFHeaderUnitTest extends VariantBaseTest {
     public void testVCFHeaderSampleRenamingSitesOnlyVCF() throws Exception {
         final VCFCodec codec = new VCFCodec();
         codec.setRemappedSampleName("FOOSAMPLE");
-        final AsciiLineReaderIterator vcfIterator = new AsciiLineReaderIterator(new AsciiLineReader(new FileInputStream(variantTestDataRoot + "dbsnp_135.b37.1000.vcf")));
+        final AsciiLineReaderIterator vcfIterator = new AsciiLineReaderIterator(AsciiLineReader.from(new FileInputStream(variantTestDataRoot + "dbsnp_135.b37.1000.vcf")));
         final VCFHeader header = (VCFHeader) codec.readHeader(vcfIterator).getHeaderValue();
     }
 
@@ -214,7 +223,8 @@ public class VCFHeaderUnitTest extends VariantBaseTest {
     @Test
     public void testVCFHeaderAddContigLine() {
         final VCFHeader header = getHiSeqVCFHeader();
-        final VCFContigHeaderLine contigLine = new VCFContigHeaderLine("<ID=chr1,length=1234567890,assembly=FAKE,md5=f126cdf8a6e0c7f379d618ff66beb2da,species=\"Homo sapiens\">", VCFHeaderVersion.VCF4_0, "chr1", 0);
+        final VCFContigHeaderLine contigLine = new VCFContigHeaderLine(
+                "<ID=chr1,length=1234567890,assembly=FAKE,md5=f126cdf8a6e0c7f379d618ff66beb2da,species=\"Homo sapiens\">", VCFHeaderVersion.VCF4_0, VCFHeader.CONTIG_KEY, 0);
         header.addMetaDataLine(contigLine);
 
         Assert.assertTrue(header.getContigLines().contains(contigLine), "Test contig line not found in contig header lines");
@@ -227,6 +237,36 @@ public class VCFHeaderUnitTest extends VariantBaseTest {
     }
 
     @Test
+    public void testVCFHeaderHonorContigLineOrder() throws IOException {
+        try (final VCFFileReader vcfReader = new VCFFileReader(new File(variantTestDataRoot + "dbsnp_135.b37.1000.vcf"), false)) {
+            // start with a header with a bunch of contig lines
+            final VCFHeader header = vcfReader.getFileHeader();
+            final List<VCFContigHeaderLine> originalHeaderList = header.getContigLines();
+            Assert.assertTrue(originalHeaderList.size() > 0);
+
+            // copy the contig lines to a new list, sticking an extra contig line in the middle
+            final List<VCFContigHeaderLine> orderedList = new ArrayList<>();
+            final int splitInTheMiddle = originalHeaderList.size() / 2;
+            orderedList.addAll(originalHeaderList.subList(0, splitInTheMiddle));
+            final VCFContigHeaderLine outrageousContigLine = new VCFContigHeaderLine(
+                    "<ID=outrageousID,length=1234567890,assembly=FAKE,md5=f126cdf8a6e0c7f379d618ff66beb2da,species=\"Homo sapiens\">",
+                    VCFHeaderVersion.VCF4_2,
+                    VCFHeader.CONTIG_KEY,
+                    0);
+            orderedList.add(outrageousContigLine);
+            // make sure the extra contig line is outrageous enough to not collide with a real contig ID
+            Assert.assertTrue(orderedList.contains(outrageousContigLine));
+            orderedList.addAll(originalHeaderList.subList(splitInTheMiddle, originalHeaderList.size()));
+            Assert.assertEquals(originalHeaderList.size() + 1, orderedList.size());
+
+            // crete a new header from the ordered list, and test that getContigLines honors the input order
+            final VCFHeader orderedHeader = new VCFHeader();
+            orderedList.forEach(hl -> orderedHeader.addMetaDataLine(hl));
+            Assert.assertEquals(orderedList, orderedHeader.getContigLines());
+        }
+    }
+
+    @Test
     public void testVCFHeaderAddOtherLine() {
         final VCFHeader header = getHiSeqVCFHeader();
         final VCFHeaderLine otherLine = new VCFHeaderLine("TestOtherLine", "val");
diff --git a/src/test/resources/htsjdk/samtools/SequenceUtil/upper_and_lowercase_read.sam b/src/test/resources/htsjdk/samtools/SequenceUtil/upper_and_lowercase_read.sam
index 82efe85..335d815 100644
--- a/src/test/resources/htsjdk/samtools/SequenceUtil/upper_and_lowercase_read.sam
+++ b/src/test/resources/htsjdk/samtools/SequenceUtil/upper_and_lowercase_read.sam
@@ -7,4 +7,4 @@ read1	0	chr1	1	0	16M	*	0	0	AcGtAcGTaCGtAcGt	AAAAAAAAAAAAAAAA	NM:i:0
 read2	0	chr1	1	0	16M	*	0	0	AcGtAcGTaCGtAcGt	AAAAAAAAAAAAAAAA	NM:i:0
 read3	0	chr2	1	0	16M	*	0	0	AcGtAcGTaCGtAcGt	AAAAAAAAAAAAAAAA	NM:i:8	MD:Z:0T2A0T2A0t2a0t2a0
 read4	0	chr2	1	0	8M	*	0	0	TCGATCGA	AAAAAAAA	NM:i:0
-read5	0	chr2	1	0	4M1D2M1S	*	0	0	TCGACGAA	AAAAAAAA	NM:i:1	MD:Z:4^T2
+read5	0	chr2	1	0	4M1D2M2S	*	0	0	TCGACGAA	AAAAAAAA	NM:i:1	MD:Z:4^T2
diff --git a/src/test/resources/htsjdk/samtools/ValidateSamFileTest/nm_tag_validation.cram b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/nm_tag_validation.cram
new file mode 100644
index 0000000..57c58df
Binary files /dev/null and b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/nm_tag_validation.cram differ
diff --git a/src/test/resources/htsjdk/samtools/ValidateSamFileTest/nm_tag_validation.fa b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/nm_tag_validation.fa
new file mode 100644
index 0000000..7c2ec2a
--- /dev/null
+++ b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/nm_tag_validation.fa
@@ -0,0 +1,71 @@
+>17 17:1-4200
+AAGCTTCTCACCCTGTTCCTGCATAGATAATTGCATGACAATTGCCTTGTCCCTGCTGAA
+TGTGCTCTGGGGTCTCTGGGGTCTCACCCACGACCAACTCCCTGGGCCTGGCACCAGGGA
+GCTTAACAAACATCTGTCCAGCGAATACCTGCATCCCTAGAAGTGAAGCCACCGCCCAAA
+GACACGCCCATGTCCAGCTTAACCTGCATCCCTAGAAGTGAAGGCACCGCCCAAAGACAC
+GCCCATGTCCAGCTTATTCTGCCCAGTTCCTCTCCAGAAAGGCTGCATGGTTGACACACA
+GTGCCTGCGACAAAGCTGAATGCTATCATTTAAAAACTCCTTGCTGGTTTGAGAGGCAGA
+AAATGATATCTCATAGTTGCTTTACTTTGCATATTTTAAAATTGTGACTTTCATGGCATA
+AATAATACTGGTTTATTACAGAAGCACTAGAAAATGCATGTGGACAAAAGTTGGGATTAG
+GAGAGAGAAATGAAGACATATGTCCACACAAAAACCTGTTCATTGCAGCTTTCTACCATC
+ACCAAAAATTGCAAACAACCACACGCCCTTCAACTGGGGAACTCATCAACAACAAACTTG
+TGGTTTACCCACACAATGGAAGACCACTTAGCAACAAAAAGGACCAAACTCCTGGTACAT
+GCAACTGACAGATGAATCTCAAACGCATTCCTCCGTGTGAAAGAAGCCGGACTCACAGGG
+CAACACACTATCTGACTGTTTCATGGGAAAGTCTGGAAACGGCAACACCATTGAGACAGA
+AAACAGGTGAGTGGTTGCCTGGGGCCAGGGAACTTTCTGGGGTCATATTCTCTGTGTTGA
+TTCTGGTGGTGGAAACAAGACTGTCCCAGCCTGGGTGATACAGCGAGACCCCATCTCTAC
+CAAAAAATTAAAAATTAGCTGGGCATGGTGGTGCATGCCTGTAGTCCCAGCTATTCACAG
+TGCTGAGGTGGGAAGATGCTTGAGCCCAGGAGTTCAAGGCTGCAATGAGCTATGATTGCG
+CCACTGCACTTTGGCCTGGACAACAGAGCAAAACCCTGTCTCTAAAAAAAGAAAAGAAAA
+GAAAAACTCACTGGATATGAATGATACAGGTTGAGGATCCATTATCTGAAATGCTTGGAC
+CAGATGTTTTGAATTTTGGATTTTTTCATATTTTGTAATCTTTGCAGTATATTTACCAGT
+TCAGCATCCCTAACTCAAAAATTCAAAAATCTGAAATCCCAAACGCGCCAATAAGCATTC
+CCTTTGAGCGTCATGTCGGTGCTTGGAATGTTTGGGGTTTTGGATTTACAGCTTTGGGAC
+GCTCAACCTGTACCTCAATAAACCTGATTTTAAAAAAGTTTGGGGGGATTCCCCTAAGCC
+CGCCACCCGGAGACAGCGGATTTCCTTAGTTACTTACTATGCTCCTTGGCCATTTCTCTA
+GGTATTGGTATATTGTGTCTGCTGTGAACTGTCCTTGGCCTGTTTGGTGACGGGTGAGGA
+GCAGGGACAGAAGGGTCCTGCGTGCCCTGCCTTCACAAGCCCCTGGAAGGAAAGTTGTTT
+TGGGATCTCTGCACCCTCAGCCTGGACAACTTGTGCCCATCTGGTGACCCCTCACTCAGC
+CACCAGACTTCCACGACAGGCTCCAGCCTCGGCACCTTCAGCCATGGACAGTTCCGCCAG
+CGTTGCCCTCTGTTCTGCTGTTTTCTCTACCAGAAGTGCCCTTCCCTCCTCACCTGACCA
+CTCTGGGGAAATCCCTCAGCACCCTCCCTGAGCATACCCTACTCTGGCACAAGCCCACCC
+TGCAAAGCCCCTGAGGCCCGCCCTGTGGCGTCTCTCCCTCCCTTGCTGTCAGGACAGTGG
+TCCTGGCCACCGGGGCTCACGGAGCCGCCCTGTGCCGTGTACCTCTGAGCCCTCTGCACA
+GTGCCTTCTGCTTGCCTGTGGCTTTGAGAAGAAACCCCTTCTGGTTATACATAAGACAGC
+CAGAGAAGGGAGTTGCCCAGGGTGGCACAGCACGTTGCTGCCAGTTACTGCCATTTTCAC
+GGGCATGAAATGGAGATAACAACAGGAGCGACCGCACAGGCTGCTGAGCGCGTCACACGC
+AGCCATCGCGCAGCTCAGGGATATTACGTGTAACTCGACATGTCAGCGATTGTCACAGGC
+ACTGCTACTCCTGGGGTTTTCCATCAAACCCTCAAGAGCTGGGCCTGGGGTCAACTTCCG
+GCCTGGGGAAACTGGGGCAAGTATCACCAGAGATGAGCTTTATAAAAATAATGGTGCTAG
+CTGGGCATGGTGGCTTGCACCTGTAATCCCAGCACTTTGGGAGGCCGAGCTAGGAGGATC
+GTTTGAGTCCAGCAGTTTGAGACCAGCCTGGCCAATACGGCAAAACCCAGTCTCTACAAA
+AAATACAAAAAACAACTAGCCAGGCGTGGTGGTGCACACCTGTAGTCCCAGCTACTCAGG
+AGGCTGAGGGGGAAGGACTGCTTGAGCCCAGGAGTTTGAGGCTGCTGTGAGCTGTGATCG
+CATCACTGCATTCCAGCCCGGTGACAGAGTGAGTCACTGTCTCAAAAAAGAAAGGAAGAA
+ATAAAGAAAACAAATAAAAATAATAGTGCAGACAAAAGGCCTTGACCCATCTAGCTTTGG
+CCCTCAGCATCAACCGCTAGATACGTCCCTCCCTTTCTTCTGGGGCACAGGTCACACTCT
+CTTCCAGGTCTAGGATGCAGCTGAGGGGTGCCCCTCTTACCATCTAATCTGTGCCCTTAT
+TTCCTCTGCTTTAGTGAGGAAGAGGCCCCTGGTCCATGAAGGGGCCTTTCAGAGACGGGG
+ACCCCTGAGGAGCCCCGAGCAGCAGCCGTCGTGTCTCACCCAGGGTGTCTGAAACAGATG
+TGGAGGTCTCGGGTGAGGCGTGGCTCAGATACAGGGAGTGGCCCACAGCTCGGCCTGTCT
+TTGAAAGGCCACGTGACCTGGCCCACGGCTGGCAGGTGGGACCCAGCTGCAGGGGTCCAG
+CAGCACCCACAGCAGCCACCTGTGGCAGGGAGGAGCTTGTGGTACAGTGGACAGGCCCTG
+CCCAGATGGCCCCCCGCCTGCCTGTGGAAGTTGACCAGACCATCTGTCACAGCAGGTAAG
+ACTCTGCTTTCTGGGCAACCCAGCAGGTGACCCTGGAATTCCTGTCCATCTGGCAGGTGG
+GCATTGAAACTGGTTTAAAAATGTCACACCATAGGCCGGGCACAGTGGCTCACGCCTGTA
+ATCCCAGCCCTTTGGGAGGCCAGGGTGGGTGGATCACTTGAGGTCAGGAGTTCAAGACCA
+GCCTGGCCAACATGGTGAAACCCCGTCTACTAAAAATACAAAAATTAGCCTGGCGTGGTG
+GCGCATGCCTGTAATCCCAGCTACTTGGGAAGCTGAGGGATGAGAACTGCTTGAACCTGG
+GAGGCAGACGTTGCAGTGAGCTGAGATCACGCCACTGCACTCCAGCCTGGGCAACAGAGT
+AAGACTCTGTCTCAAAAAAAAAAAAATCACACCATTTTGGCTTCAGATTGCATATCCTCC
+TGCAAGGATATATACGCGTGAAATTCAAGTCAATGACAAATCAGAAGAAAAAACATATAT
+ATACGCAAACCAGTATCCTACTGTGTGTGTCGTTTGTTGTGTTTTCGACAGCTGTCCGTG
+TTATAATAATTCCTCTAGTTCAAATTTATTCATTTTTAACTTCATAGTACCACATTCTAC
+ACACTGCCCATGTCCCCTCAAGCTTCCCCTGGCTCCTGCAACCACAAATCTACTCTCTGC
+CTCTGTGGGTTGACCTATTCTGGACACGTCATAGAAATAGAGTCCTGCAACACGTGGCCG
+TCTGTGTCTGGCTTCTCTCGCTTAGCATCTTGTTTCCAAGGTCCTCCCACAGTGTAGCAT
+GCACCTGCTACACTCCTTCTTAGGGCTGATATTCCACGCACCTGCTACACTCCTTCTTAT
+GGCTGATATTCCACGCACCTGCTACACTCCTTCTTAGGGCTGATATTCCACACACCCGCT
+ACACTCCTTCTTAGGGCTGATATTCCACGCACCCGCTACACTCCTTCTTAGGGCTGATAT
+TCCACGCACCTGCTACACTCCTTCTTAGGGCTGATATTCCACGCACCTGCTACACTCCTT
+CTTAGGGCTGATATTCCACGCACCTGCTACACTCCTTCTTAGGGCTGATATTCCACGCAC
diff --git a/src/test/resources/htsjdk/samtools/ValidateSamFileTest/nm_tag_validation.fa.fai b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/nm_tag_validation.fa.fai
new file mode 100644
index 0000000..c211266
--- /dev/null
+++ b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/nm_tag_validation.fa.fai
@@ -0,0 +1 @@
+17	4200	14	60	61
diff --git a/src/test/resources/htsjdk/samtools/ValidateSamFileTest/seq_qual_len_mismatch.sam b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/seq_qual_len_mismatch.sam
new file mode 100644
index 0000000..3c689b1
--- /dev/null
+++ b/src/test/resources/htsjdk/samtools/ValidateSamFileTest/seq_qual_len_mismatch.sam
@@ -0,0 +1,21 @@
+ at HD	VN:1.0	SO:coordinate
+ at SQ	SN:chr1	LN:101
+ at SQ	SN:chr2	LN:101
+ at SQ	SN:chr3	LN:101
+ at SQ	SN:chr4	LN:101
+ at SQ	SN:chr5	LN:101
+ at SQ	SN:chr6	LN:101
+ at SQ	SN:chr7	LN:404
+ at SQ	SN:chr8	LN:202
+ at RG	ID:0	SM:Hi,Mom!	LB:my-library	PL:ILLUMINA
+ at RG	ID:1	SM:Hi,Mom!	LB:my-library	PL:ILLUMINA
+ at RG	ID:2	SM:Hi,Mom!	LB:my-library	PL:Illumina
+ at PG	ID:1	PN:Hey!	VN:2.0
+both_reads_align_clip_marked	1107	chr7	1	255	101M	=	302	201	CAACAGAAGCNGGNATCTGTGTTTGTGTTTCGGATTTCCTGCTGAANNGNTTNTCGNNTCNNNNNNNNATCCCGATTTCNTTCCGCAGCTNACCTCCCAAN	)'.*.+2,))&&'&*/)-&*-)&.-)&)&),/-&&..)./.,.).*&&,&.&&-)&&&0*&&&&&&&&/32/,01460&&/6/*0*/2/283//36868/	RG:Z:0	PG:Z:1	NM:i:0	MQ:i:255	XT:Z:foo	OQ:Z:11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111
+both_reads_present_only_first_aligns	89	chr7	1	255	101M	*	0	0	CAACAGAAGCNGGNATCTGTGTTTGTGTTTCGGATTTCCTGCTGAANNGNTTNTCGNNTCNNNNNNNNATCCCGATTTCNTTCCGCAGCTNACCTCCCAAN	)'.*.+2,))&&'&*/)-&*-)&.-)&)&),/-&&..)./.,.).*&&,&.&&-)&&&0*&&&&&&&&/32/,01460&&/6/*0*/2/283//36868/	RG:Z:1	PG:Z:1	NM:i:3	MQ:i:255	XT:Z:foo	OQ:Z:11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111
+read_2_too_many_gaps	83	chr7	1	255	101M	=	302	201	CAACAGAAGCNGGNATCTGTGTTTGTGTTTCGGATTTCCTGCTGAANNGNTTNTCGNNTCNNNNNNNNATCCCGATTTCNTTCCGCAGCTNACCTCCCAAN	)'.*.+2,))&&'&*/)-&*-)&.-)&)&),/-&&..)./.,.).*&&,&.&&-)&&&0*&&&&&&&&/32/,01460&&/6/*0*/2/283//36868/	RG:Z:2	PG:Z:1	NM:i:8	MQ:i:255	XT:Z:foo2	OQ:Z:11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111
+both_reads_align_clip_adapter	147	chr7	16	255	101M	=	21	-96	CAACAGAAGCNGGNATCTGTGTTTGTGTTTCGGATTTCCTGCTGAANNGNTTNTCGNNTCNNNNNNNNATCCCGATTTCNTTCCGCAGCTNACCTCCCAAN	)'.*.+2,))&&'&*/)-&*-)&.-)&)&),/-&&..)./.,.).*&&,&.&&-)&&&0*&&&&&&&&/32/,01460&&/6/*0*/2/283//36868/	RG:Z:1	PG:Z:1	NM:i:1	MQ:i:255	XT:Z:foo2	OQ:Z:11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111
+both_reads_align_clip_adapter	99	chr7	21	255	101M	=	16	96	CAACAGAAGCNGGNATCTGTGTTTGTGTTTCGGATTTCCTGCTGAANNGNTTNTCGNNTCNNNNNNNNATCCCGATTTCNTTCCGCAGCTNACCTCCCAAN	)'.*.+2,))&&'&*/)-&*-)&.-)&)&),/-&&..)./.,.).*&&,&.&&-)&&&0*&&&&&&&&/32/,01460&&/6/*0*/2/283//36868/	RG:Z:1	PG:Z:1	NM:i:1	MQ:i:255	XT:Z:foo2	OQ:Z:11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111
+both_reads_align_clip_marked	163	chr7	302	255	101M	=	1	-201	NCGCGGCATCNCGATTTCTTTCCGCAGCTAACCTCCCGACAGATCGGCAGCGCGTCGTGTAGGTTATTATGGTACATCTTGTCGTGCGGCNAGAGCATACA	&/15445666651/566666553+2/14/&/555512+3/)-'/-&-'*+))*''13+3)'//++''/'))/3+&*5++)&'2+&+/*&-&&*)&-./1'	RG:Z:0	PG:Z:1	NM:i:5	MQ:i:255	OQ:Z:11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111
+read_2_too_many_gaps	163	chr7	302	255	10M1D10M5I76M	=	1	-201	NCGCGGCATCNCGATTTCTTTCCGCAGCTAACCTCCCGACAGATCGGCAGCGCGTCGTGTAGGTTATTATGGTACATCTTGTCGTGCGGCNAGAGCATACA	&/15445666651/566666553+2/14/&/555512+3/)-'/-&-'*+))*''13+3)'//++''/'))/3+&*5++)&'2+&+/*&-&&*)&-./1'	RG:Z:2	PG:Z:1	NM:i:6	MQ:i:255	OQ:Z:11111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111
+both_reads_present_only_first_aligns	165	*	0	0	*	chr7	1	0	NCGCGGCATCNCGATTTCTTTCCGCAGCTAACCTCCCGACAGATCGGCAGCGCGTCGTGTAGGTTATTATGGTACATCTTGTCGTGCGGCNAGAGCATACA	&/15445666651/566666553+2/14/&/555512+3/)-'/-&-'*+))*''13+3)'//++''/'))/3+&*5++)&'2+&+/*&-&&*)&-./1'	RG:Z:1	PG:Z:1
diff --git a/src/test/resources/htsjdk/samtools/cram/CEUTrio.HiSeq.WGS.b37.NA12878.20.first.8000.bam b/src/test/resources/htsjdk/samtools/cram/CEUTrio.HiSeq.WGS.b37.NA12878.20.first.8000.bam
new file mode 100644
index 0000000..9df4f69
Binary files /dev/null and b/src/test/resources/htsjdk/samtools/cram/CEUTrio.HiSeq.WGS.b37.NA12878.20.first.8000.bam differ
diff --git a/src/test/resources/htsjdk/samtools/cram/amb#amb.2.1.cram b/src/test/resources/htsjdk/samtools/cram/amb#amb.2.1.cram
new file mode 100644
index 0000000..6b19d5c
Binary files /dev/null and b/src/test/resources/htsjdk/samtools/cram/amb#amb.2.1.cram differ
diff --git a/src/test/resources/htsjdk/samtools/cram/amb#amb.3.0.cram b/src/test/resources/htsjdk/samtools/cram/amb#amb.3.0.cram
new file mode 100644
index 0000000..e683dc8
Binary files /dev/null and b/src/test/resources/htsjdk/samtools/cram/amb#amb.3.0.cram differ
diff --git a/src/test/resources/htsjdk/samtools/cram/amb#amb.sam b/src/test/resources/htsjdk/samtools/cram/amb#amb.sam
new file mode 100644
index 0000000..0640c90
--- /dev/null
+++ b/src/test/resources/htsjdk/samtools/cram/amb#amb.sam
@@ -0,0 +1,57 @@
+ at HD	VN:1.4	GO:none	SO:coordinate
+ at SQ	SN:iupac	LN:31	M5:f88a72084e90c68cc7aa569bbf257e70
+ at RG	ID:ID	SM:foo
+read_A	0	iupac	1	86	30M	*	0	0	AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA	******************************
+read_B	0	iupac	1	86	30M	*	0	0	BBBBBBBBBBBBBBBBBBBBBBBBBBBBBB	******************************
+read_C	0	iupac	1	86	30M	*	0	0	CCCCCCCCCCCCCCCCCCCCCCCCCCCCCC	******************************
+read_D	0	iupac	1	86	30M	*	0	0	DDDDDDDDDDDDDDDDDDDDDDDDDDDDDD	******************************
+read_E	4	iupac	1	86	30M	*	0	0	EEEEEEEEEEEEEEEEEEEEEEEEEEEEEE	******************************
+read_F	0	iupac	1	86	30M	*	0	0	FFFFFFFFFFFFFFFFFFFFFFFFFFFFFF	******************************
+read_G	0	iupac	1	86	30M	*	0	0	GGGGGGGGGGGGGGGGGGGGGGGGGGGGGG	******************************
+read_H	0	iupac	1	86	30M	*	0	0	HHHHHHHHHHHHHHHHHHHHHHHHHHHHHH	******************************
+read_I	0	iupac	1	86	30M	*	0	0	IIIIIIIIIIIIIIIIIIIIIIIIIIIIII	******************************
+read_J	0	iupac	1	86	30M	*	0	0	JJJJJJJJJJJJJJJJJJJJJJJJJJJJJJ	******************************
+read_K	0	iupac	1	86	30M	*	0	0	KKKKKKKKKKKKKKKKKKKKKKKKKKKKKK	******************************
+read_L	0	iupac	1	86	30M	*	0	0	LLLLLLLLLLLLLLLLLLLLLLLLLLLLLL	******************************
+read_M	0	iupac	1	86	30M	*	0	0	MMMMMMMMMMMMMMMMMMMMMMMMMMMMMM	******************************
+read_N	0	iupac	1	86	30M	*	0	0	NNNNNNNNNNNNNNNNNNNNNNNNNNNNNN	******************************
+read_O	0	iupac	1	86	30M	*	0	0	OOOOOOOOOOOOOOOOOOOOOOOOOOOOOO	******************************
+read_P	0	iupac	1	86	30M	*	0	0	PPPPPPPPPPPPPPPPPPPPPPPPPPPPPP	******************************
+read_Q	0	iupac	1	86	30M	*	0	0	QQQQQQQQQQQQQQQQQQQQQQQQQQQQQQ	******************************
+read_R	0	iupac	1	86	30M	*	0	0	RRRRRRRRRRRRRRRRRRRRRRRRRRRRRR	******************************
+read_S	0	iupac	1	86	30M	*	0	0	SSSSSSSSSSSSSSSSSSSSSSSSSSSSSS	******************************
+read_T	0	iupac	1	86	30M	*	0	0	TTTTTTTTTTTTTTTTTTTTTTTTTTTTTT	******************************
+read_U	0	iupac	1	86	30M	*	0	0	UUUUUUUUUUUUUUUUUUUUUUUUUUUUUU	******************************
+read_V	0	iupac	1	86	30M	*	0	0	VVVVVVVVVVVVVVVVVVVVVVVVVVVVVV	******************************
+read_W	0	iupac	1	86	30M	*	0	0	WWWWWWWWWWWWWWWWWWWWWWWWWWWWWW	******************************
+read_X	0	iupac	1	86	30M	*	0	0	XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX	******************************
+read_Y	0	iupac	1	86	30M	*	0	0	YYYYYYYYYYYYYYYYYYYYYYYYYYYYYY	******************************
+read_Z	0	iupac	1	86	30M	*	0	0	ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ	******************************
+read_a	0	iupac	1	86	30M	*	0	0	aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa	******************************
+read_b	0	iupac	1	86	30M	*	0	0	bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb	******************************
+read_c	0	iupac	1	86	30M	*	0	0	cccccccccccccccccccccccccccccc	******************************
+read_d	0	iupac	1	86	30M	*	0	0	dddddddddddddddddddddddddddddd	******************************
+read_e	0	iupac	1	86	30M	*	0	0	eeeeeeeeeeeeeeeeeeeeeeeeeeeeee	******************************
+read_f	0	iupac	1	86	30M	*	0	0	ffffffffffffffffffffffffffffff	******************************
+read_g	0	iupac	1	86	30M	*	0	0	gggggggggggggggggggggggggggggg	******************************
+read_h	0	iupac	1	86	30M	*	0	0	hhhhhhhhhhhhhhhhhhhhhhhhhhhhhh	******************************
+read_i	0	iupac	1	86	30M	*	0	0	iiiiiiiiiiiiiiiiiiiiiiiiiiiiii	******************************
+read_j	0	iupac	1	86	30M	*	0	0	jjjjjjjjjjjjjjjjjjjjjjjjjjjjjj	******************************
+read_k	0	iupac	1	86	30M	*	0	0	kkkkkkkkkkkkkkkkkkkkkkkkkkkkkk	******************************
+read_l	0	iupac	1	86	30M	*	0	0	llllllllllllllllllllllllllllll	******************************
+read_m	0	iupac	1	86	30M	*	0	0	mmmmmmmmmmmmmmmmmmmmmmmmmmmmmm	******************************
+read_n	0	iupac	1	86	30M	*	0	0	nnnnnnnnnnnnnnnnnnnnnnnnnnnnnn	******************************
+read_o	0	iupac	1	86	30M	*	0	0	oooooooooooooooooooooooooooooo	******************************
+read_p	0	iupac	1	86	30M	*	0	0	pppppppppppppppppppppppppppppp	******************************
+read_q	0	iupac	1	86	30M	*	0	0	qqqqqqqqqqqqqqqqqqqqqqqqqqqqqq	******************************
+read_r	0	iupac	1	86	30M	*	0	0	rrrrrrrrrrrrrrrrrrrrrrrrrrrrrr	******************************
+read_s	0	iupac	1	86	30M	*	0	0	ssssssssssssssssssssssssssssss	******************************
+read_t	0	iupac	1	86	30M	*	0	0	tttttttttttttttttttttttttttttt	******************************
+read_u	0	iupac	1	86	30M	*	0	0	uuuuuuuuuuuuuuuuuuuuuuuuuuuuuu	******************************
+read_v	0	iupac	1	86	30M	*	0	0	vvvvvvvvvvvvvvvvvvvvvvvvvvvvvv	******************************
+read_w	0	iupac	1	86	30M	*	0	0	wwwwwwwwwwwwwwwwwwwwwwwwwwwwww	******************************
+read_x	0	iupac	1	86	30M	*	0	0	xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx	******************************
+read_y	0	iupac	1	86	30M	*	0	0	yyyyyyyyyyyyyyyyyyyyyyyyyyyyyy	******************************
+read_z	0	iupac	1	86	30M	*	0	0	zzzzzzzzzzzzzzzzzzzzzzzzzzzzzz	******************************
+read_dot	0	iupac	1	86	30M	*	0	0	..............................	******************************
+read_equals	0	iupac	1	86	30M	*	0	0	==============================	******************************
diff --git a/src/test/resources/htsjdk/samtools/cram/amb.fa b/src/test/resources/htsjdk/samtools/cram/amb.fa
new file mode 100644
index 0000000..040dd1c
--- /dev/null
+++ b/src/test/resources/htsjdk/samtools/cram/amb.fa
@@ -0,0 +1,2 @@
+>iupac  31      857152f076709b2c6067edcbaaba65c7
+.aAbBcCdDgGhHkKmMnNrRsStTvVwWyY
diff --git a/src/test/resources/htsjdk/samtools/cram/amb.fa.fai b/src/test/resources/htsjdk/samtools/cram/amb.fa.fai
new file mode 100644
index 0000000..89701fd
--- /dev/null
+++ b/src/test/resources/htsjdk/samtools/cram/amb.fa.fai
@@ -0,0 +1 @@
+iupac	31	49	31	32
diff --git a/src/test/resources/htsjdk/samtools/cram/ambiguityCodes.fasta b/src/test/resources/htsjdk/samtools/cram/ambiguityCodes.fasta
new file mode 100644
index 0000000..430b59a
--- /dev/null
+++ b/src/test/resources/htsjdk/samtools/cram/ambiguityCodes.fasta
@@ -0,0 +1,2 @@
+>Sheila
+GCTAGCRM.gactAAAAAAAAAA
diff --git a/src/test/resources/htsjdk/samtools/cram/ambiguityCodes.fasta.fai b/src/test/resources/htsjdk/samtools/cram/ambiguityCodes.fasta.fai
new file mode 100644
index 0000000..d35aa7e
--- /dev/null
+++ b/src/test/resources/htsjdk/samtools/cram/ambiguityCodes.fasta.fai
@@ -0,0 +1 @@
+Sheila	23	8	23	24
diff --git a/src/test/resources/htsjdk/samtools/cram/fieldarith.sam b/src/test/resources/htsjdk/samtools/cram/fieldarith.sam
deleted file mode 100644
index 180d1e8..0000000
--- a/src/test/resources/htsjdk/samtools/cram/fieldarith.sam
+++ /dev/null
@@ -1,15 +0,0 @@
- at SQ	SN:one	LN:1000
- at SQ	SN:two	LN:500
- at CO	For each SAM record that has each listed aux field, performs these tests:
- at CO	XQ is the expected result for bam_cigar2qlen()
- at CO	XR is the expected result for bam_cigar2rlen()
- at CO	XE is the expected result for bam_endpos()
- at CO	(Note that these are all zero-based, while POS is one-based in SAM)
-r1	0	one	50	20	8M	*	0	0	ATGCATGC	qqqqqqqq	XQ:i:8	XR:i:8	XE:i:57
-r2	0	one	100	20	50M	*	0	0	ATGCATGCATGCATGCATGCATGCATGCATGCATGCATGCATGCATGCAT	qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq	XQ:i:50	XR:i:50	XE:i:149
-unmapped	5	two	200	0	*	two	200	0	ATGCATGC	qqqqqqqq	XQ:i:0	XR:i:0	XE:i:200
-hascigar	5	two	200	0	6M2S	two	200	0	ATGCATGC	qqqqqqqq	XQ:i:8	XR:i:6	XE:i:200
-s1	0	one	300	20	2M	*	0	0	AT	qq	XQ:i:2	XR:i:2	XE:i:301
-su1	4	*	0	0	*	*	0	0	AT	qq	XQ:i:0	XR:i:0	XE:i:0
-su2	5	two	400	0	*	two	400	0	AT	qq	XQ:i:0	XR:i:0	XE:i:400
-su3	4	one	500	0	2M	*	0	0	AT	qq	XQ:i:2	XR:i:2	XE:i:500
diff --git a/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.subset.dict b/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.subset.dict
new file mode 100644
index 0000000..cfab320
--- /dev/null
+++ b/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.subset.dict
@@ -0,0 +1,2 @@
+ at HD	VN:1.0	SO:unsorted
+ at SQ	SN:20	LN:9000	M5:46ad963a88a95089707c4639ad196126	UR:file:///Users/cmn/projects/cram/final/hum.20.fasta
diff --git a/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.subset.fasta b/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.subset.fasta
new file mode 100644
index 0000000..27ed994
--- /dev/null
+++ b/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.subset.fasta
@@ -0,0 +1,151 @@
+>20
+TTGTCCCTGTATCATTTACTGAAAAGACTCTTCTTTTCCCCATTAGATGGTCTTGAAACG
+TTGTTAAAATTCAACTGACCATAGGTGTATTGGTTTATTTCTGTACTCTTAGTAGATTCC
+ATTGACCTATATCTCTATCCTTATGCCAGTACCACAGTTTTGTTTACTACAGCTTTGTAG
+TAAATTTTGAACTCTAAAGTGTTAGTTCTCTAACTTTGTTTGTTTTTCAAGAGTGTTTTG
+ACTCTTCTTACTGCATCCCTTGCATTTCCATATGGACTTTATAATCAGCCATGTCAACTT
+CTGCAAGAAAGACAGCTAGGATTTTGATAAGGATTGTGTTGAATCTGTAGTCCAATTTTG
+GGAATACTACCATGTTAACATCGTCTTCCCATCCATGCACGTGCAATAGCTTACCATTTA
+TTTGGTCTTCCTCAATTTCTGTCAACAATGATTTGTAGTTTTCAGTTGCAAGTCTTGCAC
+TTCTTTTGTTAAACTTTTTCCAAATATTTATTCTTCTTAATTACATAATTCTCATAATTA
+ATAAAATTCTGAAATTTTCTTAATTTCATTTTTGTGGCCATGCACTTTAAAACTTGCCTT
+TTAACAAGACTTCCAGATGATTCTTGTGAACATTAAGTTTGTGAAGTGCTTCTCTATTGA
+CAAACTGAACTCACGTGACATTCCACACAGCACTGGCAAATTCTGTCCCGTAACTCCGCT
+AGCTCTCCAACAAGAAGTGACTTGACGCAGCCCAAGGTTACTACTTAACACAATGAATTA
+AATGTTTTTAAATAAGAGGAAGCAATAAGATTCTAAAGGCTTTTCTGTTTTAATTTTCAT
+GCAATGGAAAACTGGTATTAAATATCTATTTAATTAGGAGGAAACTACAATGCTGACTTT
+TGTCTGAATTATGTAGATAAGTGATTCATTTGAAACAATTATTTTGATAATTGTCAATTA
+TCCATTTCATTTTAATGCATTTTTTATTCTTTTTTCAAAAATAGCAACAATTACAACAGT
+TAAACCTTATAATGAATATGTTTCCTAAACCCTGTTCTACTTTCTGGTTCCAGATCTGAC
+ACCAATTACCTTTCTGATTTTGGACAAACCACTTAATATTTGTAACTTACAATTACTTCA
+ACTGAATAATAAAAGAATTGGACTAGATTTCTCCAACATCTCTCTCTTTTGGCTTTATGT
+TAGATAATGCTAAATTTTCATCATATCCAAACATGCTATATAATTTTATGAACTGTTACA
+GAGTCAGACATAAGCAATATAAAGTATGATTCTGAATAAAGGCTGTGAAGTCTCGTTTTT
+CATACATAATTACAGGAACCGATCAAATTCAATAAAGCATTTATAGTCATGACAATATAT
+TCTCTTAACTTGCAATGTGGTTTTAGGAAACAATGAAACAAATTAAACAAATGCATGATT
+CCTTAAATTTTGTTGACTCGGGATTTAGTTACATATAAGACTCTTTTCCCCCAGCCAGAT
+TAACCCTGCTCTGTATATGTAATACGCATCTTCCCCCCAAATTTCACAATAATAATTTTA
+TGAAAATCAGTATGTAAGTTGCTGCATTGGCATGAAAGGCTACTAATGTGATATCTTTCA
+AAGATAAAGTGAACGTTTTTGTAAAAAGCCACCACACTGGAGGCATTTAACAATATTCAT
+CTTCCTATGGCATTACTACCCTAGATGTACTTTGCAATATTAATCAAGCCTTGTCTTACT
+TCAGGCTTCCTAGAAATAGAGCACAAGGCAGGGATTCTTTCTTGCTCGGTGATTTATTGA
+GGAAGTGCTCTCAGAAGAAATTGGTAAGATGCTAAGGAGAACAGCAAAGGACAGGAGAAG
+GGGGCTGAGCAGAGATGGGGATCTAACTGGAATCTGGCCTTTGCTGATTACCAGAGCAGC
+TCTGGGTGCGAATGGTGAAGTGCTGTTCTATTGACAAACTCCACTCAAGTGACATTCCAT
+GCAGCACTGCCAAATTCTGTCCCATAACTCTGCTAGCTCTTCAACAAGAAGTAACTTGAG
+GCAGCACAGTTACAGAAAACAGCACAAGAACCAAGGATACTAAACAGTGACTTAAATGTT
+TTTAGATAAGAGGAAGCAATGAGATACTATAGGGTTTTAATTTCTGTGCAATGGAAAACT
+AGTATTAAATATTTATTAATTAGGAGGAAACGACAATGCTGACTTCTGTCTGCATTATAG
+ACAAAATTGAGTTCTCCAACCATGAGGCAAGGTGCTGGCTTTTTGTACCCCTGCACTATT
+CATTTGATGGGTGAGTGCACTGTAACTTCCAGACGTCTCTTGGTAGGTGGGCCCCACTGG
+CCAAGGGCATTTCCCGGGAGAGTGAACGGCTGTGAGCTACTAGCAGTAGCAGTCACAGCA
+GCTGGGAGGAAACACACTGGTCAGGTAAAGGGCCAGCATCTACCAAGGTCCAGTTCCACA
+ATTAGTGGAGAAATATTTAATAACATTATTTTTGAAAATAATTAGGCCATATGACTTTGA
+CACTTTTTCTCTAGCAAAGTGACTAAAGAGAAGCAGGTTTTTAACTGGGTTTTTATTTCT
+GTTGTGTCTCTAGCCAAAGCCATGCTGATGTTTGATAGTTTTTTTTTTTCTTTTTTTAAG
+AGATGGGGTTTCACCATGTTGGCCATGCTGGTCGTGAACTCCTGACCTCAATGGCCTCCC
+AAACTGTTGGGATTACAGACGTGAGCCACCACGCCCAGCCAGAATTTTTTTTCCTAAAGA
+ATAGAACAACATTATAAAATTTTAGGCATTAAGGACAAATTTTATTTTATGATTTTCATT
+TCGGTGAGTCACAAGATATTCAACACAAAATGAAACTTCACAAAATTTCACAAAATGAAA
+AATTCATTTCAGATTCAATATCCACCTCCATCATCCATATTCATTCTTCCAATGTCTCAG
+GCCAGAAATTTGGAGTATGTGGCTTCTCCACCTCACACAATTTGCCCTAACTTTAATATA
+TACTCAGAAATTACTGGCTTTTCACTGTTTCTATGATTCCCATGTGTAATATACAATACT
+CACCATGCATACAATAATACAATAATTCTGTGTCACAACCACACCTAAATTGGTAAGTTT
+ATAAGGTTATAAGCTGAGAGGTTTTGCTGATCTTGGCTGAGCTCAGCTGGGCAGGTCTTC
+CGGTCTTGGCTGGGGTTCACTGACACACAAGCAGCTGACAGTTGGCTGATCTAGGATGGC
+CTCAGCTGGGATGACAGGCTGTTTCCTCACCTTCCAGCAGGCAAGCCAGTCCCAAGAAAG
+AGAAGGGTGAAACATGGAGGCCATTAATTGAGCCCATCGCATGAAACGCATCTGTGACAC
+CATCACCGTCTGATATCAACTCTCACCTGGATGTTTGCAATCGCCTAACTTTTCTCCTTT
+CATTCACTATGCTGCCTTACAAACCTATTCTCCACAAATCAGCTAGAGCAAACCTTTTAA
+ATCCTAAGTAGAATGCTACCATTCCTCTGCTCAAACTACTGGAGAGGAGATGCCACAGTC
+TTTACTATGGTCTTCAAGACCCTATGGGAGGTAGCCCTGTCTTACAACCATAGCCTCCTA
+CCACTGCCCCAACATGCACACTGAGTTCCGGCCACAGCATCTGCTTCCTCTTTATTGATC
+TGCCAAGGACATGAATGACTCAGAATCTCAAACTTATTCTTCCCTCAGAATCCACTGAGC
+CTACTCCCTCTTTTTCTCAAATGGCAACTTCTCAGAGATGCCTTCTCTGGCTAACGTATA
+TGTACTAAAACCTCCATCCAGCACTCTATGTCATCCTTACTATGGTTTATTTTTCTTCAT
+AGCACCTATCAATTGGTGAAGTATTAAATATGCATTTTTGTTTGTATGCATCTCTCTCCC
+CGCACCAGTATGTGAACTCCACCAGACTGGAGAGCGTGTTTATTTTGTTCACTGCTGTAA
+CTCCAGTGTCTAGAACAGTGCCTGGCACACAGTAGGTGTTTAATAATGATTTGTTAAGCT
+AAGCCAATGAATAAATATTCTTTTGCCCATGAGATGATGTTAAAAATTTTTCAATTATTC
+ATAACTGCCTGTAAGGAATGAATGTTAGTGAATTACTGTGATACCAATGAAAGTTAAATG
+ATGCAATGAAGATTGGGTGGATCACAAGGTCAGGAGTTTGAGACCATCCTGGCCAATATG
+GTGAAACCCCGTCTCTACTAAAAATACCAAAATTAGCTGGGCATCGTGGCAGGACCCTGT
+AGTCCCAGCTACTCAGGAGGCTGAGGCAGGAGAATCGCTTGAACCCAGGAGGCGGAGGTT
+GCAGTGAGCCGAGATCACGCCACTGCACTCCAGCCTGGCCGACAGAGTGAGATTCTGTCT
+CCAAAAAAAAAAAAAAAAAAGATGTAAATATTATTATTGTTTGCCATCACCCTTATGTGC
+TGTCTTGGCTTTATGTCGTGCTTAGATCTTTTCCAGGTGCAATTTGAATTGATGCAACTA
+ATGGAAACTACAACAGAAGTTCATATTGCCCTATTGTATTACACTATTTGACTCATCTTT
+CAGTATCACCAGGTGCATAAGGAAAATTTCAAAAGTAAATCAAGAGAAAGAAATATGATC
+GCAGCTTAACACAATGTAAATTTATTATTTGTACTTTTTGTCTAAATGGTTTGCCTAAAA
+GACTGAAAGACATTTTATATTAGTTAGAATACTTGAGGATAATAACATAAAAACTTTCCT
+TTCCAACTTGTTTATAAAAGGAAATCTTCACTGTTTTGAACATCAGTTATTTTAAACTTT
+TAAGTTGTTAGCACAGCAAAAGCAACAAAATTCTAAGTGCAGTAATCACTTTACTGCGTG
+GTCATATGAAATCAAGGCAATGTTATGAGTATTACTGGAAAGCTGGACAGAGTAACGGGA
+AAAGTGACTAAAACTATGCAAAACTATGCAAAACTAAGCAGATTGTGTCTCTAGAGTATT
+TCCCATCTCAAGTTTAGTTATTTACTAATTTGGCAACATCTGACCTATCTTTAATTGTGA
+GAAAATAAACAAACACATAAGCCAACTCTCAGAATATGGTTATACATAGGTGTAGCCTAT
+GACTTTGAATGTATTTGTTTGAATAGCGTAAAACAAAATAAAAATAAAATCTTGTTACAG
+TGCAAGAAACGGCAGTCATCAAACTAAGATGAGGCAAGTGTCATGAAGTATGAAAATATG
+GTACCTGAATTCTATTTATTAGAAAGTCTTCACTGAGCTGAGCATGTTTTTTTTAACAAA
+TTCAATTACTGATTTGAATATTTATTATACTTAATTATTGCAGCCATGAAAAGAGGTGCT
+GGCTGAGGCTGCATTTAATAAAAACATTTAATCAGCTTGAGGTTAGTAAACCATTTAATT
+TGTTTTTTCATGAAGATTTAACTTCTAGAATAATTTCATTTATGTATTTTTAGGTATAGC
+CCTAGATTCTGGTCTACATAGTATACAAATCATTTTAGAATGACACTAGGTTATTTCAAC
+TGCTTTTCTACAGAAGTGTTAAATAAGGGAGTAAAGTGTTGGCTTTTTCCATAATTGAAA
+TAAATGCACAATGAGCAGTAACATCCTGATTTCACTGCTATTTTGTTTAATCAACATAAT
+GATGTAGATTTACTCTGTATATATATGGAAGAGTGAAAGAAGGTTGGGAAGGAATAACTA
+TCAATTAATATAGGTGATATAGTAGTTATTTTTGCAAATCAACTATAATTTCTGAATGGA
+TATTCAGACCATATTTACATTACATAGAAGAGGCACACACCAAAAGATTTAACAAATGTG
+CCAAATATTGGTGAATATTTAGTTAGGTACCAAAAGGATGTTGTATAAATTAGGATGCTT
+TCAACCATAAGAGACTCATCTCAAAAATGGCTTGAAAATGTGGGGAATTTTTATCTCAGT
+GTGAAGTTAAAGGTAGGGCAAGTCCAAATCAATTCAAGTTAATTAAAGCCCCAAGTTCTT
+GGAATTTGCTCTGCTAGTCTTAGCTGGTTAGCCTTTGTCCTGAAGTTTGTAACTTCATGA
+CCAGAAGATGATCGCAATATTTTCTAAGTATAAAGCCTGAAGGTGTAAACCAGGTAGTCT
+CAGCAAAACCAATGATGCATGGTCACCTTTCCCTTAGTTGACAAATACTTGCCTTCCAGA
+TACTTTGAATCAGAATGGGCATTTTAACTAAGATCCAGTGCAACTAAAGGAATAATCAAA
+CAAATGATAAAATAATTATTCTGAGCTGAAAACACAAGTCTGAATATTGGAAGAGGTTCC
+TGATTTCCAGGCAGAATAGATAAGCAAAGATATCAACCTAAACACATCCTGGTTATAGTC
+TAAAATTTAGAGAATAAATGGGAAAAGATTATGAACTTTAAGGAAGAAATAACAACTTAC
+ACATAAAACAAAAAGGAAAAACTATCTGTTGACAGATTTATCACCTGCTAGATAAGAGCA
+GAATAACCATTCACTAGGAGAAAATGGGAGAAGCTAGAAGATACTGGAGTAATATTTATA
+GAGTACTGAAGAAAAAACAAAAAACAGAAATTCAATACACAGCTAAGATATCATTTACCT
+GTAAATGATACATGGTAAAATAAAGGGTAAAATAAGGATATTTGCAGATTCACAAGGAGA
+AGTTAGCCTTCACATACTCAGCTGAGGAAAATCCTAGAGACAAAACTCTAACAAAAACAA
+ACTAGCCGGGTGCAGTGGTGCAGGCCTGTAGTTCCAGCTACTCAAGAGGCTGAAGTAAGA
+GGATCACTGGAGCCCAGGGGTTCTGGGCTGTACTGTGCTATGCTGATCAGGTGTCCACAC
+TTAGCTCGCCATCAATATGATGACCTCCCAGGTGGGAAATGGAGCAGGTGAAAACTCCCA
+TGCTGATCAGTAGTGGGATCATGTCTGTGAATAATCACTGCACTCCAACTGGGAAACATA
+GCAAGACCCCATCTCTAAAAAACAAACAAACAAACAAACAAACAAACAAAAATGTTTCAG
+ACAAATGTCAAGATAGAGTAAAAGAAGAAAGTACTGAAGGCCCTTCAACATAAATTGGAT
+CAGATAATAAAAATAATAGCAAAGTTCTTTTCATGCTGTATCCTTAATTCTTCACCATAA
+TCTTAGGAAGTGAATGTATTAATTATCTTTTGCTATATAACAAATTACTCCCAAAACTTG
+GCGGCTTAAAACAACAAATATTATTTCACAATTTCTGTGGGTCAAGAATTTGGAAGTAGT
+GACTCTGGCTCAGGGTCTCATTTAAGGTGGTAGTTCAGGATGCCAGTCAGGGCTGCAGGC
+ACTGAGGCTGCTTCCTCAATGGCCCACTCACATGGCTGTTGGCTGGAGGCCTCTCTTTCT
+CACCACACGGGCCTCTTCATAGGACTGCCCGAGTGTCCTTACAGCGTGGCAGCTGGCTTC
+CCCCAGAGTGAACATTCTGAGAGAGAGAGAAAGAGAGAGTTAGAAGGCATGCTGTCACTT
+CCACTGTATCCCATTCACCAGAAGTGAGAGACTAAATTCACCAAACAGAAAAGTGAAGGA
+GAATGAGGCTCCAGTTTTTTGGGTGAGAGTTGAAGAATGTATGGATATTTTGAACAACCA
+CAATATAATTCTTCTTTTCACAGAAGCACAAAAAAATTTATTTAACTTGTCCAAGTTTAC
+ACAGTTAGCAAGCAACACCTTTGAGAAAAAAATCCATGTAGTCTGATACAAGCACCCAAA
+CTCATAACCACAATGTGAATCTAACTGCTTTTCAATTAAAAAAGAAAGAAAGATTCCCTT
+CAAATCTGGCATATGCATTCACATGGAGCATTCATACTGCCAGTGACAGTACCATAGTTA
+TATGGAATTAGAAGTTCTAACTTATCTTGGCCAAACTAAAGACTTAGGGCTGGGTAGAAG
+GTTGGAGGGATGTAAGGTCATTCTCAAGATCTCATCTAGGAGAAGAAAACAAAATGGGGA
+AGTAGAAGACAAAATGCTTTTTTAGGTTGGGAAAGGACTGGGAGAATCAAGCATCTAGAA
+ATGGGCACAAAGAGTTACCTTATTTTATTTAAAAGAAAATAAATGTTTGACTATTAATGC
+CTGAGAACGGAAGGTGATTATTAATGAGATGAAAAAGTTAATCAGATTCTCCAAGTTAGG
+AGGGACTTGAAGACCAAATTGATAAAAATAAAAAAAAAGATGTCATAGTAGAATAATCTA
+GATAATAAGCAATCAATGAGACTGAAAAAATAAAATCAAGTATATCATTTGTTACACTAA
+ATATTAATATACCAGATTCTCTCATTAAAAAAACAGAGAAAGTCAAATTGGATTAAATAA
+GAACAAAAAGTTAGCTATATAGTATTTATCAGAAACATTCTTATAAACAAATTGATAATG
+AAAGATTAAAAATAAGAGATTTGAGGCAAGGCAAGCAAAAAGAAATAAATGTTAAACAAG
+GAGAAATTAAAGGCTACGGACATTACCTAAGGAAAAGGATGACATAGAGTTACAGTGGCA
+AAAGTTAGGAAGCAGATGACATAAATCTATATGCACAAACAGTATGGCCACAAAATACAT
+TAATTAAAAATTACTAGAAATATAAGATGACTTTGATTAAAATACACTGATTACAAGGGA
+TTTAACATATAAAAATTAGGCTGATGTGGTAAATTTAAATATAATCAAATATTTAGGAAA
+ATAGAACAACACAACAAAGTTGATTACATATATTCATTTTCCAGATAGTATACTTTATGC
+CTATGAAATAGTTCTTAAAATCAATTATATATGGCCGGGTGCAGTGGCCCACGCCTGTAA
+TCCCAGCACTTTGGGAGGCCAAGGCAGGTGGATCACGAGGTCAGGAGATCGAGACCATCC
+TGGCTAACACAGTGAAACCCCGTCTCTACTAAAAATACAAAAAAAAAAAAAATTAGCTGG
+GCGTGGTGGCTGGAACCTGTAGTCCCAGCTACTCGGGAGGCTGAGGCAGGAGAATGGCGT
diff --git a/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.subset.fasta.fai b/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.subset.fasta.fai
new file mode 100644
index 0000000..77ced1f
--- /dev/null
+++ b/src/test/resources/htsjdk/samtools/cram/human_g1k_v37.20.subset.fasta.fai
@@ -0,0 +1 @@
+20	9000	4	60	61
diff --git a/src/test/resources/htsjdk/samtools/cram/samtoolsSliceMD5WithAmbiguityCodesTest.cram b/src/test/resources/htsjdk/samtools/cram/samtoolsSliceMD5WithAmbiguityCodesTest.cram
new file mode 100644
index 0000000..e727318
Binary files /dev/null and b/src/test/resources/htsjdk/samtools/cram/samtoolsSliceMD5WithAmbiguityCodesTest.cram differ
diff --git a/src/test/resources/htsjdk/samtools/cram/test.dict b/src/test/resources/htsjdk/samtools/cram/test.dict
deleted file mode 100644
index dfb98d6..0000000
--- a/src/test/resources/htsjdk/samtools/cram/test.dict
+++ /dev/null
@@ -1,2 +0,0 @@
- at HD	VN:1.4	SO:unsorted
- at SQ	SN:Sheila	LN:20	M5:7ddd8a4b4f2c1dec43476a738b1a9b72	UR:file:/Users/edwardk/Documents/htsjdk/testdata/htsjdk/samtools/cram/auxf.fa
diff --git a/src/test/resources/htsjdk/samtools/cram/test.fa b/src/test/resources/htsjdk/samtools/cram/test.fa
deleted file mode 100644
index 11d25dd..0000000
--- a/src/test/resources/htsjdk/samtools/cram/test.fa
+++ /dev/null
@@ -1,2 +0,0 @@
->Sheila
-GCTAGCTCAGAAAAAAAAAA
diff --git a/src/test/resources/htsjdk/samtools/cram/test.fa.fai b/src/test/resources/htsjdk/samtools/cram/test.fa.fai
deleted file mode 100644
index f3cdedb..0000000
--- a/src/test/resources/htsjdk/samtools/cram/test.fa.fai
+++ /dev/null
@@ -1 +0,0 @@
-Sheila	20	8	20	21
diff --git a/src/test/resources/htsjdk/samtools/cram/test2.dict b/src/test/resources/htsjdk/samtools/cram/test2.dict
deleted file mode 100644
index dfb98d6..0000000
--- a/src/test/resources/htsjdk/samtools/cram/test2.dict
+++ /dev/null
@@ -1,2 +0,0 @@
- at HD	VN:1.4	SO:unsorted
- at SQ	SN:Sheila	LN:20	M5:7ddd8a4b4f2c1dec43476a738b1a9b72	UR:file:/Users/edwardk/Documents/htsjdk/testdata/htsjdk/samtools/cram/auxf.fa
diff --git a/src/test/resources/htsjdk/samtools/cram/test2.fa b/src/test/resources/htsjdk/samtools/cram/test2.fa
deleted file mode 100644
index 11d25dd..0000000
--- a/src/test/resources/htsjdk/samtools/cram/test2.fa
+++ /dev/null
@@ -1,2 +0,0 @@
->Sheila
-GCTAGCTCAGAAAAAAAAAA
diff --git a/src/test/resources/htsjdk/samtools/cram/test2.fa.fai b/src/test/resources/htsjdk/samtools/cram/test2.fa.fai
deleted file mode 100644
index f3cdedb..0000000
--- a/src/test/resources/htsjdk/samtools/cram/test2.fa.fai
+++ /dev/null
@@ -1 +0,0 @@
-Sheila	20	8	20	21
diff --git a/src/test/resources/htsjdk/samtools/filter/names.txt b/src/test/resources/htsjdk/samtools/filter/names.txt
new file mode 100644
index 0000000..1994356
--- /dev/null
+++ b/src/test/resources/htsjdk/samtools/filter/names.txt
@@ -0,0 +1,3 @@
+Read1_filter shouldn't affect    anything
+Read2_filter
+Hello_filter
\ No newline at end of file
diff --git a/src/test/resources/htsjdk/samtools/noheader.sam b/src/test/resources/htsjdk/samtools/noheader.sam
new file mode 100755
index 0000000..7ac0030
--- /dev/null
+++ b/src/test/resources/htsjdk/samtools/noheader.sam
@@ -0,0 +1,10 @@
+A	73	chr2	1	255	10M	*	0	0	CAACAGAAGC	)'.*.+2,))	RG:Z:0
+A	133	*	0	0	*	chr2	1	0	CAACAGAAGC	)'.*.+2,))	RG:Z:0
+B	99	chr1	1	255	10M	=	26	35	CAACAGAAGC	)'.*.+2,))	RG:Z:0
+B	147	chr1	26	255	10M	=	1	-35	CAACAGAAGC	)'.*.+2,))	RG:Z:0
+C	99	chr2	1	255	10M	=	26	35	CAACAGAAGC	)'.*.+2,))	RG:Z:0
+C	147	chr2	26	255	10M	=	1	-35	CAACAGAAGC	)'.*.+2,))	RG:Z:0
+D	99	chr3	1	255	10M	=	25	35	CAACAGAAGC	)'.*.+2,))	RG:Z:0
+D	147	chr3	26	255	10M	=	1	-35	CAACAGAAGC	)'.*.+2,))	RG:Z:0
+E	99	chr1	2	255	10M	=	15	30	CAACAGAAGC	)'.*.+2,))	RG:Z:0
+E	147	chr1	15	255	10M	=	2	-30	CAACAGAAGC	)'.*.+2,))	RG:Z:0
diff --git a/src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta.gz b/src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta.gz
new file mode 100644
index 0000000..aa8ef59
Binary files /dev/null and b/src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta.gz differ
diff --git a/src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta.gz.fai b/src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta.gz.fai
new file mode 100644
index 0000000..04a438b
--- /dev/null
+++ b/src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta.gz.fai
@@ -0,0 +1,2 @@
+chrM	16571	6	60	61
+chr20	1000000	16861	60	61
diff --git a/src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta.gz.gzi b/src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta.gz.gzi
new file mode 100644
index 0000000..a536602
Binary files /dev/null and b/src/test/resources/htsjdk/samtools/reference/Homo_sapiens_assembly18.trimmed.fasta.gz.gzi differ
diff --git a/src/test/resources/htsjdk/samtools/reference/crlf.fasta b/src/test/resources/htsjdk/samtools/reference/crlf.fasta
new file mode 100644
index 0000000..70c8785
--- /dev/null
+++ b/src/test/resources/htsjdk/samtools/reference/crlf.fasta
@@ -0,0 +1,4 @@
+>a test CR+LF
+ACTG
+>b test CR+LF
+ACTG
diff --git a/src/test/resources/htsjdk/samtools/reference/crlf.fasta.fai b/src/test/resources/htsjdk/samtools/reference/crlf.fasta.fai
new file mode 100644
index 0000000..923386e
--- /dev/null
+++ b/src/test/resources/htsjdk/samtools/reference/crlf.fasta.fai
@@ -0,0 +1,2 @@
+a	4	15	4	6
+b	4	36	4	5
diff --git a/src/test/resources/htsjdk/samtools/reference/header_with_white_space.fasta b/src/test/resources/htsjdk/samtools/reference/header_with_white_space.fasta
new file mode 100644
index 0000000..24cff02
--- /dev/null
+++ b/src/test/resources/htsjdk/samtools/reference/header_with_white_space.fasta
@@ -0,0 +1,4 @@
+>a test white space
+ACTG
+>b test whitespace
+ACTG
diff --git a/src/test/resources/htsjdk/samtools/reference/header_with_white_space.fasta.fai b/src/test/resources/htsjdk/samtools/reference/header_with_white_space.fasta.fai
new file mode 100644
index 0000000..bb15aa5
--- /dev/null
+++ b/src/test/resources/htsjdk/samtools/reference/header_with_white_space.fasta.fai
@@ -0,0 +1,2 @@
+a	4	20	4	5
+b	4	44	4	5
diff --git a/src/test/resources/htsjdk/samtools/util/random.bin b/src/test/resources/htsjdk/samtools/util/random.bin
new file mode 100644
index 0000000..f59b247
Binary files /dev/null and b/src/test/resources/htsjdk/samtools/util/random.bin differ
diff --git a/src/test/resources/htsjdk/samtools/util/random.bin.gz b/src/test/resources/htsjdk/samtools/util/random.bin.gz
new file mode 100644
index 0000000..cd764e0
Binary files /dev/null and b/src/test/resources/htsjdk/samtools/util/random.bin.gz differ
diff --git a/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/baseVariants.mangled.vcf.gz b/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/baseVariants.mangled.vcf.gz
new file mode 100644
index 0000000..98f276a
Binary files /dev/null and b/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/baseVariants.mangled.vcf.gz differ
diff --git a/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/baseVariants.mangled.vcf.gz.tbi b/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/baseVariants.mangled.vcf.gz.tbi
new file mode 100644
index 0000000..333a50f
Binary files /dev/null and b/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/baseVariants.mangled.vcf.gz.tbi differ
diff --git a/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/baseVariants.vcf b/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/baseVariants.vcf
new file mode 100644
index 0000000..63667ca
--- /dev/null
+++ b/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/baseVariants.vcf
@@ -0,0 +1,37 @@
+##fileformat=VCFv4.2
+##INFO=<ID=AC,Number=A,Type=Integer,Description="Allele count in genotypes, for each ALT allele, in the same order as listed">
+##INFO=<ID=AF,Number=A,Type=Float,Description="Allele Frequency, for each ALT allele, in the same order as listed">
+##INFO=<ID=AN,Number=1,Type=Integer,Description="Total number of alleles in called genotypes">
+##INFO=<ID=DP,Number=1,Type=Integer,Description="Approximate read depth; some reads may have been filtered">
+##contig=<ID=1,length=16000>
+##contig=<ID=2,length=16000>
+##contig=<ID=3,length=16000>
+##contig=<ID=4,length=16000>
+##source=SelectVariants
+#CHROM	POS	ID	REF	ALT	QUAL	FILTER	INFO
+1	100	a	G	A	232.46	PASS	.
+1	199	b	GG	G	232.46	PASS	.
+1	200	c	G	A	232.46	PASS	.
+1	203	d	GGGG	G	232.46	PASS	.
+1	280	e	G	A	232.46	PASS	.
+1	284	f	GGG	G	232.46	PASS	.
+1	285	g	G	A	232.46	PASS	.
+1	286	h	G	A	232.46	PASS	.
+1	999	i	G	A	232.46	PASS	.
+1	1000	j	G	A	232.46	PASS	.
+1	1000	k	GGGG	G	232.46	PASS	.
+1	1076	l	G	A	232.46	PASS	.
+1	1150	m	G	A	232.46	PASS	.
+1	1176	n	G	A	232.46	PASS	.
+2	200	o	G	A	232.46	PASS	.
+2	525	p	G	A	232.46	PASS	.
+2	548	q	GGG	G	232.46	PASS	.
+2	640	r	G	A	232.46	PASS	.
+2	700	s	G	A	232.46	PASS	.
+3	1	t	G	A	232.46	PASS	.
+3	300	u	G	A	232.46	PASS	.
+3	300	v	GGGG	G	232.46	PASS	.
+3	400	w	G	A	232.46	PASS	.
+4	600	x	G	A	232.46	PASS	.
+4	775	y	G	A	232.46	PASS	.
+4	776	z	GGGG	G	232.46	PASS	.
diff --git a/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/baseVariants.vcf.gz b/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/baseVariants.vcf.gz
new file mode 100644
index 0000000..03ad18b
Binary files /dev/null and b/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/baseVariants.vcf.gz differ
diff --git a/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/baseVariants.vcf.gz.tbi b/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/baseVariants.vcf.gz.tbi
new file mode 100644
index 0000000..3441492
Binary files /dev/null and b/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/baseVariants.vcf.gz.tbi differ
diff --git a/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/baseVariants.vcf.idx b/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/baseVariants.vcf.idx
new file mode 100644
index 0000000..6d5e546
Binary files /dev/null and b/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/baseVariants.vcf.idx differ
diff --git a/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/corruptedBaseVariants.vcf.idx b/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/corruptedBaseVariants.vcf.idx
new file mode 100644
index 0000000..0e8ae0a
Binary files /dev/null and b/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/corruptedBaseVariants.vcf.idx differ
diff --git a/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/mangledBaseVariants.vcf b/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/mangledBaseVariants.vcf
new file mode 100644
index 0000000..8a6df0d
--- /dev/null
+++ b/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/mangledBaseVariants.vcf
@@ -0,0 +1,37 @@
+!##fileformat=VCFv4.2
+##INFO=<ID=AC,Number=A,Type=Integer,Description="Allele count in genotypes, for each ALT allele, in the same order as listed">
+##INFO=<ID=AF,Number=A,Type=Float,Description="Allele Frequency, for each ALT allele, in the same order as listed">
+##INFO=<ID=AN,Number=1,Type=Integer,Description="Total number of alleles in called genotypes">
+##INFO=<ID=DP,Number=1,Type=Integer,Description="Approximate read depth; some reads may have been filtered">
+##contig=<ID=1,length=16000>
+##contig=<ID=2,length=16000>
+##contig=<ID=3,length=16000>
+##contig=<ID=4,length=16000>
+##source=SelectVariants
+#CHROM	POS	ID	REF	ALT	QUAL	FILTER	INFO
+1	100	a	G	A	232.46	PASS	.
+1	199	b	GG	G	232.46	PASS	.
+1	200	c	G	A	232.46	PASS	.
+1	203	d	GGGG	G	232.46	PASS	.
+1	280	e	G	A	232.46	PASS	.
+1	284	f	GGG	G	232.46	PASS	.
+1	285	g	G	A	232.46	PASS	.
+1	286	h	G	A	232.46	PASS	.
+1	999	i	G	A	232.46	PASS	.
+1	1000	j	G	A	232.46	PASS	.
+1	1000	k	GGGG	G	232.46	PASS	.
+1	1076	l	G	A	232.46	PASS	.
+1	1150	m	G	A	232.46	PASS	.
+1	1176	n	G	A	232.46	PASS	.
+2	200	o	G	A	232.46	PASS	.
+2	525	p	G	A	232.46	PASS	.
+2	548	q	GGG	G	232.46	PASS	.
+2	640	r	G	A	232.46	PASS	.
+2	700	s	G	A	232.46	PASS	.
+3	1	t	G	A	232.46	PASS	.
+3	300	u	G	A	232.46	PASS	.
+3	300	v	GGGG	G	232.46	PASS	.
+3	400	w	G	A	232.46	PASS	.
+4	600	x	G	A	232.46	PASS	.
+4	775	y	G	A	232.46	PASS	.
+4	776	z	GGGG	G	232.46	PASS	.
diff --git a/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/mangledBaseVariants.vcf.idx b/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/mangledBaseVariants.vcf.idx
new file mode 100644
index 0000000..4a20ddc
Binary files /dev/null and b/src/test/resources/htsjdk/tribble/AbstractFeatureReaderTest/mangledBaseVariants.vcf.idx differ
diff --git a/src/test/resources/htsjdk/tribble/bed/2featuresNoHeader.bed.gz b/src/test/resources/htsjdk/tribble/bed/2featuresNoHeader.bed.gz
new file mode 100644
index 0000000..108ed5d
Binary files /dev/null and b/src/test/resources/htsjdk/tribble/bed/2featuresNoHeader.bed.gz differ
diff --git a/src/test/resources/htsjdk/tribble/bed/2featuresNoHeader.bed.gz.tbi b/src/test/resources/htsjdk/tribble/bed/2featuresNoHeader.bed.gz.tbi
new file mode 100644
index 0000000..0ef8c62
Binary files /dev/null and b/src/test/resources/htsjdk/tribble/bed/2featuresNoHeader.bed.gz.tbi differ
diff --git a/src/test/resources/htsjdk/tribble/bed/2featuresWithHeader.bed.gz b/src/test/resources/htsjdk/tribble/bed/2featuresWithHeader.bed.gz
new file mode 100644
index 0000000..6860383
Binary files /dev/null and b/src/test/resources/htsjdk/tribble/bed/2featuresWithHeader.bed.gz differ
diff --git a/src/test/resources/htsjdk/tribble/bed/2featuresWithHeader.bed.gz.tbi b/src/test/resources/htsjdk/tribble/bed/2featuresWithHeader.bed.gz.tbi
new file mode 100644
index 0000000..d265958
Binary files /dev/null and b/src/test/resources/htsjdk/tribble/bed/2featuresWithHeader.bed.gz.tbi differ
diff --git a/src/test/resources/htsjdk/tribble/bed/3featuresNoHeader.bed.gz b/src/test/resources/htsjdk/tribble/bed/3featuresNoHeader.bed.gz
new file mode 100644
index 0000000..ae1d32c
Binary files /dev/null and b/src/test/resources/htsjdk/tribble/bed/3featuresNoHeader.bed.gz differ
diff --git a/src/test/resources/htsjdk/tribble/bed/3featuresNoHeader.bed.gz.tbi b/src/test/resources/htsjdk/tribble/bed/3featuresNoHeader.bed.gz.tbi
new file mode 100644
index 0000000..6e46ce3
Binary files /dev/null and b/src/test/resources/htsjdk/tribble/bed/3featuresNoHeader.bed.gz.tbi differ
diff --git a/src/test/resources/htsjdk/tribble/bed/Unigene.sample.bed.gz b/src/test/resources/htsjdk/tribble/bed/Unigene.sample.bed.gz
new file mode 100644
index 0000000..64292cd
Binary files /dev/null and b/src/test/resources/htsjdk/tribble/bed/Unigene.sample.bed.gz differ
diff --git a/src/test/resources/htsjdk/tribble/tabix/4featuresHG38Header.vcf b/src/test/resources/htsjdk/tribble/tabix/4featuresHG38Header.vcf
new file mode 100644
index 0000000..7147eef
--- /dev/null
+++ b/src/test/resources/htsjdk/tribble/tabix/4featuresHG38Header.vcf
@@ -0,0 +1,3412 @@
+##fileformat=VCFv4.2
+##FILTER=<ID=alt_allele_in_normal,Description="Evidence seen in the normal sample">
+##FILTER=<ID=clustered_events,Description="Clustered events observed in the tumor">
+##FILTER=<ID=clustered_read_position,Description="Evidence for somatic variant clusters near the ends of reads">
+##FILTER=<ID=germline_risk,Description="Evidence indicates this site is germline, not somatic">
+##FILTER=<ID=homologous_mapping_event,Description="More than three events were observed in the tumor">
+##FILTER=<ID=multi_event_alt_allele_in_normal,Description="Multiple events observed in tumor and normal">
+##FILTER=<ID=panel_of_normals,Description="Seen in at least 2 samples in the panel of normals">
+##FILTER=<ID=str_contraction,Description="Site filtered due to contraction of short tandem repeat region">
+##FILTER=<ID=strand_artifact,Description="Evidence for alt allele comes from one read direction only">
+##FILTER=<ID=t_lod_fstar,Description="Tumor does not meet likelihood threshold">
+##FILTER=<ID=triallelic_site,Description="Site filtered because more than two alt alleles pass tumor LOD">
+##FORMAT=<ID=AD,Number=R,Type=Integer,Description="Allelic depths for the ref and alt alleles in the order listed">
+##FORMAT=<ID=AF,Number=1,Type=Float,Description="Allele fraction of the event in the tumor">
+##FORMAT=<ID=ALT_F1R2,Number=1,Type=Integer,Description="Count of reads in F1R2 pair orientation supporting the alternate allele">
+##FORMAT=<ID=ALT_F2R1,Number=1,Type=Integer,Description="Count of reads in F2R1 pair orientation supporting the alternate allele">
+##FORMAT=<ID=DP,Number=1,Type=Integer,Description="Approximate read depth (reads with MQ=255 or with bad mates are filtered)">
+##FORMAT=<ID=FOXOG,Number=1,Type=Float,Description="Fraction of alt reads indicating OxoG error">
+##FORMAT=<ID=GQ,Number=1,Type=Integer,Description="Genotype Quality">
+##FORMAT=<ID=GT,Number=1,Type=String,Description="Genotype">
+##FORMAT=<ID=PGT,Number=1,Type=String,Description="Physical phasing haplotype information, describing how the alternate alleles are phased in relation to one another">
+##FORMAT=<ID=PID,Number=1,Type=String,Description="Physical phasing ID information, where each unique ID within a given sample (but not across samples) connects records within a phasing group">
+##FORMAT=<ID=PL,Number=G,Type=Integer,Description="Normalized, Phred-scaled likelihoods for genotypes as defined in the VCF specification">
+##FORMAT=<ID=QSS,Number=A,Type=Integer,Description="Sum of base quality scores for each allele">
+##FORMAT=<ID=REF_F1R2,Number=1,Type=Integer,Description="Count of reads in F1R2 pair orientation supporting the reference allele">
+##FORMAT=<ID=REF_F2R1,Number=1,Type=Integer,Description="Count of reads in F2R1 pair orientation supporting the reference allele">
+##GATKCommandLine.MuTect2=<ID=MuTect2,Version=3.7-0-gcfedb67,Date="Fri Feb 10 11:17:13 EST 2017",Epoch=1486743433985,CommandLineOptions="analysis_type=MuTect2 input_file=[hcc1143_T_subset50k.bam, hcc1143_N_subset50k.bam] showFullBamList=false read_buffer_size=null read_filter=[] disable_read_filter=[] intervals=[chr6:33,413,000-118,315,000] excludeIntervals=null interval_set_rule=UNION interval_merging=ALL interval_padding=0 reference_sequence=/Users/shlee/Documents/ref/hg38/Homo_sapiens [...]
+##INFO=<ID=DB,Number=0,Type=Flag,Description="dbSNP Membership">
+##INFO=<ID=ECNT,Number=1,Type=String,Description="Number of events in this haplotype">
+##INFO=<ID=HCNT,Number=1,Type=String,Description="Number of haplotypes that support this variant">
+##INFO=<ID=MAX_ED,Number=1,Type=Integer,Description="Maximum distance between events in this active region">
+##INFO=<ID=MIN_ED,Number=1,Type=Integer,Description="Minimum distance between events in this active region">
+##INFO=<ID=NLOD,Number=1,Type=String,Description="Normal LOD score">
+##INFO=<ID=PON,Number=1,Type=String,Description="Count from Panel of Normals">
+##INFO=<ID=RPA,Number=.,Type=Integer,Description="Number of times tandem repeat unit is repeated, for each allele (including reference)">
+##INFO=<ID=RU,Number=1,Type=String,Description="Tandem repeat unit (bases)">
+##INFO=<ID=STR,Number=0,Type=Flag,Description="Variant is a short tandem repeat">
+##INFO=<ID=TLOD,Number=1,Type=String,Description="Tumor LOD score">
+##SAMPLE=<ID=NORMAL,SampleName=HCC1143_normal,File=/Users/shlee/Documents/workshop_materials/mutect2_tutorial/snippet/hcc1143_N_subset50k.bam>
+##SAMPLE=<ID=TUMOR,SampleName=HCC1143_tumor,File=/Users/shlee/Documents/workshop_materials/mutect2_tutorial/snippet/hcc1143_T_subset50k.bam>
+##contig=<ID=chr1,length=248956422>
+##contig=<ID=chr2,length=242193529>
+##contig=<ID=chr3,length=198295559>
+##contig=<ID=chr4,length=190214555>
+##contig=<ID=chr5,length=181538259>
+##contig=<ID=chr6,length=170805979>
+##contig=<ID=chr7,length=159345973>
+##contig=<ID=chr8,length=145138636>
+##contig=<ID=chr9,length=138394717>
+##contig=<ID=chr10,length=133797422>
+##contig=<ID=chr11,length=135086622>
+##contig=<ID=chr12,length=133275309>
+##contig=<ID=chr13,length=114364328>
+##contig=<ID=chr14,length=107043718>
+##contig=<ID=chr15,length=101991189>
+##contig=<ID=chr16,length=90338345>
+##contig=<ID=chr17,length=83257441>
+##contig=<ID=chr18,length=80373285>
+##contig=<ID=chr19,length=58617616>
+##contig=<ID=chr20,length=64444167>
+##contig=<ID=chr21,length=46709983>
+##contig=<ID=chr22,length=50818468>
+##contig=<ID=chrX,length=156040895>
+##contig=<ID=chrY,length=57227415>
+##contig=<ID=chrM,length=16569>
+##contig=<ID=chr1_KI270706v1_random,length=175055>
+##contig=<ID=chr1_KI270707v1_random,length=32032>
+##contig=<ID=chr1_KI270708v1_random,length=127682>
+##contig=<ID=chr1_KI270709v1_random,length=66860>
+##contig=<ID=chr1_KI270710v1_random,length=40176>
+##contig=<ID=chr1_KI270711v1_random,length=42210>
+##contig=<ID=chr1_KI270712v1_random,length=176043>
+##contig=<ID=chr1_KI270713v1_random,length=40745>
+##contig=<ID=chr1_KI270714v1_random,length=41717>
+##contig=<ID=chr2_KI270715v1_random,length=161471>
+##contig=<ID=chr2_KI270716v1_random,length=153799>
+##contig=<ID=chr3_GL000221v1_random,length=155397>
+##contig=<ID=chr4_GL000008v2_random,length=209709>
+##contig=<ID=chr5_GL000208v1_random,length=92689>
+##contig=<ID=chr9_KI270717v1_random,length=40062>
+##contig=<ID=chr9_KI270718v1_random,length=38054>
+##contig=<ID=chr9_KI270719v1_random,length=176845>
+##contig=<ID=chr9_KI270720v1_random,length=39050>
+##contig=<ID=chr11_KI270721v1_random,length=100316>
+##contig=<ID=chr14_GL000009v2_random,length=201709>
+##contig=<ID=chr14_GL000225v1_random,length=211173>
+##contig=<ID=chr14_KI270722v1_random,length=194050>
+##contig=<ID=chr14_GL000194v1_random,length=191469>
+##contig=<ID=chr14_KI270723v1_random,length=38115>
+##contig=<ID=chr14_KI270724v1_random,length=39555>
+##contig=<ID=chr14_KI270725v1_random,length=172810>
+##contig=<ID=chr14_KI270726v1_random,length=43739>
+##contig=<ID=chr15_KI270727v1_random,length=448248>
+##contig=<ID=chr16_KI270728v1_random,length=1872759>
+##contig=<ID=chr17_GL000205v2_random,length=185591>
+##contig=<ID=chr17_KI270729v1_random,length=280839>
+##contig=<ID=chr17_KI270730v1_random,length=112551>
+##contig=<ID=chr22_KI270731v1_random,length=150754>
+##contig=<ID=chr22_KI270732v1_random,length=41543>
+##contig=<ID=chr22_KI270733v1_random,length=179772>
+##contig=<ID=chr22_KI270734v1_random,length=165050>
+##contig=<ID=chr22_KI270735v1_random,length=42811>
+##contig=<ID=chr22_KI270736v1_random,length=181920>
+##contig=<ID=chr22_KI270737v1_random,length=103838>
+##contig=<ID=chr22_KI270738v1_random,length=99375>
+##contig=<ID=chr22_KI270739v1_random,length=73985>
+##contig=<ID=chrY_KI270740v1_random,length=37240>
+##contig=<ID=chrUn_KI270302v1,length=2274>
+##contig=<ID=chrUn_KI270304v1,length=2165>
+##contig=<ID=chrUn_KI270303v1,length=1942>
+##contig=<ID=chrUn_KI270305v1,length=1472>
+##contig=<ID=chrUn_KI270322v1,length=21476>
+##contig=<ID=chrUn_KI270320v1,length=4416>
+##contig=<ID=chrUn_KI270310v1,length=1201>
+##contig=<ID=chrUn_KI270316v1,length=1444>
+##contig=<ID=chrUn_KI270315v1,length=2276>
+##contig=<ID=chrUn_KI270312v1,length=998>
+##contig=<ID=chrUn_KI270311v1,length=12399>
+##contig=<ID=chrUn_KI270317v1,length=37690>
+##contig=<ID=chrUn_KI270412v1,length=1179>
+##contig=<ID=chrUn_KI270411v1,length=2646>
+##contig=<ID=chrUn_KI270414v1,length=2489>
+##contig=<ID=chrUn_KI270419v1,length=1029>
+##contig=<ID=chrUn_KI270418v1,length=2145>
+##contig=<ID=chrUn_KI270420v1,length=2321>
+##contig=<ID=chrUn_KI270424v1,length=2140>
+##contig=<ID=chrUn_KI270417v1,length=2043>
+##contig=<ID=chrUn_KI270422v1,length=1445>
+##contig=<ID=chrUn_KI270423v1,length=981>
+##contig=<ID=chrUn_KI270425v1,length=1884>
+##contig=<ID=chrUn_KI270429v1,length=1361>
+##contig=<ID=chrUn_KI270442v1,length=392061>
+##contig=<ID=chrUn_KI270466v1,length=1233>
+##contig=<ID=chrUn_KI270465v1,length=1774>
+##contig=<ID=chrUn_KI270467v1,length=3920>
+##contig=<ID=chrUn_KI270435v1,length=92983>
+##contig=<ID=chrUn_KI270438v1,length=112505>
+##contig=<ID=chrUn_KI270468v1,length=4055>
+##contig=<ID=chrUn_KI270510v1,length=2415>
+##contig=<ID=chrUn_KI270509v1,length=2318>
+##contig=<ID=chrUn_KI270518v1,length=2186>
+##contig=<ID=chrUn_KI270508v1,length=1951>
+##contig=<ID=chrUn_KI270516v1,length=1300>
+##contig=<ID=chrUn_KI270512v1,length=22689>
+##contig=<ID=chrUn_KI270519v1,length=138126>
+##contig=<ID=chrUn_KI270522v1,length=5674>
+##contig=<ID=chrUn_KI270511v1,length=8127>
+##contig=<ID=chrUn_KI270515v1,length=6361>
+##contig=<ID=chrUn_KI270507v1,length=5353>
+##contig=<ID=chrUn_KI270517v1,length=3253>
+##contig=<ID=chrUn_KI270529v1,length=1899>
+##contig=<ID=chrUn_KI270528v1,length=2983>
+##contig=<ID=chrUn_KI270530v1,length=2168>
+##contig=<ID=chrUn_KI270539v1,length=993>
+##contig=<ID=chrUn_KI270538v1,length=91309>
+##contig=<ID=chrUn_KI270544v1,length=1202>
+##contig=<ID=chrUn_KI270548v1,length=1599>
+##contig=<ID=chrUn_KI270583v1,length=1400>
+##contig=<ID=chrUn_KI270587v1,length=2969>
+##contig=<ID=chrUn_KI270580v1,length=1553>
+##contig=<ID=chrUn_KI270581v1,length=7046>
+##contig=<ID=chrUn_KI270579v1,length=31033>
+##contig=<ID=chrUn_KI270589v1,length=44474>
+##contig=<ID=chrUn_KI270590v1,length=4685>
+##contig=<ID=chrUn_KI270584v1,length=4513>
+##contig=<ID=chrUn_KI270582v1,length=6504>
+##contig=<ID=chrUn_KI270588v1,length=6158>
+##contig=<ID=chrUn_KI270593v1,length=3041>
+##contig=<ID=chrUn_KI270591v1,length=5796>
+##contig=<ID=chrUn_KI270330v1,length=1652>
+##contig=<ID=chrUn_KI270329v1,length=1040>
+##contig=<ID=chrUn_KI270334v1,length=1368>
+##contig=<ID=chrUn_KI270333v1,length=2699>
+##contig=<ID=chrUn_KI270335v1,length=1048>
+##contig=<ID=chrUn_KI270338v1,length=1428>
+##contig=<ID=chrUn_KI270340v1,length=1428>
+##contig=<ID=chrUn_KI270336v1,length=1026>
+##contig=<ID=chrUn_KI270337v1,length=1121>
+##contig=<ID=chrUn_KI270363v1,length=1803>
+##contig=<ID=chrUn_KI270364v1,length=2855>
+##contig=<ID=chrUn_KI270362v1,length=3530>
+##contig=<ID=chrUn_KI270366v1,length=8320>
+##contig=<ID=chrUn_KI270378v1,length=1048>
+##contig=<ID=chrUn_KI270379v1,length=1045>
+##contig=<ID=chrUn_KI270389v1,length=1298>
+##contig=<ID=chrUn_KI270390v1,length=2387>
+##contig=<ID=chrUn_KI270387v1,length=1537>
+##contig=<ID=chrUn_KI270395v1,length=1143>
+##contig=<ID=chrUn_KI270396v1,length=1880>
+##contig=<ID=chrUn_KI270388v1,length=1216>
+##contig=<ID=chrUn_KI270394v1,length=970>
+##contig=<ID=chrUn_KI270386v1,length=1788>
+##contig=<ID=chrUn_KI270391v1,length=1484>
+##contig=<ID=chrUn_KI270383v1,length=1750>
+##contig=<ID=chrUn_KI270393v1,length=1308>
+##contig=<ID=chrUn_KI270384v1,length=1658>
+##contig=<ID=chrUn_KI270392v1,length=971>
+##contig=<ID=chrUn_KI270381v1,length=1930>
+##contig=<ID=chrUn_KI270385v1,length=990>
+##contig=<ID=chrUn_KI270382v1,length=4215>
+##contig=<ID=chrUn_KI270376v1,length=1136>
+##contig=<ID=chrUn_KI270374v1,length=2656>
+##contig=<ID=chrUn_KI270372v1,length=1650>
+##contig=<ID=chrUn_KI270373v1,length=1451>
+##contig=<ID=chrUn_KI270375v1,length=2378>
+##contig=<ID=chrUn_KI270371v1,length=2805>
+##contig=<ID=chrUn_KI270448v1,length=7992>
+##contig=<ID=chrUn_KI270521v1,length=7642>
+##contig=<ID=chrUn_GL000195v1,length=182896>
+##contig=<ID=chrUn_GL000219v1,length=179198>
+##contig=<ID=chrUn_GL000220v1,length=161802>
+##contig=<ID=chrUn_GL000224v1,length=179693>
+##contig=<ID=chrUn_KI270741v1,length=157432>
+##contig=<ID=chrUn_GL000226v1,length=15008>
+##contig=<ID=chrUn_GL000213v1,length=164239>
+##contig=<ID=chrUn_KI270743v1,length=210658>
+##contig=<ID=chrUn_KI270744v1,length=168472>
+##contig=<ID=chrUn_KI270745v1,length=41891>
+##contig=<ID=chrUn_KI270746v1,length=66486>
+##contig=<ID=chrUn_KI270747v1,length=198735>
+##contig=<ID=chrUn_KI270748v1,length=93321>
+##contig=<ID=chrUn_KI270749v1,length=158759>
+##contig=<ID=chrUn_KI270750v1,length=148850>
+##contig=<ID=chrUn_KI270751v1,length=150742>
+##contig=<ID=chrUn_KI270752v1,length=27745>
+##contig=<ID=chrUn_KI270753v1,length=62944>
+##contig=<ID=chrUn_KI270754v1,length=40191>
+##contig=<ID=chrUn_KI270755v1,length=36723>
+##contig=<ID=chrUn_KI270756v1,length=79590>
+##contig=<ID=chrUn_KI270757v1,length=71251>
+##contig=<ID=chrUn_GL000214v1,length=137718>
+##contig=<ID=chrUn_KI270742v1,length=186739>
+##contig=<ID=chrUn_GL000216v2,length=176608>
+##contig=<ID=chrUn_GL000218v1,length=161147>
+##contig=<ID=chr1_KI270762v1_alt,length=354444>
+##contig=<ID=chr1_KI270766v1_alt,length=256271>
+##contig=<ID=chr1_KI270760v1_alt,length=109528>
+##contig=<ID=chr1_KI270765v1_alt,length=185285>
+##contig=<ID=chr1_GL383518v1_alt,length=182439>
+##contig=<ID=chr1_GL383519v1_alt,length=110268>
+##contig=<ID=chr1_GL383520v2_alt,length=366580>
+##contig=<ID=chr1_KI270764v1_alt,length=50258>
+##contig=<ID=chr1_KI270763v1_alt,length=911658>
+##contig=<ID=chr1_KI270759v1_alt,length=425601>
+##contig=<ID=chr1_KI270761v1_alt,length=165834>
+##contig=<ID=chr2_KI270770v1_alt,length=136240>
+##contig=<ID=chr2_KI270773v1_alt,length=70887>
+##contig=<ID=chr2_KI270774v1_alt,length=223625>
+##contig=<ID=chr2_KI270769v1_alt,length=120616>
+##contig=<ID=chr2_GL383521v1_alt,length=143390>
+##contig=<ID=chr2_KI270772v1_alt,length=133041>
+##contig=<ID=chr2_KI270775v1_alt,length=138019>
+##contig=<ID=chr2_KI270771v1_alt,length=110395>
+##contig=<ID=chr2_KI270768v1_alt,length=110099>
+##contig=<ID=chr2_GL582966v2_alt,length=96131>
+##contig=<ID=chr2_GL383522v1_alt,length=123821>
+##contig=<ID=chr2_KI270776v1_alt,length=174166>
+##contig=<ID=chr2_KI270767v1_alt,length=161578>
+##contig=<ID=chr3_JH636055v2_alt,length=173151>
+##contig=<ID=chr3_KI270783v1_alt,length=109187>
+##contig=<ID=chr3_KI270780v1_alt,length=224108>
+##contig=<ID=chr3_GL383526v1_alt,length=180671>
+##contig=<ID=chr3_KI270777v1_alt,length=173649>
+##contig=<ID=chr3_KI270778v1_alt,length=248252>
+##contig=<ID=chr3_KI270781v1_alt,length=113034>
+##contig=<ID=chr3_KI270779v1_alt,length=205312>
+##contig=<ID=chr3_KI270782v1_alt,length=162429>
+##contig=<ID=chr3_KI270784v1_alt,length=184404>
+##contig=<ID=chr4_KI270790v1_alt,length=220246>
+##contig=<ID=chr4_GL383528v1_alt,length=376187>
+##contig=<ID=chr4_KI270787v1_alt,length=111943>
+##contig=<ID=chr4_GL000257v2_alt,length=586476>
+##contig=<ID=chr4_KI270788v1_alt,length=158965>
+##contig=<ID=chr4_GL383527v1_alt,length=164536>
+##contig=<ID=chr4_KI270785v1_alt,length=119912>
+##contig=<ID=chr4_KI270789v1_alt,length=205944>
+##contig=<ID=chr4_KI270786v1_alt,length=244096>
+##contig=<ID=chr5_KI270793v1_alt,length=126136>
+##contig=<ID=chr5_KI270792v1_alt,length=179043>
+##contig=<ID=chr5_KI270791v1_alt,length=195710>
+##contig=<ID=chr5_GL383532v1_alt,length=82728>
+##contig=<ID=chr5_GL949742v1_alt,length=226852>
+##contig=<ID=chr5_KI270794v1_alt,length=164558>
+##contig=<ID=chr5_GL339449v2_alt,length=1612928>
+##contig=<ID=chr5_GL383530v1_alt,length=101241>
+##contig=<ID=chr5_KI270796v1_alt,length=172708>
+##contig=<ID=chr5_GL383531v1_alt,length=173459>
+##contig=<ID=chr5_KI270795v1_alt,length=131892>
+##contig=<ID=chr6_GL000250v2_alt,length=4672374>
+##contig=<ID=chr6_KI270800v1_alt,length=175808>
+##contig=<ID=chr6_KI270799v1_alt,length=152148>
+##contig=<ID=chr6_GL383533v1_alt,length=124736>
+##contig=<ID=chr6_KI270801v1_alt,length=870480>
+##contig=<ID=chr6_KI270802v1_alt,length=75005>
+##contig=<ID=chr6_KB021644v2_alt,length=185823>
+##contig=<ID=chr6_KI270797v1_alt,length=197536>
+##contig=<ID=chr6_KI270798v1_alt,length=271782>
+##contig=<ID=chr7_KI270804v1_alt,length=157952>
+##contig=<ID=chr7_KI270809v1_alt,length=209586>
+##contig=<ID=chr7_KI270806v1_alt,length=158166>
+##contig=<ID=chr7_GL383534v2_alt,length=119183>
+##contig=<ID=chr7_KI270803v1_alt,length=1111570>
+##contig=<ID=chr7_KI270808v1_alt,length=271455>
+##contig=<ID=chr7_KI270807v1_alt,length=126434>
+##contig=<ID=chr7_KI270805v1_alt,length=209988>
+##contig=<ID=chr8_KI270818v1_alt,length=145606>
+##contig=<ID=chr8_KI270812v1_alt,length=282736>
+##contig=<ID=chr8_KI270811v1_alt,length=292436>
+##contig=<ID=chr8_KI270821v1_alt,length=985506>
+##contig=<ID=chr8_KI270813v1_alt,length=300230>
+##contig=<ID=chr8_KI270822v1_alt,length=624492>
+##contig=<ID=chr8_KI270814v1_alt,length=141812>
+##contig=<ID=chr8_KI270810v1_alt,length=374415>
+##contig=<ID=chr8_KI270819v1_alt,length=133535>
+##contig=<ID=chr8_KI270820v1_alt,length=36640>
+##contig=<ID=chr8_KI270817v1_alt,length=158983>
+##contig=<ID=chr8_KI270816v1_alt,length=305841>
+##contig=<ID=chr8_KI270815v1_alt,length=132244>
+##contig=<ID=chr9_GL383539v1_alt,length=162988>
+##contig=<ID=chr9_GL383540v1_alt,length=71551>
+##contig=<ID=chr9_GL383541v1_alt,length=171286>
+##contig=<ID=chr9_GL383542v1_alt,length=60032>
+##contig=<ID=chr9_KI270823v1_alt,length=439082>
+##contig=<ID=chr10_GL383545v1_alt,length=179254>
+##contig=<ID=chr10_KI270824v1_alt,length=181496>
+##contig=<ID=chr10_GL383546v1_alt,length=309802>
+##contig=<ID=chr10_KI270825v1_alt,length=188315>
+##contig=<ID=chr11_KI270832v1_alt,length=210133>
+##contig=<ID=chr11_KI270830v1_alt,length=177092>
+##contig=<ID=chr11_KI270831v1_alt,length=296895>
+##contig=<ID=chr11_KI270829v1_alt,length=204059>
+##contig=<ID=chr11_GL383547v1_alt,length=154407>
+##contig=<ID=chr11_JH159136v1_alt,length=200998>
+##contig=<ID=chr11_JH159137v1_alt,length=191409>
+##contig=<ID=chr11_KI270827v1_alt,length=67707>
+##contig=<ID=chr11_KI270826v1_alt,length=186169>
+##contig=<ID=chr12_GL877875v1_alt,length=167313>
+##contig=<ID=chr12_GL877876v1_alt,length=408271>
+##contig=<ID=chr12_KI270837v1_alt,length=40090>
+##contig=<ID=chr12_GL383549v1_alt,length=120804>
+##contig=<ID=chr12_KI270835v1_alt,length=238139>
+##contig=<ID=chr12_GL383550v2_alt,length=169178>
+##contig=<ID=chr12_GL383552v1_alt,length=138655>
+##contig=<ID=chr12_GL383553v2_alt,length=152874>
+##contig=<ID=chr12_KI270834v1_alt,length=119498>
+##contig=<ID=chr12_GL383551v1_alt,length=184319>
+##contig=<ID=chr12_KI270833v1_alt,length=76061>
+##contig=<ID=chr12_KI270836v1_alt,length=56134>
+##contig=<ID=chr13_KI270840v1_alt,length=191684>
+##contig=<ID=chr13_KI270839v1_alt,length=180306>
+##contig=<ID=chr13_KI270843v1_alt,length=103832>
+##contig=<ID=chr13_KI270841v1_alt,length=169134>
+##contig=<ID=chr13_KI270838v1_alt,length=306913>
+##contig=<ID=chr13_KI270842v1_alt,length=37287>
+##contig=<ID=chr14_KI270844v1_alt,length=322166>
+##contig=<ID=chr14_KI270847v1_alt,length=1511111>
+##contig=<ID=chr14_KI270845v1_alt,length=180703>
+##contig=<ID=chr14_KI270846v1_alt,length=1351393>
+##contig=<ID=chr15_KI270852v1_alt,length=478999>
+##contig=<ID=chr15_KI270851v1_alt,length=263054>
+##contig=<ID=chr15_KI270848v1_alt,length=327382>
+##contig=<ID=chr15_GL383554v1_alt,length=296527>
+##contig=<ID=chr15_KI270849v1_alt,length=244917>
+##contig=<ID=chr15_GL383555v2_alt,length=388773>
+##contig=<ID=chr15_KI270850v1_alt,length=430880>
+##contig=<ID=chr16_KI270854v1_alt,length=134193>
+##contig=<ID=chr16_KI270856v1_alt,length=63982>
+##contig=<ID=chr16_KI270855v1_alt,length=232857>
+##contig=<ID=chr16_KI270853v1_alt,length=2659700>
+##contig=<ID=chr16_GL383556v1_alt,length=192462>
+##contig=<ID=chr16_GL383557v1_alt,length=89672>
+##contig=<ID=chr17_GL383563v3_alt,length=375691>
+##contig=<ID=chr17_KI270862v1_alt,length=391357>
+##contig=<ID=chr17_KI270861v1_alt,length=196688>
+##contig=<ID=chr17_KI270857v1_alt,length=2877074>
+##contig=<ID=chr17_JH159146v1_alt,length=278131>
+##contig=<ID=chr17_JH159147v1_alt,length=70345>
+##contig=<ID=chr17_GL383564v2_alt,length=133151>
+##contig=<ID=chr17_GL000258v2_alt,length=1821992>
+##contig=<ID=chr17_GL383565v1_alt,length=223995>
+##contig=<ID=chr17_KI270858v1_alt,length=235827>
+##contig=<ID=chr17_KI270859v1_alt,length=108763>
+##contig=<ID=chr17_GL383566v1_alt,length=90219>
+##contig=<ID=chr17_KI270860v1_alt,length=178921>
+##contig=<ID=chr18_KI270864v1_alt,length=111737>
+##contig=<ID=chr18_GL383567v1_alt,length=289831>
+##contig=<ID=chr18_GL383570v1_alt,length=164789>
+##contig=<ID=chr18_GL383571v1_alt,length=198278>
+##contig=<ID=chr18_GL383568v1_alt,length=104552>
+##contig=<ID=chr18_GL383569v1_alt,length=167950>
+##contig=<ID=chr18_GL383572v1_alt,length=159547>
+##contig=<ID=chr18_KI270863v1_alt,length=167999>
+##contig=<ID=chr19_KI270868v1_alt,length=61734>
+##contig=<ID=chr19_KI270865v1_alt,length=52969>
+##contig=<ID=chr19_GL383573v1_alt,length=385657>
+##contig=<ID=chr19_GL383575v2_alt,length=170222>
+##contig=<ID=chr19_GL383576v1_alt,length=188024>
+##contig=<ID=chr19_GL383574v1_alt,length=155864>
+##contig=<ID=chr19_KI270866v1_alt,length=43156>
+##contig=<ID=chr19_KI270867v1_alt,length=233762>
+##contig=<ID=chr19_GL949746v1_alt,length=987716>
+##contig=<ID=chr20_GL383577v2_alt,length=128386>
+##contig=<ID=chr20_KI270869v1_alt,length=118774>
+##contig=<ID=chr20_KI270871v1_alt,length=58661>
+##contig=<ID=chr20_KI270870v1_alt,length=183433>
+##contig=<ID=chr21_GL383578v2_alt,length=63917>
+##contig=<ID=chr21_KI270874v1_alt,length=166743>
+##contig=<ID=chr21_KI270873v1_alt,length=143900>
+##contig=<ID=chr21_GL383579v2_alt,length=201197>
+##contig=<ID=chr21_GL383580v2_alt,length=74653>
+##contig=<ID=chr21_GL383581v2_alt,length=116689>
+##contig=<ID=chr21_KI270872v1_alt,length=82692>
+##contig=<ID=chr22_KI270875v1_alt,length=259914>
+##contig=<ID=chr22_KI270878v1_alt,length=186262>
+##contig=<ID=chr22_KI270879v1_alt,length=304135>
+##contig=<ID=chr22_KI270876v1_alt,length=263666>
+##contig=<ID=chr22_KI270877v1_alt,length=101331>
+##contig=<ID=chr22_GL383583v2_alt,length=96924>
+##contig=<ID=chr22_GL383582v2_alt,length=162811>
+##contig=<ID=chrX_KI270880v1_alt,length=284869>
+##contig=<ID=chrX_KI270881v1_alt,length=144206>
+##contig=<ID=chr19_KI270882v1_alt,length=248807>
+##contig=<ID=chr19_KI270883v1_alt,length=170399>
+##contig=<ID=chr19_KI270884v1_alt,length=157053>
+##contig=<ID=chr19_KI270885v1_alt,length=171027>
+##contig=<ID=chr19_KI270886v1_alt,length=204239>
+##contig=<ID=chr19_KI270887v1_alt,length=209512>
+##contig=<ID=chr19_KI270888v1_alt,length=155532>
+##contig=<ID=chr19_KI270889v1_alt,length=170698>
+##contig=<ID=chr19_KI270890v1_alt,length=184499>
+##contig=<ID=chr19_KI270891v1_alt,length=170680>
+##contig=<ID=chr1_KI270892v1_alt,length=162212>
+##contig=<ID=chr2_KI270894v1_alt,length=214158>
+##contig=<ID=chr2_KI270893v1_alt,length=161218>
+##contig=<ID=chr3_KI270895v1_alt,length=162896>
+##contig=<ID=chr4_KI270896v1_alt,length=378547>
+##contig=<ID=chr5_KI270897v1_alt,length=1144418>
+##contig=<ID=chr5_KI270898v1_alt,length=130957>
+##contig=<ID=chr6_GL000251v2_alt,length=4795265>
+##contig=<ID=chr7_KI270899v1_alt,length=190869>
+##contig=<ID=chr8_KI270901v1_alt,length=136959>
+##contig=<ID=chr8_KI270900v1_alt,length=318687>
+##contig=<ID=chr11_KI270902v1_alt,length=106711>
+##contig=<ID=chr11_KI270903v1_alt,length=214625>
+##contig=<ID=chr12_KI270904v1_alt,length=572349>
+##contig=<ID=chr15_KI270906v1_alt,length=196384>
+##contig=<ID=chr15_KI270905v1_alt,length=5161414>
+##contig=<ID=chr17_KI270907v1_alt,length=137721>
+##contig=<ID=chr17_KI270910v1_alt,length=157099>
+##contig=<ID=chr17_KI270909v1_alt,length=325800>
+##contig=<ID=chr17_JH159148v1_alt,length=88070>
+##contig=<ID=chr17_KI270908v1_alt,length=1423190>
+##contig=<ID=chr18_KI270912v1_alt,length=174061>
+##contig=<ID=chr18_KI270911v1_alt,length=157710>
+##contig=<ID=chr19_GL949747v2_alt,length=729520>
+##contig=<ID=chr22_KB663609v1_alt,length=74013>
+##contig=<ID=chrX_KI270913v1_alt,length=274009>
+##contig=<ID=chr19_KI270914v1_alt,length=205194>
+##contig=<ID=chr19_KI270915v1_alt,length=170665>
+##contig=<ID=chr19_KI270916v1_alt,length=184516>
+##contig=<ID=chr19_KI270917v1_alt,length=190932>
+##contig=<ID=chr19_KI270918v1_alt,length=123111>
+##contig=<ID=chr19_KI270919v1_alt,length=170701>
+##contig=<ID=chr19_KI270920v1_alt,length=198005>
+##contig=<ID=chr19_KI270921v1_alt,length=282224>
+##contig=<ID=chr19_KI270922v1_alt,length=187935>
+##contig=<ID=chr19_KI270923v1_alt,length=189352>
+##contig=<ID=chr3_KI270924v1_alt,length=166540>
+##contig=<ID=chr4_KI270925v1_alt,length=555799>
+##contig=<ID=chr6_GL000252v2_alt,length=4604811>
+##contig=<ID=chr8_KI270926v1_alt,length=229282>
+##contig=<ID=chr11_KI270927v1_alt,length=218612>
+##contig=<ID=chr19_GL949748v2_alt,length=1064304>
+##contig=<ID=chr22_KI270928v1_alt,length=176103>
+##contig=<ID=chr19_KI270929v1_alt,length=186203>
+##contig=<ID=chr19_KI270930v1_alt,length=200773>
+##contig=<ID=chr19_KI270931v1_alt,length=170148>
+##contig=<ID=chr19_KI270932v1_alt,length=215732>
+##contig=<ID=chr19_KI270933v1_alt,length=170537>
+##contig=<ID=chr19_GL000209v2_alt,length=177381>
+##contig=<ID=chr3_KI270934v1_alt,length=163458>
+##contig=<ID=chr6_GL000253v2_alt,length=4677643>
+##contig=<ID=chr19_GL949749v2_alt,length=1091841>
+##contig=<ID=chr3_KI270935v1_alt,length=197351>
+##contig=<ID=chr6_GL000254v2_alt,length=4827813>
+##contig=<ID=chr19_GL949750v2_alt,length=1066390>
+##contig=<ID=chr3_KI270936v1_alt,length=164170>
+##contig=<ID=chr6_GL000255v2_alt,length=4606388>
+##contig=<ID=chr19_GL949751v2_alt,length=1002683>
+##contig=<ID=chr3_KI270937v1_alt,length=165607>
+##contig=<ID=chr6_GL000256v2_alt,length=4929269>
+##contig=<ID=chr19_GL949752v1_alt,length=987100>
+##contig=<ID=chr6_KI270758v1_alt,length=76752>
+##contig=<ID=chr19_GL949753v2_alt,length=796479>
+##contig=<ID=chr19_KI270938v1_alt,length=1066800>
+##contig=<ID=chrEBV,length=171823>
+##contig=<ID=chrUn_KN707606v1_decoy,length=2200>
+##contig=<ID=chrUn_KN707607v1_decoy,length=3033>
+##contig=<ID=chrUn_KN707608v1_decoy,length=3112>
+##contig=<ID=chrUn_KN707609v1_decoy,length=1642>
+##contig=<ID=chrUn_KN707610v1_decoy,length=1393>
+##contig=<ID=chrUn_KN707611v1_decoy,length=1103>
+##contig=<ID=chrUn_KN707612v1_decoy,length=1039>
+##contig=<ID=chrUn_KN707613v1_decoy,length=1619>
+##contig=<ID=chrUn_KN707614v1_decoy,length=3122>
+##contig=<ID=chrUn_KN707615v1_decoy,length=1934>
+##contig=<ID=chrUn_KN707616v1_decoy,length=3111>
+##contig=<ID=chrUn_KN707617v1_decoy,length=2545>
+##contig=<ID=chrUn_KN707618v1_decoy,length=2295>
+##contig=<ID=chrUn_KN707619v1_decoy,length=1551>
+##contig=<ID=chrUn_KN707620v1_decoy,length=2046>
+##contig=<ID=chrUn_KN707621v1_decoy,length=1222>
+##contig=<ID=chrUn_KN707622v1_decoy,length=1535>
+##contig=<ID=chrUn_KN707623v1_decoy,length=3784>
+##contig=<ID=chrUn_KN707624v1_decoy,length=1329>
+##contig=<ID=chrUn_KN707625v1_decoy,length=1238>
+##contig=<ID=chrUn_KN707626v1_decoy,length=5623>
+##contig=<ID=chrUn_KN707627v1_decoy,length=5821>
+##contig=<ID=chrUn_KN707628v1_decoy,length=2960>
+##contig=<ID=chrUn_KN707629v1_decoy,length=1848>
+##contig=<ID=chrUn_KN707630v1_decoy,length=2315>
+##contig=<ID=chrUn_KN707631v1_decoy,length=1945>
+##contig=<ID=chrUn_KN707632v1_decoy,length=1424>
+##contig=<ID=chrUn_KN707633v1_decoy,length=1274>
+##contig=<ID=chrUn_KN707634v1_decoy,length=1007>
+##contig=<ID=chrUn_KN707635v1_decoy,length=1414>
+##contig=<ID=chrUn_KN707636v1_decoy,length=1725>
+##contig=<ID=chrUn_KN707637v1_decoy,length=5354>
+##contig=<ID=chrUn_KN707638v1_decoy,length=2189>
+##contig=<ID=chrUn_KN707639v1_decoy,length=1294>
+##contig=<ID=chrUn_KN707640v1_decoy,length=1831>
+##contig=<ID=chrUn_KN707641v1_decoy,length=1647>
+##contig=<ID=chrUn_KN707642v1_decoy,length=2943>
+##contig=<ID=chrUn_KN707643v1_decoy,length=2857>
+##contig=<ID=chrUn_KN707644v1_decoy,length=1030>
+##contig=<ID=chrUn_KN707645v1_decoy,length=1070>
+##contig=<ID=chrUn_KN707646v1_decoy,length=1735>
+##contig=<ID=chrUn_KN707647v1_decoy,length=1982>
+##contig=<ID=chrUn_KN707648v1_decoy,length=1564>
+##contig=<ID=chrUn_KN707649v1_decoy,length=1775>
+##contig=<ID=chrUn_KN707650v1_decoy,length=1540>
+##contig=<ID=chrUn_KN707651v1_decoy,length=2013>
+##contig=<ID=chrUn_KN707652v1_decoy,length=1176>
+##contig=<ID=chrUn_KN707653v1_decoy,length=1890>
+##contig=<ID=chrUn_KN707654v1_decoy,length=3644>
+##contig=<ID=chrUn_KN707655v1_decoy,length=2785>
+##contig=<ID=chrUn_KN707656v1_decoy,length=1017>
+##contig=<ID=chrUn_KN707657v1_decoy,length=1068>
+##contig=<ID=chrUn_KN707658v1_decoy,length=1007>
+##contig=<ID=chrUn_KN707659v1_decoy,length=2605>
+##contig=<ID=chrUn_KN707660v1_decoy,length=8410>
+##contig=<ID=chrUn_KN707661v1_decoy,length=5534>
+##contig=<ID=chrUn_KN707662v1_decoy,length=2173>
+##contig=<ID=chrUn_KN707663v1_decoy,length=1065>
+##contig=<ID=chrUn_KN707664v1_decoy,length=8683>
+##contig=<ID=chrUn_KN707665v1_decoy,length=2670>
+##contig=<ID=chrUn_KN707666v1_decoy,length=2420>
+##contig=<ID=chrUn_KN707667v1_decoy,length=2189>
+##contig=<ID=chrUn_KN707668v1_decoy,length=2093>
+##contig=<ID=chrUn_KN707669v1_decoy,length=1184>
+##contig=<ID=chrUn_KN707670v1_decoy,length=1205>
+##contig=<ID=chrUn_KN707671v1_decoy,length=2786>
+##contig=<ID=chrUn_KN707672v1_decoy,length=2794>
+##contig=<ID=chrUn_KN707673v1_decoy,length=19544>
+##contig=<ID=chrUn_KN707674v1_decoy,length=2848>
+##contig=<ID=chrUn_KN707675v1_decoy,length=10556>
+##contig=<ID=chrUn_KN707676v1_decoy,length=9066>
+##contig=<ID=chrUn_KN707677v1_decoy,length=7267>
+##contig=<ID=chrUn_KN707678v1_decoy,length=2462>
+##contig=<ID=chrUn_KN707679v1_decoy,length=1774>
+##contig=<ID=chrUn_KN707680v1_decoy,length=1297>
+##contig=<ID=chrUn_KN707681v1_decoy,length=4379>
+##contig=<ID=chrUn_KN707682v1_decoy,length=4208>
+##contig=<ID=chrUn_KN707683v1_decoy,length=4068>
+##contig=<ID=chrUn_KN707684v1_decoy,length=2940>
+##contig=<ID=chrUn_KN707685v1_decoy,length=3938>
+##contig=<ID=chrUn_KN707686v1_decoy,length=2072>
+##contig=<ID=chrUn_KN707687v1_decoy,length=1136>
+##contig=<ID=chrUn_KN707688v1_decoy,length=4248>
+##contig=<ID=chrUn_KN707689v1_decoy,length=5823>
+##contig=<ID=chrUn_KN707690v1_decoy,length=3715>
+##contig=<ID=chrUn_KN707691v1_decoy,length=4885>
+##contig=<ID=chrUn_KN707692v1_decoy,length=4813>
+##contig=<ID=chrUn_KN707693v1_decoy,length=2899>
+##contig=<ID=chrUn_KN707694v1_decoy,length=1228>
+##contig=<ID=chrUn_KN707695v1_decoy,length=3119>
+##contig=<ID=chrUn_KN707696v1_decoy,length=3828>
+##contig=<ID=chrUn_KN707697v1_decoy,length=1186>
+##contig=<ID=chrUn_KN707698v1_decoy,length=1908>
+##contig=<ID=chrUn_KN707699v1_decoy,length=2795>
+##contig=<ID=chrUn_KN707700v1_decoy,length=3703>
+##contig=<ID=chrUn_KN707701v1_decoy,length=6722>
+##contig=<ID=chrUn_KN707702v1_decoy,length=6466>
+##contig=<ID=chrUn_KN707703v1_decoy,length=2235>
+##contig=<ID=chrUn_KN707704v1_decoy,length=2871>
+##contig=<ID=chrUn_KN707705v1_decoy,length=4632>
+##contig=<ID=chrUn_KN707706v1_decoy,length=4225>
+##contig=<ID=chrUn_KN707707v1_decoy,length=4339>
+##contig=<ID=chrUn_KN707708v1_decoy,length=2305>
+##contig=<ID=chrUn_KN707709v1_decoy,length=3273>
+##contig=<ID=chrUn_KN707710v1_decoy,length=5701>
+##contig=<ID=chrUn_KN707711v1_decoy,length=4154>
+##contig=<ID=chrUn_KN707712v1_decoy,length=1243>
+##contig=<ID=chrUn_KN707713v1_decoy,length=1308>
+##contig=<ID=chrUn_KN707714v1_decoy,length=2922>
+##contig=<ID=chrUn_KN707715v1_decoy,length=3044>
+##contig=<ID=chrUn_KN707716v1_decoy,length=2888>
+##contig=<ID=chrUn_KN707717v1_decoy,length=1742>
+##contig=<ID=chrUn_KN707718v1_decoy,length=4969>
+##contig=<ID=chrUn_KN707719v1_decoy,length=3270>
+##contig=<ID=chrUn_KN707720v1_decoy,length=6028>
+##contig=<ID=chrUn_KN707721v1_decoy,length=1105>
+##contig=<ID=chrUn_KN707722v1_decoy,length=2884>
+##contig=<ID=chrUn_KN707723v1_decoy,length=1124>
+##contig=<ID=chrUn_KN707724v1_decoy,length=1454>
+##contig=<ID=chrUn_KN707725v1_decoy,length=2565>
+##contig=<ID=chrUn_KN707726v1_decoy,length=2149>
+##contig=<ID=chrUn_KN707727v1_decoy,length=2630>
+##contig=<ID=chrUn_KN707728v1_decoy,length=14625>
+##contig=<ID=chrUn_KN707729v1_decoy,length=7431>
+##contig=<ID=chrUn_KN707730v1_decoy,length=5776>
+##contig=<ID=chrUn_KN707731v1_decoy,length=4820>
+##contig=<ID=chrUn_KN707732v1_decoy,length=1227>
+##contig=<ID=chrUn_KN707733v1_decoy,length=7503>
+##contig=<ID=chrUn_KN707734v1_decoy,length=9652>
+##contig=<ID=chrUn_KN707735v1_decoy,length=1091>
+##contig=<ID=chrUn_KN707736v1_decoy,length=2467>
+##contig=<ID=chrUn_KN707737v1_decoy,length=1270>
+##contig=<ID=chrUn_KN707738v1_decoy,length=4365>
+##contig=<ID=chrUn_KN707739v1_decoy,length=4284>
+##contig=<ID=chrUn_KN707740v1_decoy,length=10282>
+##contig=<ID=chrUn_KN707741v1_decoy,length=5601>
+##contig=<ID=chrUn_KN707742v1_decoy,length=4758>
+##contig=<ID=chrUn_KN707743v1_decoy,length=1624>
+##contig=<ID=chrUn_KN707744v1_decoy,length=4024>
+##contig=<ID=chrUn_KN707745v1_decoy,length=1276>
+##contig=<ID=chrUn_KN707746v1_decoy,length=5083>
+##contig=<ID=chrUn_KN707747v1_decoy,length=2075>
+##contig=<ID=chrUn_KN707748v1_decoy,length=3553>
+##contig=<ID=chrUn_KN707749v1_decoy,length=7010>
+##contig=<ID=chrUn_KN707750v1_decoy,length=4718>
+##contig=<ID=chrUn_KN707751v1_decoy,length=3546>
+##contig=<ID=chrUn_KN707752v1_decoy,length=2873>
+##contig=<ID=chrUn_KN707753v1_decoy,length=2144>
+##contig=<ID=chrUn_KN707754v1_decoy,length=2243>
+##contig=<ID=chrUn_KN707755v1_decoy,length=5343>
+##contig=<ID=chrUn_KN707756v1_decoy,length=4877>
+##contig=<ID=chrUn_KN707757v1_decoy,length=3034>
+##contig=<ID=chrUn_KN707758v1_decoy,length=2826>
+##contig=<ID=chrUn_KN707759v1_decoy,length=1221>
+##contig=<ID=chrUn_KN707760v1_decoy,length=1169>
+##contig=<ID=chrUn_KN707761v1_decoy,length=2319>
+##contig=<ID=chrUn_KN707762v1_decoy,length=3450>
+##contig=<ID=chrUn_KN707763v1_decoy,length=2674>
+##contig=<ID=chrUn_KN707764v1_decoy,length=3912>
+##contig=<ID=chrUn_KN707765v1_decoy,length=6020>
+##contig=<ID=chrUn_KN707766v1_decoy,length=2303>
+##contig=<ID=chrUn_KN707767v1_decoy,length=2552>
+##contig=<ID=chrUn_KN707768v1_decoy,length=3656>
+##contig=<ID=chrUn_KN707769v1_decoy,length=1591>
+##contig=<ID=chrUn_KN707770v1_decoy,length=1209>
+##contig=<ID=chrUn_KN707771v1_decoy,length=3176>
+##contig=<ID=chrUn_KN707772v1_decoy,length=8915>
+##contig=<ID=chrUn_KN707773v1_decoy,length=4902>
+##contig=<ID=chrUn_KN707774v1_decoy,length=3324>
+##contig=<ID=chrUn_KN707775v1_decoy,length=5997>
+##contig=<ID=chrUn_KN707776v1_decoy,length=2618>
+##contig=<ID=chrUn_KN707777v1_decoy,length=10311>
+##contig=<ID=chrUn_KN707778v1_decoy,length=2440>
+##contig=<ID=chrUn_KN707779v1_decoy,length=12444>
+##contig=<ID=chrUn_KN707780v1_decoy,length=5691>
+##contig=<ID=chrUn_KN707781v1_decoy,length=2717>
+##contig=<ID=chrUn_KN707782v1_decoy,length=5277>
+##contig=<ID=chrUn_KN707783v1_decoy,length=4373>
+##contig=<ID=chrUn_KN707784v1_decoy,length=3224>
+##contig=<ID=chrUn_KN707785v1_decoy,length=2631>
+##contig=<ID=chrUn_KN707786v1_decoy,length=5385>
+##contig=<ID=chrUn_KN707787v1_decoy,length=3678>
+##contig=<ID=chrUn_KN707788v1_decoy,length=1412>
+##contig=<ID=chrUn_KN707789v1_decoy,length=1443>
+##contig=<ID=chrUn_KN707790v1_decoy,length=1098>
+##contig=<ID=chrUn_KN707791v1_decoy,length=3240>
+##contig=<ID=chrUn_KN707792v1_decoy,length=1915>
+##contig=<ID=chrUn_KN707793v1_decoy,length=4667>
+##contig=<ID=chrUn_KN707794v1_decoy,length=7219>
+##contig=<ID=chrUn_KN707795v1_decoy,length=3277>
+##contig=<ID=chrUn_KN707796v1_decoy,length=3473>
+##contig=<ID=chrUn_KN707797v1_decoy,length=4243>
+##contig=<ID=chrUn_KN707798v1_decoy,length=17599>
+##contig=<ID=chrUn_KN707799v1_decoy,length=5095>
+##contig=<ID=chrUn_KN707800v1_decoy,length=2237>
+##contig=<ID=chrUn_KN707801v1_decoy,length=2901>
+##contig=<ID=chrUn_KN707802v1_decoy,length=2666>
+##contig=<ID=chrUn_KN707803v1_decoy,length=5336>
+##contig=<ID=chrUn_KN707804v1_decoy,length=4383>
+##contig=<ID=chrUn_KN707805v1_decoy,length=5446>
+##contig=<ID=chrUn_KN707806v1_decoy,length=6252>
+##contig=<ID=chrUn_KN707807v1_decoy,length=4616>
+##contig=<ID=chrUn_KN707808v1_decoy,length=3021>
+##contig=<ID=chrUn_KN707809v1_decoy,length=3667>
+##contig=<ID=chrUn_KN707810v1_decoy,length=4563>
+##contig=<ID=chrUn_KN707811v1_decoy,length=1120>
+##contig=<ID=chrUn_KN707812v1_decoy,length=3845>
+##contig=<ID=chrUn_KN707813v1_decoy,length=2272>
+##contig=<ID=chrUn_KN707814v1_decoy,length=4764>
+##contig=<ID=chrUn_KN707815v1_decoy,length=5410>
+##contig=<ID=chrUn_KN707816v1_decoy,length=7150>
+##contig=<ID=chrUn_KN707817v1_decoy,length=1762>
+##contig=<ID=chrUn_KN707818v1_decoy,length=1207>
+##contig=<ID=chrUn_KN707819v1_decoy,length=1331>
+##contig=<ID=chrUn_KN707820v1_decoy,length=8307>
+##contig=<ID=chrUn_KN707821v1_decoy,length=2276>
+##contig=<ID=chrUn_KN707822v1_decoy,length=2575>
+##contig=<ID=chrUn_KN707823v1_decoy,length=3970>
+##contig=<ID=chrUn_KN707824v1_decoy,length=1352>
+##contig=<ID=chrUn_KN707825v1_decoy,length=3040>
+##contig=<ID=chrUn_KN707826v1_decoy,length=2070>
+##contig=<ID=chrUn_KN707827v1_decoy,length=2913>
+##contig=<ID=chrUn_KN707828v1_decoy,length=2389>
+##contig=<ID=chrUn_KN707829v1_decoy,length=1835>
+##contig=<ID=chrUn_KN707830v1_decoy,length=4807>
+##contig=<ID=chrUn_KN707831v1_decoy,length=2201>
+##contig=<ID=chrUn_KN707832v1_decoy,length=1265>
+##contig=<ID=chrUn_KN707833v1_decoy,length=1961>
+##contig=<ID=chrUn_KN707834v1_decoy,length=1064>
+##contig=<ID=chrUn_KN707835v1_decoy,length=1932>
+##contig=<ID=chrUn_KN707836v1_decoy,length=3213>
+##contig=<ID=chrUn_KN707837v1_decoy,length=1178>
+##contig=<ID=chrUn_KN707838v1_decoy,length=2926>
+##contig=<ID=chrUn_KN707839v1_decoy,length=1038>
+##contig=<ID=chrUn_KN707840v1_decoy,length=3298>
+##contig=<ID=chrUn_KN707841v1_decoy,length=8992>
+##contig=<ID=chrUn_KN707842v1_decoy,length=6698>
+##contig=<ID=chrUn_KN707843v1_decoy,length=4880>
+##contig=<ID=chrUn_KN707844v1_decoy,length=1766>
+##contig=<ID=chrUn_KN707845v1_decoy,length=3532>
+##contig=<ID=chrUn_KN707846v1_decoy,length=2297>
+##contig=<ID=chrUn_KN707847v1_decoy,length=1234>
+##contig=<ID=chrUn_KN707848v1_decoy,length=1205>
+##contig=<ID=chrUn_KN707849v1_decoy,length=2790>
+##contig=<ID=chrUn_KN707850v1_decoy,length=2006>
+##contig=<ID=chrUn_KN707851v1_decoy,length=4593>
+##contig=<ID=chrUn_KN707852v1_decoy,length=1579>
+##contig=<ID=chrUn_KN707853v1_decoy,length=9597>
+##contig=<ID=chrUn_KN707854v1_decoy,length=10451>
+##contig=<ID=chrUn_KN707855v1_decoy,length=3219>
+##contig=<ID=chrUn_KN707856v1_decoy,length=2300>
+##contig=<ID=chrUn_KN707857v1_decoy,length=5985>
+##contig=<ID=chrUn_KN707858v1_decoy,length=2959>
+##contig=<ID=chrUn_KN707859v1_decoy,length=1340>
+##contig=<ID=chrUn_KN707860v1_decoy,length=3148>
+##contig=<ID=chrUn_KN707861v1_decoy,length=2242>
+##contig=<ID=chrUn_KN707862v1_decoy,length=16513>
+##contig=<ID=chrUn_KN707863v1_decoy,length=7821>
+##contig=<ID=chrUn_KN707864v1_decoy,length=2159>
+##contig=<ID=chrUn_KN707865v1_decoy,length=2114>
+##contig=<ID=chrUn_KN707866v1_decoy,length=4109>
+##contig=<ID=chrUn_KN707867v1_decoy,length=1544>
+##contig=<ID=chrUn_KN707868v1_decoy,length=1005>
+##contig=<ID=chrUn_KN707869v1_decoy,length=8632>
+##contig=<ID=chrUn_KN707870v1_decoy,length=1012>
+##contig=<ID=chrUn_KN707871v1_decoy,length=4728>
+##contig=<ID=chrUn_KN707872v1_decoy,length=2165>
+##contig=<ID=chrUn_KN707873v1_decoy,length=7591>
+##contig=<ID=chrUn_KN707874v1_decoy,length=5202>
+##contig=<ID=chrUn_KN707875v1_decoy,length=4241>
+##contig=<ID=chrUn_KN707876v1_decoy,length=4131>
+##contig=<ID=chrUn_KN707877v1_decoy,length=2272>
+##contig=<ID=chrUn_KN707878v1_decoy,length=2085>
+##contig=<ID=chrUn_KN707879v1_decoy,length=4346>
+##contig=<ID=chrUn_KN707880v1_decoy,length=1208>
+##contig=<ID=chrUn_KN707881v1_decoy,length=4543>
+##contig=<ID=chrUn_KN707882v1_decoy,length=2772>
+##contig=<ID=chrUn_KN707883v1_decoy,length=2490>
+##contig=<ID=chrUn_KN707884v1_decoy,length=4568>
+##contig=<ID=chrUn_KN707885v1_decoy,length=1776>
+##contig=<ID=chrUn_KN707886v1_decoy,length=2699>
+##contig=<ID=chrUn_KN707887v1_decoy,length=3534>
+##contig=<ID=chrUn_KN707888v1_decoy,length=2424>
+##contig=<ID=chrUn_KN707889v1_decoy,length=1747>
+##contig=<ID=chrUn_KN707890v1_decoy,length=1088>
+##contig=<ID=chrUn_KN707891v1_decoy,length=1143>
+##contig=<ID=chrUn_KN707892v1_decoy,length=2530>
+##contig=<ID=chrUn_KN707893v1_decoy,length=8049>
+##contig=<ID=chrUn_KN707894v1_decoy,length=1366>
+##contig=<ID=chrUn_KN707895v1_decoy,length=4284>
+##contig=<ID=chrUn_KN707896v1_decoy,length=33125>
+##contig=<ID=chrUn_KN707897v1_decoy,length=2137>
+##contig=<ID=chrUn_KN707898v1_decoy,length=3840>
+##contig=<ID=chrUn_KN707899v1_decoy,length=3087>
+##contig=<ID=chrUn_KN707900v1_decoy,length=2041>
+##contig=<ID=chrUn_KN707901v1_decoy,length=3344>
+##contig=<ID=chrUn_KN707902v1_decoy,length=2921>
+##contig=<ID=chrUn_KN707903v1_decoy,length=6581>
+##contig=<ID=chrUn_KN707904v1_decoy,length=3968>
+##contig=<ID=chrUn_KN707905v1_decoy,length=2339>
+##contig=<ID=chrUn_KN707906v1_decoy,length=1243>
+##contig=<ID=chrUn_KN707907v1_decoy,length=7776>
+##contig=<ID=chrUn_KN707908v1_decoy,length=19837>
+##contig=<ID=chrUn_KN707909v1_decoy,length=1737>
+##contig=<ID=chrUn_KN707910v1_decoy,length=1098>
+##contig=<ID=chrUn_KN707911v1_decoy,length=1893>
+##contig=<ID=chrUn_KN707912v1_decoy,length=1281>
+##contig=<ID=chrUn_KN707913v1_decoy,length=1527>
+##contig=<ID=chrUn_KN707914v1_decoy,length=2055>
+##contig=<ID=chrUn_KN707915v1_decoy,length=2527>
+##contig=<ID=chrUn_KN707916v1_decoy,length=3275>
+##contig=<ID=chrUn_KN707917v1_decoy,length=1265>
+##contig=<ID=chrUn_KN707918v1_decoy,length=2623>
+##contig=<ID=chrUn_KN707919v1_decoy,length=4850>
+##contig=<ID=chrUn_KN707920v1_decoy,length=3584>
+##contig=<ID=chrUn_KN707921v1_decoy,length=2561>
+##contig=<ID=chrUn_KN707922v1_decoy,length=3041>
+##contig=<ID=chrUn_KN707923v1_decoy,length=1409>
+##contig=<ID=chrUn_KN707924v1_decoy,length=4596>
+##contig=<ID=chrUn_KN707925v1_decoy,length=11555>
+##contig=<ID=chrUn_KN707926v1_decoy,length=1266>
+##contig=<ID=chrUn_KN707927v1_decoy,length=1079>
+##contig=<ID=chrUn_KN707928v1_decoy,length=1087>
+##contig=<ID=chrUn_KN707929v1_decoy,length=1226>
+##contig=<ID=chrUn_KN707930v1_decoy,length=1131>
+##contig=<ID=chrUn_KN707931v1_decoy,length=1199>
+##contig=<ID=chrUn_KN707932v1_decoy,length=1084>
+##contig=<ID=chrUn_KN707933v1_decoy,length=2038>
+##contig=<ID=chrUn_KN707934v1_decoy,length=1070>
+##contig=<ID=chrUn_KN707935v1_decoy,length=1312>
+##contig=<ID=chrUn_KN707936v1_decoy,length=4031>
+##contig=<ID=chrUn_KN707937v1_decoy,length=7445>
+##contig=<ID=chrUn_KN707938v1_decoy,length=1770>
+##contig=<ID=chrUn_KN707939v1_decoy,length=5600>
+##contig=<ID=chrUn_KN707940v1_decoy,length=1882>
+##contig=<ID=chrUn_KN707941v1_decoy,length=1170>
+##contig=<ID=chrUn_KN707942v1_decoy,length=1300>
+##contig=<ID=chrUn_KN707943v1_decoy,length=5325>
+##contig=<ID=chrUn_KN707944v1_decoy,length=2043>
+##contig=<ID=chrUn_KN707945v1_decoy,length=1072>
+##contig=<ID=chrUn_KN707946v1_decoy,length=2463>
+##contig=<ID=chrUn_KN707947v1_decoy,length=1010>
+##contig=<ID=chrUn_KN707948v1_decoy,length=1432>
+##contig=<ID=chrUn_KN707949v1_decoy,length=1162>
+##contig=<ID=chrUn_KN707950v1_decoy,length=1095>
+##contig=<ID=chrUn_KN707951v1_decoy,length=1118>
+##contig=<ID=chrUn_KN707952v1_decoy,length=1383>
+##contig=<ID=chrUn_KN707953v1_decoy,length=2289>
+##contig=<ID=chrUn_KN707954v1_decoy,length=1648>
+##contig=<ID=chrUn_KN707955v1_decoy,length=2203>
+##contig=<ID=chrUn_KN707956v1_decoy,length=3270>
+##contig=<ID=chrUn_KN707957v1_decoy,length=11499>
+##contig=<ID=chrUn_KN707958v1_decoy,length=2474>
+##contig=<ID=chrUn_KN707959v1_decoy,length=2294>
+##contig=<ID=chrUn_KN707960v1_decoy,length=1238>
+##contig=<ID=chrUn_KN707961v1_decoy,length=3410>
+##contig=<ID=chrUn_KN707962v1_decoy,length=1523>
+##contig=<ID=chrUn_KN707963v1_decoy,length=62955>
+##contig=<ID=chrUn_KN707964v1_decoy,length=6282>
+##contig=<ID=chrUn_KN707965v1_decoy,length=3836>
+##contig=<ID=chrUn_KN707966v1_decoy,length=6486>
+##contig=<ID=chrUn_KN707967v1_decoy,length=15368>
+##contig=<ID=chrUn_KN707968v1_decoy,length=9572>
+##contig=<ID=chrUn_KN707969v1_decoy,length=6413>
+##contig=<ID=chrUn_KN707970v1_decoy,length=4104>
+##contig=<ID=chrUn_KN707971v1_decoy,length=12943>
+##contig=<ID=chrUn_KN707972v1_decoy,length=4650>
+##contig=<ID=chrUn_KN707973v1_decoy,length=3080>
+##contig=<ID=chrUn_KN707974v1_decoy,length=3134>
+##contig=<ID=chrUn_KN707975v1_decoy,length=6211>
+##contig=<ID=chrUn_KN707976v1_decoy,length=1126>
+##contig=<ID=chrUn_KN707977v1_decoy,length=1101>
+##contig=<ID=chrUn_KN707978v1_decoy,length=1101>
+##contig=<ID=chrUn_KN707979v1_decoy,length=2648>
+##contig=<ID=chrUn_KN707980v1_decoy,length=2973>
+##contig=<ID=chrUn_KN707981v1_decoy,length=2520>
+##contig=<ID=chrUn_KN707982v1_decoy,length=2318>
+##contig=<ID=chrUn_KN707983v1_decoy,length=2606>
+##contig=<ID=chrUn_KN707984v1_decoy,length=2205>
+##contig=<ID=chrUn_KN707985v1_decoy,length=2929>
+##contig=<ID=chrUn_KN707986v1_decoy,length=3869>
+##contig=<ID=chrUn_KN707987v1_decoy,length=1117>
+##contig=<ID=chrUn_KN707988v1_decoy,length=2960>
+##contig=<ID=chrUn_KN707989v1_decoy,length=1009>
+##contig=<ID=chrUn_KN707990v1_decoy,length=4048>
+##contig=<ID=chrUn_KN707991v1_decoy,length=2193>
+##contig=<ID=chrUn_KN707992v1_decoy,length=1830>
+##contig=<ID=chrUn_JTFH01000001v1_decoy,length=25139>
+##contig=<ID=chrUn_JTFH01000002v1_decoy,length=18532>
+##contig=<ID=chrUn_JTFH01000003v1_decoy,length=15240>
+##contig=<ID=chrUn_JTFH01000004v1_decoy,length=13739>
+##contig=<ID=chrUn_JTFH01000005v1_decoy,length=11297>
+##contig=<ID=chrUn_JTFH01000006v1_decoy,length=10074>
+##contig=<ID=chrUn_JTFH01000007v1_decoy,length=9891>
+##contig=<ID=chrUn_JTFH01000008v1_decoy,length=9774>
+##contig=<ID=chrUn_JTFH01000009v1_decoy,length=9727>
+##contig=<ID=chrUn_JTFH01000010v1_decoy,length=9358>
+##contig=<ID=chrUn_JTFH01000011v1_decoy,length=8920>
+##contig=<ID=chrUn_JTFH01000012v1_decoy,length=8479>
+##contig=<ID=chrUn_JTFH01000013v1_decoy,length=8312>
+##contig=<ID=chrUn_JTFH01000014v1_decoy,length=8261>
+##contig=<ID=chrUn_JTFH01000015v1_decoy,length=8131>
+##contig=<ID=chrUn_JTFH01000016v1_decoy,length=8051>
+##contig=<ID=chrUn_JTFH01000017v1_decoy,length=7832>
+##contig=<ID=chrUn_JTFH01000018v1_decoy,length=7710>
+##contig=<ID=chrUn_JTFH01000019v1_decoy,length=7702>
+##contig=<ID=chrUn_JTFH01000020v1_decoy,length=7479>
+##contig=<ID=chrUn_JTFH01000021v1_decoy,length=7368>
+##contig=<ID=chrUn_JTFH01000022v1_decoy,length=7162>
+##contig=<ID=chrUn_JTFH01000023v1_decoy,length=7065>
+##contig=<ID=chrUn_JTFH01000024v1_decoy,length=7019>
+##contig=<ID=chrUn_JTFH01000025v1_decoy,length=6997>
+##contig=<ID=chrUn_JTFH01000026v1_decoy,length=6994>
+##contig=<ID=chrUn_JTFH01000027v1_decoy,length=6979>
+##contig=<ID=chrUn_JTFH01000028v1_decoy,length=6797>
+##contig=<ID=chrUn_JTFH01000029v1_decoy,length=6525>
+##contig=<ID=chrUn_JTFH01000030v1_decoy,length=6246>
+##contig=<ID=chrUn_JTFH01000031v1_decoy,length=5926>
+##contig=<ID=chrUn_JTFH01000032v1_decoy,length=5914>
+##contig=<ID=chrUn_JTFH01000033v1_decoy,length=5898>
+##contig=<ID=chrUn_JTFH01000034v1_decoy,length=5879>
+##contig=<ID=chrUn_JTFH01000035v1_decoy,length=5834>
+##contig=<ID=chrUn_JTFH01000036v1_decoy,length=5743>
+##contig=<ID=chrUn_JTFH01000037v1_decoy,length=5577>
+##contig=<ID=chrUn_JTFH01000038v1_decoy,length=5413>
+##contig=<ID=chrUn_JTFH01000039v1_decoy,length=5250>
+##contig=<ID=chrUn_JTFH01000040v1_decoy,length=5246>
+##contig=<ID=chrUn_JTFH01000041v1_decoy,length=5118>
+##contig=<ID=chrUn_JTFH01000042v1_decoy,length=5058>
+##contig=<ID=chrUn_JTFH01000043v1_decoy,length=4959>
+##contig=<ID=chrUn_JTFH01000044v1_decoy,length=4853>
+##contig=<ID=chrUn_JTFH01000045v1_decoy,length=4828>
+##contig=<ID=chrUn_JTFH01000046v1_decoy,length=4819>
+##contig=<ID=chrUn_JTFH01000047v1_decoy,length=4809>
+##contig=<ID=chrUn_JTFH01000048v1_decoy,length=4710>
+##contig=<ID=chrUn_JTFH01000049v1_decoy,length=4680>
+##contig=<ID=chrUn_JTFH01000050v1_decoy,length=4645>
+##contig=<ID=chrUn_JTFH01000051v1_decoy,length=4514>
+##contig=<ID=chrUn_JTFH01000052v1_decoy,length=4439>
+##contig=<ID=chrUn_JTFH01000053v1_decoy,length=4416>
+##contig=<ID=chrUn_JTFH01000054v1_decoy,length=4409>
+##contig=<ID=chrUn_JTFH01000055v1_decoy,length=4392>
+##contig=<ID=chrUn_JTFH01000056v1_decoy,length=4359>
+##contig=<ID=chrUn_JTFH01000057v1_decoy,length=4319>
+##contig=<ID=chrUn_JTFH01000058v1_decoy,length=4290>
+##contig=<ID=chrUn_JTFH01000059v1_decoy,length=4242>
+##contig=<ID=chrUn_JTFH01000060v1_decoy,length=4228>
+##contig=<ID=chrUn_JTFH01000061v1_decoy,length=4222>
+##contig=<ID=chrUn_JTFH01000062v1_decoy,length=4216>
+##contig=<ID=chrUn_JTFH01000063v1_decoy,length=4210>
+##contig=<ID=chrUn_JTFH01000064v1_decoy,length=4206>
+##contig=<ID=chrUn_JTFH01000065v1_decoy,length=4102>
+##contig=<ID=chrUn_JTFH01000066v1_decoy,length=4101>
+##contig=<ID=chrUn_JTFH01000067v1_decoy,length=4083>
+##contig=<ID=chrUn_JTFH01000068v1_decoy,length=3967>
+##contig=<ID=chrUn_JTFH01000069v1_decoy,length=3955>
+##contig=<ID=chrUn_JTFH01000070v1_decoy,length=3945>
+##contig=<ID=chrUn_JTFH01000071v1_decoy,length=3930>
+##contig=<ID=chrUn_JTFH01000072v1_decoy,length=3929>
+##contig=<ID=chrUn_JTFH01000073v1_decoy,length=3924>
+##contig=<ID=chrUn_JTFH01000074v1_decoy,length=3919>
+##contig=<ID=chrUn_JTFH01000075v1_decoy,length=3908>
+##contig=<ID=chrUn_JTFH01000076v1_decoy,length=3892>
+##contig=<ID=chrUn_JTFH01000077v1_decoy,length=3890>
+##contig=<ID=chrUn_JTFH01000078v1_decoy,length=3859>
+##contig=<ID=chrUn_JTFH01000079v1_decoy,length=3846>
+##contig=<ID=chrUn_JTFH01000080v1_decoy,length=3835>
+##contig=<ID=chrUn_JTFH01000081v1_decoy,length=3830>
+##contig=<ID=chrUn_JTFH01000082v1_decoy,length=3828>
+##contig=<ID=chrUn_JTFH01000083v1_decoy,length=3825>
+##contig=<ID=chrUn_JTFH01000084v1_decoy,length=3821>
+##contig=<ID=chrUn_JTFH01000085v1_decoy,length=3809>
+##contig=<ID=chrUn_JTFH01000086v1_decoy,length=3801>
+##contig=<ID=chrUn_JTFH01000087v1_decoy,length=3799>
+##contig=<ID=chrUn_JTFH01000088v1_decoy,length=3737>
+##contig=<ID=chrUn_JTFH01000089v1_decoy,length=3701>
+##contig=<ID=chrUn_JTFH01000090v1_decoy,length=3698>
+##contig=<ID=chrUn_JTFH01000091v1_decoy,length=3692>
+##contig=<ID=chrUn_JTFH01000092v1_decoy,length=3686>
+##contig=<ID=chrUn_JTFH01000093v1_decoy,length=3677>
+##contig=<ID=chrUn_JTFH01000094v1_decoy,length=3664>
+##contig=<ID=chrUn_JTFH01000095v1_decoy,length=3613>
+##contig=<ID=chrUn_JTFH01000096v1_decoy,length=3611>
+##contig=<ID=chrUn_JTFH01000097v1_decoy,length=3606>
+##contig=<ID=chrUn_JTFH01000098v1_decoy,length=3584>
+##contig=<ID=chrUn_JTFH01000099v1_decoy,length=3581>
+##contig=<ID=chrUn_JTFH01000100v1_decoy,length=3543>
+##contig=<ID=chrUn_JTFH01000101v1_decoy,length=3528>
+##contig=<ID=chrUn_JTFH01000102v1_decoy,length=3527>
+##contig=<ID=chrUn_JTFH01000103v1_decoy,length=3496>
+##contig=<ID=chrUn_JTFH01000104v1_decoy,length=3493>
+##contig=<ID=chrUn_JTFH01000105v1_decoy,length=3484>
+##contig=<ID=chrUn_JTFH01000106v1_decoy,length=3435>
+##contig=<ID=chrUn_JTFH01000107v1_decoy,length=3391>
+##contig=<ID=chrUn_JTFH01000108v1_decoy,length=3374>
+##contig=<ID=chrUn_JTFH01000109v1_decoy,length=3371>
+##contig=<ID=chrUn_JTFH01000110v1_decoy,length=3361>
+##contig=<ID=chrUn_JTFH01000111v1_decoy,length=3351>
+##contig=<ID=chrUn_JTFH01000112v1_decoy,length=3340>
+##contig=<ID=chrUn_JTFH01000113v1_decoy,length=3320>
+##contig=<ID=chrUn_JTFH01000114v1_decoy,length=3282>
+##contig=<ID=chrUn_JTFH01000115v1_decoy,length=3278>
+##contig=<ID=chrUn_JTFH01000116v1_decoy,length=3260>
+##contig=<ID=chrUn_JTFH01000117v1_decoy,length=3258>
+##contig=<ID=chrUn_JTFH01000118v1_decoy,length=3253>
+##contig=<ID=chrUn_JTFH01000119v1_decoy,length=3247>
+##contig=<ID=chrUn_JTFH01000120v1_decoy,length=3230>
+##contig=<ID=chrUn_JTFH01000121v1_decoy,length=3224>
+##contig=<ID=chrUn_JTFH01000122v1_decoy,length=3216>
+##contig=<ID=chrUn_JTFH01000123v1_decoy,length=3212>
+##contig=<ID=chrUn_JTFH01000124v1_decoy,length=3194>
+##contig=<ID=chrUn_JTFH01000125v1_decoy,length=3189>
+##contig=<ID=chrUn_JTFH01000126v1_decoy,length=3177>
+##contig=<ID=chrUn_JTFH01000127v1_decoy,length=3176>
+##contig=<ID=chrUn_JTFH01000128v1_decoy,length=3173>
+##contig=<ID=chrUn_JTFH01000129v1_decoy,length=3170>
+##contig=<ID=chrUn_JTFH01000130v1_decoy,length=3166>
+##contig=<ID=chrUn_JTFH01000131v1_decoy,length=3163>
+##contig=<ID=chrUn_JTFH01000132v1_decoy,length=3143>
+##contig=<ID=chrUn_JTFH01000133v1_decoy,length=3137>
+##contig=<ID=chrUn_JTFH01000134v1_decoy,length=3116>
+##contig=<ID=chrUn_JTFH01000135v1_decoy,length=3106>
+##contig=<ID=chrUn_JTFH01000136v1_decoy,length=3093>
+##contig=<ID=chrUn_JTFH01000137v1_decoy,length=3079>
+##contig=<ID=chrUn_JTFH01000138v1_decoy,length=3053>
+##contig=<ID=chrUn_JTFH01000139v1_decoy,length=3051>
+##contig=<ID=chrUn_JTFH01000140v1_decoy,length=3015>
+##contig=<ID=chrUn_JTFH01000141v1_decoy,length=3012>
+##contig=<ID=chrUn_JTFH01000142v1_decoy,length=3009>
+##contig=<ID=chrUn_JTFH01000143v1_decoy,length=2997>
+##contig=<ID=chrUn_JTFH01000144v1_decoy,length=2997>
+##contig=<ID=chrUn_JTFH01000145v1_decoy,length=2983>
+##contig=<ID=chrUn_JTFH01000146v1_decoy,length=2979>
+##contig=<ID=chrUn_JTFH01000147v1_decoy,length=2967>
+##contig=<ID=chrUn_JTFH01000148v1_decoy,length=2967>
+##contig=<ID=chrUn_JTFH01000149v1_decoy,length=2966>
+##contig=<ID=chrUn_JTFH01000150v1_decoy,length=2954>
+##contig=<ID=chrUn_JTFH01000151v1_decoy,length=2952>
+##contig=<ID=chrUn_JTFH01000152v1_decoy,length=2934>
+##contig=<ID=chrUn_JTFH01000153v1_decoy,length=2918>
+##contig=<ID=chrUn_JTFH01000154v1_decoy,length=2895>
+##contig=<ID=chrUn_JTFH01000155v1_decoy,length=2887>
+##contig=<ID=chrUn_JTFH01000156v1_decoy,length=2879>
+##contig=<ID=chrUn_JTFH01000157v1_decoy,length=2878>
+##contig=<ID=chrUn_JTFH01000158v1_decoy,length=2872>
+##contig=<ID=chrUn_JTFH01000159v1_decoy,length=2868>
+##contig=<ID=chrUn_JTFH01000160v1_decoy,length=2866>
+##contig=<ID=chrUn_JTFH01000161v1_decoy,length=2865>
+##contig=<ID=chrUn_JTFH01000162v1_decoy,length=2864>
+##contig=<ID=chrUn_JTFH01000163v1_decoy,length=2859>
+##contig=<ID=chrUn_JTFH01000164v1_decoy,length=2854>
+##contig=<ID=chrUn_JTFH01000165v1_decoy,length=2830>
+##contig=<ID=chrUn_JTFH01000166v1_decoy,length=2828>
+##contig=<ID=chrUn_JTFH01000167v1_decoy,length=2824>
+##contig=<ID=chrUn_JTFH01000168v1_decoy,length=2819>
+##contig=<ID=chrUn_JTFH01000169v1_decoy,length=2813>
+##contig=<ID=chrUn_JTFH01000170v1_decoy,length=2809>
+##contig=<ID=chrUn_JTFH01000171v1_decoy,length=2802>
+##contig=<ID=chrUn_JTFH01000172v1_decoy,length=2791>
+##contig=<ID=chrUn_JTFH01000173v1_decoy,length=2783>
+##contig=<ID=chrUn_JTFH01000174v1_decoy,length=2778>
+##contig=<ID=chrUn_JTFH01000175v1_decoy,length=2777>
+##contig=<ID=chrUn_JTFH01000176v1_decoy,length=2770>
+##contig=<ID=chrUn_JTFH01000177v1_decoy,length=2769>
+##contig=<ID=chrUn_JTFH01000178v1_decoy,length=2767>
+##contig=<ID=chrUn_JTFH01000179v1_decoy,length=2763>
+##contig=<ID=chrUn_JTFH01000180v1_decoy,length=2745>
+##contig=<ID=chrUn_JTFH01000181v1_decoy,length=2742>
+##contig=<ID=chrUn_JTFH01000182v1_decoy,length=2736>
+##contig=<ID=chrUn_JTFH01000183v1_decoy,length=2729>
+##contig=<ID=chrUn_JTFH01000184v1_decoy,length=2726>
+##contig=<ID=chrUn_JTFH01000185v1_decoy,length=2719>
+##contig=<ID=chrUn_JTFH01000186v1_decoy,length=2715>
+##contig=<ID=chrUn_JTFH01000187v1_decoy,length=2708>
+##contig=<ID=chrUn_JTFH01000188v1_decoy,length=2704>
+##contig=<ID=chrUn_JTFH01000189v1_decoy,length=2692>
+##contig=<ID=chrUn_JTFH01000190v1_decoy,length=2691>
+##contig=<ID=chrUn_JTFH01000191v1_decoy,length=2690>
+##contig=<ID=chrUn_JTFH01000192v1_decoy,length=2687>
+##contig=<ID=chrUn_JTFH01000193v1_decoy,length=2677>
+##contig=<ID=chrUn_JTFH01000194v1_decoy,length=2668>
+##contig=<ID=chrUn_JTFH01000195v1_decoy,length=2668>
+##contig=<ID=chrUn_JTFH01000196v1_decoy,length=2663>
+##contig=<ID=chrUn_JTFH01000197v1_decoy,length=2655>
+##contig=<ID=chrUn_JTFH01000198v1_decoy,length=2644>
+##contig=<ID=chrUn_JTFH01000199v1_decoy,length=2642>
+##contig=<ID=chrUn_JTFH01000200v1_decoy,length=2632>
+##contig=<ID=chrUn_JTFH01000201v1_decoy,length=2632>
+##contig=<ID=chrUn_JTFH01000202v1_decoy,length=2628>
+##contig=<ID=chrUn_JTFH01000203v1_decoy,length=2623>
+##contig=<ID=chrUn_JTFH01000204v1_decoy,length=2622>
+##contig=<ID=chrUn_JTFH01000205v1_decoy,length=2619>
+##contig=<ID=chrUn_JTFH01000206v1_decoy,length=2605>
+##contig=<ID=chrUn_JTFH01000207v1_decoy,length=2603>
+##contig=<ID=chrUn_JTFH01000208v1_decoy,length=2601>
+##contig=<ID=chrUn_JTFH01000209v1_decoy,length=2598>
+##contig=<ID=chrUn_JTFH01000210v1_decoy,length=2597>
+##contig=<ID=chrUn_JTFH01000211v1_decoy,length=2596>
+##contig=<ID=chrUn_JTFH01000212v1_decoy,length=2594>
+##contig=<ID=chrUn_JTFH01000213v1_decoy,length=2586>
+##contig=<ID=chrUn_JTFH01000214v1_decoy,length=2585>
+##contig=<ID=chrUn_JTFH01000215v1_decoy,length=2583>
+##contig=<ID=chrUn_JTFH01000216v1_decoy,length=2578>
+##contig=<ID=chrUn_JTFH01000217v1_decoy,length=2569>
+##contig=<ID=chrUn_JTFH01000218v1_decoy,length=2569>
+##contig=<ID=chrUn_JTFH01000219v1_decoy,length=2551>
+##contig=<ID=chrUn_JTFH01000220v1_decoy,length=2548>
+##contig=<ID=chrUn_JTFH01000221v1_decoy,length=2548>
+##contig=<ID=chrUn_JTFH01000222v1_decoy,length=2546>
+##contig=<ID=chrUn_JTFH01000223v1_decoy,length=2545>
+##contig=<ID=chrUn_JTFH01000224v1_decoy,length=2534>
+##contig=<ID=chrUn_JTFH01000225v1_decoy,length=2533>
+##contig=<ID=chrUn_JTFH01000226v1_decoy,length=2522>
+##contig=<ID=chrUn_JTFH01000227v1_decoy,length=2522>
+##contig=<ID=chrUn_JTFH01000228v1_decoy,length=2515>
+##contig=<ID=chrUn_JTFH01000229v1_decoy,length=2513>
+##contig=<ID=chrUn_JTFH01000230v1_decoy,length=2507>
+##contig=<ID=chrUn_JTFH01000231v1_decoy,length=2504>
+##contig=<ID=chrUn_JTFH01000232v1_decoy,length=2497>
+##contig=<ID=chrUn_JTFH01000233v1_decoy,length=2471>
+##contig=<ID=chrUn_JTFH01000234v1_decoy,length=2465>
+##contig=<ID=chrUn_JTFH01000235v1_decoy,length=2464>
+##contig=<ID=chrUn_JTFH01000236v1_decoy,length=2459>
+##contig=<ID=chrUn_JTFH01000237v1_decoy,length=2457>
+##contig=<ID=chrUn_JTFH01000238v1_decoy,length=2450>
+##contig=<ID=chrUn_JTFH01000239v1_decoy,length=2435>
+##contig=<ID=chrUn_JTFH01000240v1_decoy,length=2434>
+##contig=<ID=chrUn_JTFH01000241v1_decoy,length=2432>
+##contig=<ID=chrUn_JTFH01000242v1_decoy,length=2427>
+##contig=<ID=chrUn_JTFH01000243v1_decoy,length=2421>
+##contig=<ID=chrUn_JTFH01000244v1_decoy,length=2420>
+##contig=<ID=chrUn_JTFH01000245v1_decoy,length=2414>
+##contig=<ID=chrUn_JTFH01000246v1_decoy,length=2404>
+##contig=<ID=chrUn_JTFH01000247v1_decoy,length=2403>
+##contig=<ID=chrUn_JTFH01000248v1_decoy,length=2402>
+##contig=<ID=chrUn_JTFH01000249v1_decoy,length=2397>
+##contig=<ID=chrUn_JTFH01000250v1_decoy,length=2395>
+##contig=<ID=chrUn_JTFH01000251v1_decoy,length=2394>
+##contig=<ID=chrUn_JTFH01000252v1_decoy,length=2388>
+##contig=<ID=chrUn_JTFH01000253v1_decoy,length=2382>
+##contig=<ID=chrUn_JTFH01000254v1_decoy,length=2381>
+##contig=<ID=chrUn_JTFH01000255v1_decoy,length=2380>
+##contig=<ID=chrUn_JTFH01000256v1_decoy,length=2368>
+##contig=<ID=chrUn_JTFH01000257v1_decoy,length=2364>
+##contig=<ID=chrUn_JTFH01000258v1_decoy,length=2363>
+##contig=<ID=chrUn_JTFH01000259v1_decoy,length=2348>
+##contig=<ID=chrUn_JTFH01000260v1_decoy,length=2339>
+##contig=<ID=chrUn_JTFH01000261v1_decoy,length=2335>
+##contig=<ID=chrUn_JTFH01000262v1_decoy,length=2332>
+##contig=<ID=chrUn_JTFH01000263v1_decoy,length=2331>
+##contig=<ID=chrUn_JTFH01000264v1_decoy,length=2330>
+##contig=<ID=chrUn_JTFH01000265v1_decoy,length=2323>
+##contig=<ID=chrUn_JTFH01000266v1_decoy,length=2319>
+##contig=<ID=chrUn_JTFH01000267v1_decoy,length=2314>
+##contig=<ID=chrUn_JTFH01000268v1_decoy,length=2308>
+##contig=<ID=chrUn_JTFH01000269v1_decoy,length=2306>
+##contig=<ID=chrUn_JTFH01000270v1_decoy,length=2296>
+##contig=<ID=chrUn_JTFH01000271v1_decoy,length=2287>
+##contig=<ID=chrUn_JTFH01000272v1_decoy,length=2279>
+##contig=<ID=chrUn_JTFH01000273v1_decoy,length=2276>
+##contig=<ID=chrUn_JTFH01000274v1_decoy,length=2273>
+##contig=<ID=chrUn_JTFH01000275v1_decoy,length=2262>
+##contig=<ID=chrUn_JTFH01000276v1_decoy,length=2254>
+##contig=<ID=chrUn_JTFH01000277v1_decoy,length=2252>
+##contig=<ID=chrUn_JTFH01000278v1_decoy,length=2245>
+##contig=<ID=chrUn_JTFH01000279v1_decoy,length=2239>
+##contig=<ID=chrUn_JTFH01000280v1_decoy,length=2223>
+##contig=<ID=chrUn_JTFH01000281v1_decoy,length=2220>
+##contig=<ID=chrUn_JTFH01000282v1_decoy,length=2218>
+##contig=<ID=chrUn_JTFH01000283v1_decoy,length=2215>
+##contig=<ID=chrUn_JTFH01000284v1_decoy,length=2213>
+##contig=<ID=chrUn_JTFH01000285v1_decoy,length=2203>
+##contig=<ID=chrUn_JTFH01000286v1_decoy,length=2200>
+##contig=<ID=chrUn_JTFH01000287v1_decoy,length=2197>
+##contig=<ID=chrUn_JTFH01000288v1_decoy,length=2194>
+##contig=<ID=chrUn_JTFH01000289v1_decoy,length=2183>
+##contig=<ID=chrUn_JTFH01000290v1_decoy,length=2179>
+##contig=<ID=chrUn_JTFH01000291v1_decoy,length=2177>
+##contig=<ID=chrUn_JTFH01000292v1_decoy,length=2177>
+##contig=<ID=chrUn_JTFH01000293v1_decoy,length=2177>
+##contig=<ID=chrUn_JTFH01000294v1_decoy,length=2168>
+##contig=<ID=chrUn_JTFH01000295v1_decoy,length=2160>
+##contig=<ID=chrUn_JTFH01000296v1_decoy,length=2155>
+##contig=<ID=chrUn_JTFH01000297v1_decoy,length=2144>
+##contig=<ID=chrUn_JTFH01000298v1_decoy,length=2143>
+##contig=<ID=chrUn_JTFH01000299v1_decoy,length=2136>
+##contig=<ID=chrUn_JTFH01000300v1_decoy,length=2134>
+##contig=<ID=chrUn_JTFH01000301v1_decoy,length=2129>
+##contig=<ID=chrUn_JTFH01000302v1_decoy,length=2128>
+##contig=<ID=chrUn_JTFH01000303v1_decoy,length=2125>
+##contig=<ID=chrUn_JTFH01000304v1_decoy,length=2125>
+##contig=<ID=chrUn_JTFH01000305v1_decoy,length=2122>
+##contig=<ID=chrUn_JTFH01000306v1_decoy,length=2111>
+##contig=<ID=chrUn_JTFH01000307v1_decoy,length=2106>
+##contig=<ID=chrUn_JTFH01000308v1_decoy,length=2094>
+##contig=<ID=chrUn_JTFH01000309v1_decoy,length=2093>
+##contig=<ID=chrUn_JTFH01000310v1_decoy,length=2088>
+##contig=<ID=chrUn_JTFH01000311v1_decoy,length=2086>
+##contig=<ID=chrUn_JTFH01000312v1_decoy,length=2086>
+##contig=<ID=chrUn_JTFH01000313v1_decoy,length=2084>
+##contig=<ID=chrUn_JTFH01000314v1_decoy,length=2080>
+##contig=<ID=chrUn_JTFH01000315v1_decoy,length=2079>
+##contig=<ID=chrUn_JTFH01000316v1_decoy,length=2076>
+##contig=<ID=chrUn_JTFH01000317v1_decoy,length=2071>
+##contig=<ID=chrUn_JTFH01000318v1_decoy,length=2066>
+##contig=<ID=chrUn_JTFH01000319v1_decoy,length=2061>
+##contig=<ID=chrUn_JTFH01000320v1_decoy,length=2055>
+##contig=<ID=chrUn_JTFH01000321v1_decoy,length=2053>
+##contig=<ID=chrUn_JTFH01000322v1_decoy,length=2040>
+##contig=<ID=chrUn_JTFH01000323v1_decoy,length=2036>
+##contig=<ID=chrUn_JTFH01000324v1_decoy,length=2035>
+##contig=<ID=chrUn_JTFH01000325v1_decoy,length=2034>
+##contig=<ID=chrUn_JTFH01000326v1_decoy,length=2032>
+##contig=<ID=chrUn_JTFH01000327v1_decoy,length=2029>
+##contig=<ID=chrUn_JTFH01000328v1_decoy,length=2025>
+##contig=<ID=chrUn_JTFH01000329v1_decoy,length=2021>
+##contig=<ID=chrUn_JTFH01000330v1_decoy,length=2018>
+##contig=<ID=chrUn_JTFH01000331v1_decoy,length=2015>
+##contig=<ID=chrUn_JTFH01000332v1_decoy,length=2009>
+##contig=<ID=chrUn_JTFH01000333v1_decoy,length=2007>
+##contig=<ID=chrUn_JTFH01000334v1_decoy,length=2005>
+##contig=<ID=chrUn_JTFH01000335v1_decoy,length=2003>
+##contig=<ID=chrUn_JTFH01000336v1_decoy,length=2001>
+##contig=<ID=chrUn_JTFH01000337v1_decoy,length=2001>
+##contig=<ID=chrUn_JTFH01000338v1_decoy,length=2000>
+##contig=<ID=chrUn_JTFH01000339v1_decoy,length=1996>
+##contig=<ID=chrUn_JTFH01000340v1_decoy,length=1992>
+##contig=<ID=chrUn_JTFH01000341v1_decoy,length=1985>
+##contig=<ID=chrUn_JTFH01000342v1_decoy,length=1981>
+##contig=<ID=chrUn_JTFH01000343v1_decoy,length=1977>
+##contig=<ID=chrUn_JTFH01000344v1_decoy,length=1971>
+##contig=<ID=chrUn_JTFH01000345v1_decoy,length=1968>
+##contig=<ID=chrUn_JTFH01000346v1_decoy,length=1962>
+##contig=<ID=chrUn_JTFH01000347v1_decoy,length=1961>
+##contig=<ID=chrUn_JTFH01000348v1_decoy,length=1960>
+##contig=<ID=chrUn_JTFH01000349v1_decoy,length=1960>
+##contig=<ID=chrUn_JTFH01000350v1_decoy,length=1954>
+##contig=<ID=chrUn_JTFH01000351v1_decoy,length=1952>
+##contig=<ID=chrUn_JTFH01000352v1_decoy,length=1947>
+##contig=<ID=chrUn_JTFH01000353v1_decoy,length=1944>
+##contig=<ID=chrUn_JTFH01000354v1_decoy,length=1943>
+##contig=<ID=chrUn_JTFH01000355v1_decoy,length=1941>
+##contig=<ID=chrUn_JTFH01000356v1_decoy,length=1937>
+##contig=<ID=chrUn_JTFH01000357v1_decoy,length=1934>
+##contig=<ID=chrUn_JTFH01000358v1_decoy,length=1929>
+##contig=<ID=chrUn_JTFH01000359v1_decoy,length=1924>
+##contig=<ID=chrUn_JTFH01000360v1_decoy,length=1924>
+##contig=<ID=chrUn_JTFH01000361v1_decoy,length=1923>
+##contig=<ID=chrUn_JTFH01000362v1_decoy,length=1921>
+##contig=<ID=chrUn_JTFH01000363v1_decoy,length=1918>
+##contig=<ID=chrUn_JTFH01000364v1_decoy,length=1915>
+##contig=<ID=chrUn_JTFH01000365v1_decoy,length=1915>
+##contig=<ID=chrUn_JTFH01000366v1_decoy,length=1914>
+##contig=<ID=chrUn_JTFH01000367v1_decoy,length=1912>
+##contig=<ID=chrUn_JTFH01000368v1_decoy,length=1910>
+##contig=<ID=chrUn_JTFH01000369v1_decoy,length=1907>
+##contig=<ID=chrUn_JTFH01000370v1_decoy,length=1904>
+##contig=<ID=chrUn_JTFH01000371v1_decoy,length=1897>
+##contig=<ID=chrUn_JTFH01000372v1_decoy,length=1891>
+##contig=<ID=chrUn_JTFH01000373v1_decoy,length=1890>
+##contig=<ID=chrUn_JTFH01000374v1_decoy,length=1888>
+##contig=<ID=chrUn_JTFH01000375v1_decoy,length=1888>
+##contig=<ID=chrUn_JTFH01000376v1_decoy,length=1885>
+##contig=<ID=chrUn_JTFH01000377v1_decoy,length=1881>
+##contig=<ID=chrUn_JTFH01000378v1_decoy,length=1879>
+##contig=<ID=chrUn_JTFH01000379v1_decoy,length=1877>
+##contig=<ID=chrUn_JTFH01000380v1_decoy,length=1876>
+##contig=<ID=chrUn_JTFH01000381v1_decoy,length=1876>
+##contig=<ID=chrUn_JTFH01000382v1_decoy,length=1874>
+##contig=<ID=chrUn_JTFH01000383v1_decoy,length=1872>
+##contig=<ID=chrUn_JTFH01000384v1_decoy,length=1869>
+##contig=<ID=chrUn_JTFH01000385v1_decoy,length=1866>
+##contig=<ID=chrUn_JTFH01000386v1_decoy,length=1865>
+##contig=<ID=chrUn_JTFH01000387v1_decoy,length=1865>
+##contig=<ID=chrUn_JTFH01000388v1_decoy,length=1865>
+##contig=<ID=chrUn_JTFH01000389v1_decoy,length=1862>
+##contig=<ID=chrUn_JTFH01000390v1_decoy,length=1862>
+##contig=<ID=chrUn_JTFH01000391v1_decoy,length=1859>
+##contig=<ID=chrUn_JTFH01000392v1_decoy,length=1856>
+##contig=<ID=chrUn_JTFH01000393v1_decoy,length=1856>
+##contig=<ID=chrUn_JTFH01000394v1_decoy,length=1854>
+##contig=<ID=chrUn_JTFH01000395v1_decoy,length=1850>
+##contig=<ID=chrUn_JTFH01000396v1_decoy,length=1849>
+##contig=<ID=chrUn_JTFH01000397v1_decoy,length=1849>
+##contig=<ID=chrUn_JTFH01000398v1_decoy,length=1847>
+##contig=<ID=chrUn_JTFH01000399v1_decoy,length=1839>
+##contig=<ID=chrUn_JTFH01000400v1_decoy,length=1834>
+##contig=<ID=chrUn_JTFH01000401v1_decoy,length=1821>
+##contig=<ID=chrUn_JTFH01000402v1_decoy,length=1815>
+##contig=<ID=chrUn_JTFH01000403v1_decoy,length=1811>
+##contig=<ID=chrUn_JTFH01000404v1_decoy,length=1808>
+##contig=<ID=chrUn_JTFH01000405v1_decoy,length=1808>
+##contig=<ID=chrUn_JTFH01000406v1_decoy,length=1807>
+##contig=<ID=chrUn_JTFH01000407v1_decoy,length=1807>
+##contig=<ID=chrUn_JTFH01000408v1_decoy,length=1802>
+##contig=<ID=chrUn_JTFH01000409v1_decoy,length=1801>
+##contig=<ID=chrUn_JTFH01000410v1_decoy,length=1800>
+##contig=<ID=chrUn_JTFH01000411v1_decoy,length=1795>
+##contig=<ID=chrUn_JTFH01000412v1_decoy,length=1794>
+##contig=<ID=chrUn_JTFH01000413v1_decoy,length=1792>
+##contig=<ID=chrUn_JTFH01000414v1_decoy,length=1788>
+##contig=<ID=chrUn_JTFH01000415v1_decoy,length=1786>
+##contig=<ID=chrUn_JTFH01000416v1_decoy,length=1782>
+##contig=<ID=chrUn_JTFH01000417v1_decoy,length=1782>
+##contig=<ID=chrUn_JTFH01000418v1_decoy,length=1781>
+##contig=<ID=chrUn_JTFH01000419v1_decoy,length=1781>
+##contig=<ID=chrUn_JTFH01000420v1_decoy,length=1779>
+##contig=<ID=chrUn_JTFH01000421v1_decoy,length=1777>
+##contig=<ID=chrUn_JTFH01000422v1_decoy,length=1764>
+##contig=<ID=chrUn_JTFH01000423v1_decoy,length=1762>
+##contig=<ID=chrUn_JTFH01000424v1_decoy,length=1755>
+##contig=<ID=chrUn_JTFH01000425v1_decoy,length=1749>
+##contig=<ID=chrUn_JTFH01000426v1_decoy,length=1747>
+##contig=<ID=chrUn_JTFH01000427v1_decoy,length=1746>
+##contig=<ID=chrUn_JTFH01000428v1_decoy,length=1745>
+##contig=<ID=chrUn_JTFH01000429v1_decoy,length=1744>
+##contig=<ID=chrUn_JTFH01000430v1_decoy,length=1742>
+##contig=<ID=chrUn_JTFH01000431v1_decoy,length=1740>
+##contig=<ID=chrUn_JTFH01000432v1_decoy,length=1740>
+##contig=<ID=chrUn_JTFH01000433v1_decoy,length=1736>
+##contig=<ID=chrUn_JTFH01000434v1_decoy,length=1735>
+##contig=<ID=chrUn_JTFH01000435v1_decoy,length=1732>
+##contig=<ID=chrUn_JTFH01000436v1_decoy,length=1732>
+##contig=<ID=chrUn_JTFH01000437v1_decoy,length=1730>
+##contig=<ID=chrUn_JTFH01000438v1_decoy,length=1727>
+##contig=<ID=chrUn_JTFH01000439v1_decoy,length=1722>
+##contig=<ID=chrUn_JTFH01000440v1_decoy,length=1718>
+##contig=<ID=chrUn_JTFH01000441v1_decoy,length=1716>
+##contig=<ID=chrUn_JTFH01000442v1_decoy,length=1710>
+##contig=<ID=chrUn_JTFH01000443v1_decoy,length=1708>
+##contig=<ID=chrUn_JTFH01000444v1_decoy,length=1707>
+##contig=<ID=chrUn_JTFH01000445v1_decoy,length=1706>
+##contig=<ID=chrUn_JTFH01000446v1_decoy,length=1705>
+##contig=<ID=chrUn_JTFH01000447v1_decoy,length=1704>
+##contig=<ID=chrUn_JTFH01000448v1_decoy,length=1699>
+##contig=<ID=chrUn_JTFH01000449v1_decoy,length=1698>
+##contig=<ID=chrUn_JTFH01000450v1_decoy,length=1697>
+##contig=<ID=chrUn_JTFH01000451v1_decoy,length=1697>
+##contig=<ID=chrUn_JTFH01000452v1_decoy,length=1695>
+##contig=<ID=chrUn_JTFH01000453v1_decoy,length=1695>
+##contig=<ID=chrUn_JTFH01000454v1_decoy,length=1693>
+##contig=<ID=chrUn_JTFH01000455v1_decoy,length=1687>
+##contig=<ID=chrUn_JTFH01000456v1_decoy,length=1686>
+##contig=<ID=chrUn_JTFH01000457v1_decoy,length=1680>
+##contig=<ID=chrUn_JTFH01000458v1_decoy,length=1679>
+##contig=<ID=chrUn_JTFH01000459v1_decoy,length=1679>
+##contig=<ID=chrUn_JTFH01000460v1_decoy,length=1678>
+##contig=<ID=chrUn_JTFH01000461v1_decoy,length=1674>
+##contig=<ID=chrUn_JTFH01000462v1_decoy,length=1674>
+##contig=<ID=chrUn_JTFH01000463v1_decoy,length=1671>
+##contig=<ID=chrUn_JTFH01000464v1_decoy,length=1669>
+##contig=<ID=chrUn_JTFH01000465v1_decoy,length=1665>
+##contig=<ID=chrUn_JTFH01000466v1_decoy,length=1663>
+##contig=<ID=chrUn_JTFH01000467v1_decoy,length=1657>
+##contig=<ID=chrUn_JTFH01000468v1_decoy,length=1653>
+##contig=<ID=chrUn_JTFH01000469v1_decoy,length=1652>
+##contig=<ID=chrUn_JTFH01000470v1_decoy,length=1650>
+##contig=<ID=chrUn_JTFH01000471v1_decoy,length=1649>
+##contig=<ID=chrUn_JTFH01000472v1_decoy,length=1649>
+##contig=<ID=chrUn_JTFH01000473v1_decoy,length=1640>
+##contig=<ID=chrUn_JTFH01000474v1_decoy,length=1638>
+##contig=<ID=chrUn_JTFH01000475v1_decoy,length=1636>
+##contig=<ID=chrUn_JTFH01000476v1_decoy,length=1632>
+##contig=<ID=chrUn_JTFH01000477v1_decoy,length=1631>
+##contig=<ID=chrUn_JTFH01000478v1_decoy,length=1630>
+##contig=<ID=chrUn_JTFH01000479v1_decoy,length=1627>
+##contig=<ID=chrUn_JTFH01000480v1_decoy,length=1624>
+##contig=<ID=chrUn_JTFH01000481v1_decoy,length=1617>
+##contig=<ID=chrUn_JTFH01000482v1_decoy,length=1616>
+##contig=<ID=chrUn_JTFH01000483v1_decoy,length=1615>
+##contig=<ID=chrUn_JTFH01000484v1_decoy,length=1611>
+##contig=<ID=chrUn_JTFH01000485v1_decoy,length=1611>
+##contig=<ID=chrUn_JTFH01000486v1_decoy,length=1606>
+##contig=<ID=chrUn_JTFH01000487v1_decoy,length=1605>
+##contig=<ID=chrUn_JTFH01000488v1_decoy,length=1605>
+##contig=<ID=chrUn_JTFH01000489v1_decoy,length=1600>
+##contig=<ID=chrUn_JTFH01000490v1_decoy,length=1598>
+##contig=<ID=chrUn_JTFH01000491v1_decoy,length=1598>
+##contig=<ID=chrUn_JTFH01000492v1_decoy,length=1597>
+##contig=<ID=chrUn_JTFH01000493v1_decoy,length=1596>
+##contig=<ID=chrUn_JTFH01000494v1_decoy,length=1595>
+##contig=<ID=chrUn_JTFH01000495v1_decoy,length=1592>
+##contig=<ID=chrUn_JTFH01000496v1_decoy,length=1589>
+##contig=<ID=chrUn_JTFH01000497v1_decoy,length=1585>
+##contig=<ID=chrUn_JTFH01000498v1_decoy,length=1579>
+##contig=<ID=chrUn_JTFH01000499v1_decoy,length=1578>
+##contig=<ID=chrUn_JTFH01000500v1_decoy,length=1577>
+##contig=<ID=chrUn_JTFH01000501v1_decoy,length=1577>
+##contig=<ID=chrUn_JTFH01000502v1_decoy,length=1577>
+##contig=<ID=chrUn_JTFH01000503v1_decoy,length=1576>
+##contig=<ID=chrUn_JTFH01000504v1_decoy,length=1575>
+##contig=<ID=chrUn_JTFH01000505v1_decoy,length=1574>
+##contig=<ID=chrUn_JTFH01000506v1_decoy,length=1572>
+##contig=<ID=chrUn_JTFH01000507v1_decoy,length=1571>
+##contig=<ID=chrUn_JTFH01000508v1_decoy,length=1563>
+##contig=<ID=chrUn_JTFH01000509v1_decoy,length=1561>
+##contig=<ID=chrUn_JTFH01000510v1_decoy,length=1561>
+##contig=<ID=chrUn_JTFH01000511v1_decoy,length=1560>
+##contig=<ID=chrUn_JTFH01000512v1_decoy,length=1560>
+##contig=<ID=chrUn_JTFH01000513v1_decoy,length=1554>
+##contig=<ID=chrUn_JTFH01000514v1_decoy,length=1552>
+##contig=<ID=chrUn_JTFH01000515v1_decoy,length=1548>
+##contig=<ID=chrUn_JTFH01000516v1_decoy,length=1546>
+##contig=<ID=chrUn_JTFH01000517v1_decoy,length=1541>
+##contig=<ID=chrUn_JTFH01000518v1_decoy,length=1536>
+##contig=<ID=chrUn_JTFH01000519v1_decoy,length=1533>
+##contig=<ID=chrUn_JTFH01000520v1_decoy,length=1532>
+##contig=<ID=chrUn_JTFH01000521v1_decoy,length=1532>
+##contig=<ID=chrUn_JTFH01000522v1_decoy,length=1530>
+##contig=<ID=chrUn_JTFH01000523v1_decoy,length=1527>
+##contig=<ID=chrUn_JTFH01000524v1_decoy,length=1526>
+##contig=<ID=chrUn_JTFH01000525v1_decoy,length=1524>
+##contig=<ID=chrUn_JTFH01000526v1_decoy,length=1523>
+##contig=<ID=chrUn_JTFH01000527v1_decoy,length=1523>
+##contig=<ID=chrUn_JTFH01000528v1_decoy,length=1522>
+##contig=<ID=chrUn_JTFH01000529v1_decoy,length=1522>
+##contig=<ID=chrUn_JTFH01000530v1_decoy,length=1519>
+##contig=<ID=chrUn_JTFH01000531v1_decoy,length=1513>
+##contig=<ID=chrUn_JTFH01000532v1_decoy,length=1508>
+##contig=<ID=chrUn_JTFH01000533v1_decoy,length=1508>
+##contig=<ID=chrUn_JTFH01000534v1_decoy,length=1505>
+##contig=<ID=chrUn_JTFH01000535v1_decoy,length=1503>
+##contig=<ID=chrUn_JTFH01000536v1_decoy,length=1496>
+##contig=<ID=chrUn_JTFH01000537v1_decoy,length=1491>
+##contig=<ID=chrUn_JTFH01000538v1_decoy,length=1490>
+##contig=<ID=chrUn_JTFH01000539v1_decoy,length=1490>
+##contig=<ID=chrUn_JTFH01000540v1_decoy,length=1487>
+##contig=<ID=chrUn_JTFH01000541v1_decoy,length=1486>
+##contig=<ID=chrUn_JTFH01000542v1_decoy,length=1485>
+##contig=<ID=chrUn_JTFH01000543v1_decoy,length=1484>
+##contig=<ID=chrUn_JTFH01000544v1_decoy,length=1483>
+##contig=<ID=chrUn_JTFH01000545v1_decoy,length=1479>
+##contig=<ID=chrUn_JTFH01000546v1_decoy,length=1479>
+##contig=<ID=chrUn_JTFH01000547v1_decoy,length=1476>
+##contig=<ID=chrUn_JTFH01000548v1_decoy,length=1475>
+##contig=<ID=chrUn_JTFH01000549v1_decoy,length=1472>
+##contig=<ID=chrUn_JTFH01000550v1_decoy,length=1469>
+##contig=<ID=chrUn_JTFH01000551v1_decoy,length=1468>
+##contig=<ID=chrUn_JTFH01000552v1_decoy,length=1467>
+##contig=<ID=chrUn_JTFH01000553v1_decoy,length=1465>
+##contig=<ID=chrUn_JTFH01000554v1_decoy,length=1464>
+##contig=<ID=chrUn_JTFH01000555v1_decoy,length=1463>
+##contig=<ID=chrUn_JTFH01000556v1_decoy,length=1463>
+##contig=<ID=chrUn_JTFH01000557v1_decoy,length=1459>
+##contig=<ID=chrUn_JTFH01000558v1_decoy,length=1459>
+##contig=<ID=chrUn_JTFH01000559v1_decoy,length=1458>
+##contig=<ID=chrUn_JTFH01000560v1_decoy,length=1458>
+##contig=<ID=chrUn_JTFH01000561v1_decoy,length=1454>
+##contig=<ID=chrUn_JTFH01000562v1_decoy,length=1449>
+##contig=<ID=chrUn_JTFH01000563v1_decoy,length=1449>
+##contig=<ID=chrUn_JTFH01000564v1_decoy,length=1448>
+##contig=<ID=chrUn_JTFH01000565v1_decoy,length=1446>
+##contig=<ID=chrUn_JTFH01000566v1_decoy,length=1442>
+##contig=<ID=chrUn_JTFH01000567v1_decoy,length=1441>
+##contig=<ID=chrUn_JTFH01000568v1_decoy,length=1440>
+##contig=<ID=chrUn_JTFH01000569v1_decoy,length=1439>
+##contig=<ID=chrUn_JTFH01000570v1_decoy,length=1437>
+##contig=<ID=chrUn_JTFH01000571v1_decoy,length=1436>
+##contig=<ID=chrUn_JTFH01000572v1_decoy,length=1429>
+##contig=<ID=chrUn_JTFH01000573v1_decoy,length=1429>
+##contig=<ID=chrUn_JTFH01000574v1_decoy,length=1427>
+##contig=<ID=chrUn_JTFH01000575v1_decoy,length=1426>
+##contig=<ID=chrUn_JTFH01000576v1_decoy,length=1425>
+##contig=<ID=chrUn_JTFH01000577v1_decoy,length=1424>
+##contig=<ID=chrUn_JTFH01000578v1_decoy,length=1424>
+##contig=<ID=chrUn_JTFH01000579v1_decoy,length=1423>
+##contig=<ID=chrUn_JTFH01000580v1_decoy,length=1423>
+##contig=<ID=chrUn_JTFH01000581v1_decoy,length=1423>
+##contig=<ID=chrUn_JTFH01000582v1_decoy,length=1414>
+##contig=<ID=chrUn_JTFH01000583v1_decoy,length=1414>
+##contig=<ID=chrUn_JTFH01000584v1_decoy,length=1413>
+##contig=<ID=chrUn_JTFH01000585v1_decoy,length=1413>
+##contig=<ID=chrUn_JTFH01000586v1_decoy,length=1410>
+##contig=<ID=chrUn_JTFH01000587v1_decoy,length=1409>
+##contig=<ID=chrUn_JTFH01000588v1_decoy,length=1409>
+##contig=<ID=chrUn_JTFH01000589v1_decoy,length=1406>
+##contig=<ID=chrUn_JTFH01000590v1_decoy,length=1405>
+##contig=<ID=chrUn_JTFH01000591v1_decoy,length=1405>
+##contig=<ID=chrUn_JTFH01000592v1_decoy,length=1404>
+##contig=<ID=chrUn_JTFH01000593v1_decoy,length=1404>
+##contig=<ID=chrUn_JTFH01000594v1_decoy,length=1402>
+##contig=<ID=chrUn_JTFH01000595v1_decoy,length=1402>
+##contig=<ID=chrUn_JTFH01000596v1_decoy,length=1402>
+##contig=<ID=chrUn_JTFH01000597v1_decoy,length=1402>
+##contig=<ID=chrUn_JTFH01000598v1_decoy,length=1400>
+##contig=<ID=chrUn_JTFH01000599v1_decoy,length=1398>
+##contig=<ID=chrUn_JTFH01000600v1_decoy,length=1396>
+##contig=<ID=chrUn_JTFH01000601v1_decoy,length=1395>
+##contig=<ID=chrUn_JTFH01000602v1_decoy,length=1394>
+##contig=<ID=chrUn_JTFH01000603v1_decoy,length=1393>
+##contig=<ID=chrUn_JTFH01000604v1_decoy,length=1391>
+##contig=<ID=chrUn_JTFH01000605v1_decoy,length=1389>
+##contig=<ID=chrUn_JTFH01000606v1_decoy,length=1389>
+##contig=<ID=chrUn_JTFH01000607v1_decoy,length=1388>
+##contig=<ID=chrUn_JTFH01000608v1_decoy,length=1387>
+##contig=<ID=chrUn_JTFH01000609v1_decoy,length=1384>
+##contig=<ID=chrUn_JTFH01000610v1_decoy,length=1381>
+##contig=<ID=chrUn_JTFH01000611v1_decoy,length=1381>
+##contig=<ID=chrUn_JTFH01000612v1_decoy,length=1379>
+##contig=<ID=chrUn_JTFH01000613v1_decoy,length=1377>
+##contig=<ID=chrUn_JTFH01000614v1_decoy,length=1376>
+##contig=<ID=chrUn_JTFH01000615v1_decoy,length=1376>
+##contig=<ID=chrUn_JTFH01000616v1_decoy,length=1375>
+##contig=<ID=chrUn_JTFH01000617v1_decoy,length=1374>
+##contig=<ID=chrUn_JTFH01000618v1_decoy,length=1372>
+##contig=<ID=chrUn_JTFH01000619v1_decoy,length=1371>
+##contig=<ID=chrUn_JTFH01000620v1_decoy,length=1370>
+##contig=<ID=chrUn_JTFH01000621v1_decoy,length=1370>
+##contig=<ID=chrUn_JTFH01000622v1_decoy,length=1366>
+##contig=<ID=chrUn_JTFH01000623v1_decoy,length=1363>
+##contig=<ID=chrUn_JTFH01000624v1_decoy,length=1360>
+##contig=<ID=chrUn_JTFH01000625v1_decoy,length=1356>
+##contig=<ID=chrUn_JTFH01000626v1_decoy,length=1355>
+##contig=<ID=chrUn_JTFH01000627v1_decoy,length=1355>
+##contig=<ID=chrUn_JTFH01000628v1_decoy,length=1352>
+##contig=<ID=chrUn_JTFH01000629v1_decoy,length=1345>
+##contig=<ID=chrUn_JTFH01000630v1_decoy,length=1344>
+##contig=<ID=chrUn_JTFH01000631v1_decoy,length=1344>
+##contig=<ID=chrUn_JTFH01000632v1_decoy,length=1342>
+##contig=<ID=chrUn_JTFH01000633v1_decoy,length=1342>
+##contig=<ID=chrUn_JTFH01000634v1_decoy,length=1336>
+##contig=<ID=chrUn_JTFH01000635v1_decoy,length=1334>
+##contig=<ID=chrUn_JTFH01000636v1_decoy,length=1334>
+##contig=<ID=chrUn_JTFH01000637v1_decoy,length=1333>
+##contig=<ID=chrUn_JTFH01000638v1_decoy,length=1332>
+##contig=<ID=chrUn_JTFH01000639v1_decoy,length=1328>
+##contig=<ID=chrUn_JTFH01000640v1_decoy,length=1328>
+##contig=<ID=chrUn_JTFH01000641v1_decoy,length=1328>
+##contig=<ID=chrUn_JTFH01000642v1_decoy,length=1327>
+##contig=<ID=chrUn_JTFH01000643v1_decoy,length=1325>
+##contig=<ID=chrUn_JTFH01000644v1_decoy,length=1322>
+##contig=<ID=chrUn_JTFH01000645v1_decoy,length=1320>
+##contig=<ID=chrUn_JTFH01000646v1_decoy,length=1319>
+##contig=<ID=chrUn_JTFH01000647v1_decoy,length=1318>
+##contig=<ID=chrUn_JTFH01000648v1_decoy,length=1315>
+##contig=<ID=chrUn_JTFH01000649v1_decoy,length=1314>
+##contig=<ID=chrUn_JTFH01000650v1_decoy,length=1313>
+##contig=<ID=chrUn_JTFH01000651v1_decoy,length=1313>
+##contig=<ID=chrUn_JTFH01000652v1_decoy,length=1312>
+##contig=<ID=chrUn_JTFH01000653v1_decoy,length=1310>
+##contig=<ID=chrUn_JTFH01000654v1_decoy,length=1309>
+##contig=<ID=chrUn_JTFH01000655v1_decoy,length=1309>
+##contig=<ID=chrUn_JTFH01000656v1_decoy,length=1307>
+##contig=<ID=chrUn_JTFH01000657v1_decoy,length=1307>
+##contig=<ID=chrUn_JTFH01000658v1_decoy,length=1305>
+##contig=<ID=chrUn_JTFH01000659v1_decoy,length=1304>
+##contig=<ID=chrUn_JTFH01000660v1_decoy,length=1303>
+##contig=<ID=chrUn_JTFH01000661v1_decoy,length=1302>
+##contig=<ID=chrUn_JTFH01000662v1_decoy,length=1302>
+##contig=<ID=chrUn_JTFH01000663v1_decoy,length=1301>
+##contig=<ID=chrUn_JTFH01000664v1_decoy,length=1301>
+##contig=<ID=chrUn_JTFH01000665v1_decoy,length=1300>
+##contig=<ID=chrUn_JTFH01000666v1_decoy,length=1299>
+##contig=<ID=chrUn_JTFH01000667v1_decoy,length=1297>
+##contig=<ID=chrUn_JTFH01000668v1_decoy,length=1295>
+##contig=<ID=chrUn_JTFH01000669v1_decoy,length=1294>
+##contig=<ID=chrUn_JTFH01000670v1_decoy,length=1293>
+##contig=<ID=chrUn_JTFH01000671v1_decoy,length=1291>
+##contig=<ID=chrUn_JTFH01000672v1_decoy,length=1291>
+##contig=<ID=chrUn_JTFH01000673v1_decoy,length=1289>
+##contig=<ID=chrUn_JTFH01000674v1_decoy,length=1288>
+##contig=<ID=chrUn_JTFH01000675v1_decoy,length=1288>
+##contig=<ID=chrUn_JTFH01000676v1_decoy,length=1287>
+##contig=<ID=chrUn_JTFH01000677v1_decoy,length=1287>
+##contig=<ID=chrUn_JTFH01000678v1_decoy,length=1287>
+##contig=<ID=chrUn_JTFH01000679v1_decoy,length=1286>
+##contig=<ID=chrUn_JTFH01000680v1_decoy,length=1283>
+##contig=<ID=chrUn_JTFH01000681v1_decoy,length=1281>
+##contig=<ID=chrUn_JTFH01000682v1_decoy,length=1277>
+##contig=<ID=chrUn_JTFH01000683v1_decoy,length=1274>
+##contig=<ID=chrUn_JTFH01000684v1_decoy,length=1270>
+##contig=<ID=chrUn_JTFH01000685v1_decoy,length=1267>
+##contig=<ID=chrUn_JTFH01000686v1_decoy,length=1266>
+##contig=<ID=chrUn_JTFH01000687v1_decoy,length=1260>
+##contig=<ID=chrUn_JTFH01000688v1_decoy,length=1259>
+##contig=<ID=chrUn_JTFH01000689v1_decoy,length=1258>
+##contig=<ID=chrUn_JTFH01000690v1_decoy,length=1258>
+##contig=<ID=chrUn_JTFH01000691v1_decoy,length=1258>
+##contig=<ID=chrUn_JTFH01000692v1_decoy,length=1256>
+##contig=<ID=chrUn_JTFH01000693v1_decoy,length=1255>
+##contig=<ID=chrUn_JTFH01000694v1_decoy,length=1254>
+##contig=<ID=chrUn_JTFH01000695v1_decoy,length=1254>
+##contig=<ID=chrUn_JTFH01000696v1_decoy,length=1253>
+##contig=<ID=chrUn_JTFH01000697v1_decoy,length=1250>
+##contig=<ID=chrUn_JTFH01000698v1_decoy,length=1249>
+##contig=<ID=chrUn_JTFH01000699v1_decoy,length=1248>
+##contig=<ID=chrUn_JTFH01000700v1_decoy,length=1248>
+##contig=<ID=chrUn_JTFH01000701v1_decoy,length=1247>
+##contig=<ID=chrUn_JTFH01000702v1_decoy,length=1242>
+##contig=<ID=chrUn_JTFH01000703v1_decoy,length=1242>
+##contig=<ID=chrUn_JTFH01000704v1_decoy,length=1241>
+##contig=<ID=chrUn_JTFH01000705v1_decoy,length=1241>
+##contig=<ID=chrUn_JTFH01000706v1_decoy,length=1241>
+##contig=<ID=chrUn_JTFH01000707v1_decoy,length=1239>
+##contig=<ID=chrUn_JTFH01000708v1_decoy,length=1238>
+##contig=<ID=chrUn_JTFH01000709v1_decoy,length=1237>
+##contig=<ID=chrUn_JTFH01000710v1_decoy,length=1236>
+##contig=<ID=chrUn_JTFH01000711v1_decoy,length=1235>
+##contig=<ID=chrUn_JTFH01000712v1_decoy,length=1234>
+##contig=<ID=chrUn_JTFH01000713v1_decoy,length=1234>
+##contig=<ID=chrUn_JTFH01000714v1_decoy,length=1234>
+##contig=<ID=chrUn_JTFH01000715v1_decoy,length=1233>
+##contig=<ID=chrUn_JTFH01000716v1_decoy,length=1232>
+##contig=<ID=chrUn_JTFH01000717v1_decoy,length=1232>
+##contig=<ID=chrUn_JTFH01000718v1_decoy,length=1231>
+##contig=<ID=chrUn_JTFH01000719v1_decoy,length=1230>
+##contig=<ID=chrUn_JTFH01000720v1_decoy,length=1228>
+##contig=<ID=chrUn_JTFH01000721v1_decoy,length=1227>
+##contig=<ID=chrUn_JTFH01000722v1_decoy,length=1227>
+##contig=<ID=chrUn_JTFH01000723v1_decoy,length=1226>
+##contig=<ID=chrUn_JTFH01000724v1_decoy,length=1224>
+##contig=<ID=chrUn_JTFH01000725v1_decoy,length=1224>
+##contig=<ID=chrUn_JTFH01000726v1_decoy,length=1220>
+##contig=<ID=chrUn_JTFH01000727v1_decoy,length=1220>
+##contig=<ID=chrUn_JTFH01000728v1_decoy,length=1219>
+##contig=<ID=chrUn_JTFH01000729v1_decoy,length=1217>
+##contig=<ID=chrUn_JTFH01000730v1_decoy,length=1216>
+##contig=<ID=chrUn_JTFH01000731v1_decoy,length=1215>
+##contig=<ID=chrUn_JTFH01000732v1_decoy,length=1214>
+##contig=<ID=chrUn_JTFH01000733v1_decoy,length=1214>
+##contig=<ID=chrUn_JTFH01000734v1_decoy,length=1214>
+##contig=<ID=chrUn_JTFH01000735v1_decoy,length=1213>
+##contig=<ID=chrUn_JTFH01000736v1_decoy,length=1212>
+##contig=<ID=chrUn_JTFH01000737v1_decoy,length=1209>
+##contig=<ID=chrUn_JTFH01000738v1_decoy,length=1208>
+##contig=<ID=chrUn_JTFH01000739v1_decoy,length=1207>
+##contig=<ID=chrUn_JTFH01000740v1_decoy,length=1207>
+##contig=<ID=chrUn_JTFH01000741v1_decoy,length=1207>
+##contig=<ID=chrUn_JTFH01000742v1_decoy,length=1206>
+##contig=<ID=chrUn_JTFH01000743v1_decoy,length=1206>
+##contig=<ID=chrUn_JTFH01000744v1_decoy,length=1205>
+##contig=<ID=chrUn_JTFH01000745v1_decoy,length=1205>
+##contig=<ID=chrUn_JTFH01000746v1_decoy,length=1204>
+##contig=<ID=chrUn_JTFH01000747v1_decoy,length=1204>
+##contig=<ID=chrUn_JTFH01000748v1_decoy,length=1204>
+##contig=<ID=chrUn_JTFH01000749v1_decoy,length=1203>
+##contig=<ID=chrUn_JTFH01000750v1_decoy,length=1201>
+##contig=<ID=chrUn_JTFH01000751v1_decoy,length=1201>
+##contig=<ID=chrUn_JTFH01000752v1_decoy,length=1200>
+##contig=<ID=chrUn_JTFH01000753v1_decoy,length=1200>
+##contig=<ID=chrUn_JTFH01000754v1_decoy,length=1199>
+##contig=<ID=chrUn_JTFH01000755v1_decoy,length=1198>
+##contig=<ID=chrUn_JTFH01000756v1_decoy,length=1197>
+##contig=<ID=chrUn_JTFH01000757v1_decoy,length=1196>
+##contig=<ID=chrUn_JTFH01000758v1_decoy,length=1195>
+##contig=<ID=chrUn_JTFH01000759v1_decoy,length=1194>
+##contig=<ID=chrUn_JTFH01000760v1_decoy,length=1194>
+##contig=<ID=chrUn_JTFH01000761v1_decoy,length=1191>
+##contig=<ID=chrUn_JTFH01000762v1_decoy,length=1189>
+##contig=<ID=chrUn_JTFH01000763v1_decoy,length=1186>
+##contig=<ID=chrUn_JTFH01000764v1_decoy,length=1186>
+##contig=<ID=chrUn_JTFH01000765v1_decoy,length=1184>
+##contig=<ID=chrUn_JTFH01000766v1_decoy,length=1183>
+##contig=<ID=chrUn_JTFH01000767v1_decoy,length=1183>
+##contig=<ID=chrUn_JTFH01000768v1_decoy,length=1182>
+##contig=<ID=chrUn_JTFH01000769v1_decoy,length=1181>
+##contig=<ID=chrUn_JTFH01000770v1_decoy,length=1181>
+##contig=<ID=chrUn_JTFH01000771v1_decoy,length=1181>
+##contig=<ID=chrUn_JTFH01000772v1_decoy,length=1181>
+##contig=<ID=chrUn_JTFH01000773v1_decoy,length=1179>
+##contig=<ID=chrUn_JTFH01000774v1_decoy,length=1178>
+##contig=<ID=chrUn_JTFH01000775v1_decoy,length=1178>
+##contig=<ID=chrUn_JTFH01000776v1_decoy,length=1177>
+##contig=<ID=chrUn_JTFH01000777v1_decoy,length=1177>
+##contig=<ID=chrUn_JTFH01000778v1_decoy,length=1171>
+##contig=<ID=chrUn_JTFH01000779v1_decoy,length=1171>
+##contig=<ID=chrUn_JTFH01000780v1_decoy,length=1171>
+##contig=<ID=chrUn_JTFH01000781v1_decoy,length=1170>
+##contig=<ID=chrUn_JTFH01000782v1_decoy,length=1170>
+##contig=<ID=chrUn_JTFH01000783v1_decoy,length=1167>
+##contig=<ID=chrUn_JTFH01000784v1_decoy,length=1167>
+##contig=<ID=chrUn_JTFH01000785v1_decoy,length=1167>
+##contig=<ID=chrUn_JTFH01000786v1_decoy,length=1165>
+##contig=<ID=chrUn_JTFH01000787v1_decoy,length=1165>
+##contig=<ID=chrUn_JTFH01000788v1_decoy,length=1162>
+##contig=<ID=chrUn_JTFH01000789v1_decoy,length=1157>
+##contig=<ID=chrUn_JTFH01000790v1_decoy,length=1156>
+##contig=<ID=chrUn_JTFH01000791v1_decoy,length=1156>
+##contig=<ID=chrUn_JTFH01000792v1_decoy,length=1154>
+##contig=<ID=chrUn_JTFH01000793v1_decoy,length=1154>
+##contig=<ID=chrUn_JTFH01000794v1_decoy,length=1151>
+##contig=<ID=chrUn_JTFH01000795v1_decoy,length=1151>
+##contig=<ID=chrUn_JTFH01000796v1_decoy,length=1150>
+##contig=<ID=chrUn_JTFH01000797v1_decoy,length=1150>
+##contig=<ID=chrUn_JTFH01000798v1_decoy,length=1147>
+##contig=<ID=chrUn_JTFH01000799v1_decoy,length=1147>
+##contig=<ID=chrUn_JTFH01000800v1_decoy,length=1146>
+##contig=<ID=chrUn_JTFH01000801v1_decoy,length=1144>
+##contig=<ID=chrUn_JTFH01000802v1_decoy,length=1144>
+##contig=<ID=chrUn_JTFH01000803v1_decoy,length=1143>
+##contig=<ID=chrUn_JTFH01000804v1_decoy,length=1142>
+##contig=<ID=chrUn_JTFH01000805v1_decoy,length=1141>
+##contig=<ID=chrUn_JTFH01000806v1_decoy,length=1141>
+##contig=<ID=chrUn_JTFH01000807v1_decoy,length=1140>
+##contig=<ID=chrUn_JTFH01000808v1_decoy,length=1138>
+##contig=<ID=chrUn_JTFH01000809v1_decoy,length=1134>
+##contig=<ID=chrUn_JTFH01000810v1_decoy,length=1134>
+##contig=<ID=chrUn_JTFH01000811v1_decoy,length=1132>
+##contig=<ID=chrUn_JTFH01000812v1_decoy,length=1131>
+##contig=<ID=chrUn_JTFH01000813v1_decoy,length=1131>
+##contig=<ID=chrUn_JTFH01000814v1_decoy,length=1130>
+##contig=<ID=chrUn_JTFH01000815v1_decoy,length=1127>
+##contig=<ID=chrUn_JTFH01000816v1_decoy,length=1126>
+##contig=<ID=chrUn_JTFH01000817v1_decoy,length=1124>
+##contig=<ID=chrUn_JTFH01000818v1_decoy,length=1122>
+##contig=<ID=chrUn_JTFH01000819v1_decoy,length=1122>
+##contig=<ID=chrUn_JTFH01000820v1_decoy,length=1121>
+##contig=<ID=chrUn_JTFH01000821v1_decoy,length=1119>
+##contig=<ID=chrUn_JTFH01000822v1_decoy,length=1119>
+##contig=<ID=chrUn_JTFH01000823v1_decoy,length=1119>
+##contig=<ID=chrUn_JTFH01000824v1_decoy,length=1119>
+##contig=<ID=chrUn_JTFH01000825v1_decoy,length=1118>
+##contig=<ID=chrUn_JTFH01000826v1_decoy,length=1116>
+##contig=<ID=chrUn_JTFH01000827v1_decoy,length=1116>
+##contig=<ID=chrUn_JTFH01000828v1_decoy,length=1115>
+##contig=<ID=chrUn_JTFH01000829v1_decoy,length=1115>
+##contig=<ID=chrUn_JTFH01000830v1_decoy,length=1115>
+##contig=<ID=chrUn_JTFH01000831v1_decoy,length=1114>
+##contig=<ID=chrUn_JTFH01000832v1_decoy,length=1113>
+##contig=<ID=chrUn_JTFH01000833v1_decoy,length=1113>
+##contig=<ID=chrUn_JTFH01000834v1_decoy,length=1110>
+##contig=<ID=chrUn_JTFH01000835v1_decoy,length=1110>
+##contig=<ID=chrUn_JTFH01000836v1_decoy,length=1109>
+##contig=<ID=chrUn_JTFH01000837v1_decoy,length=1108>
+##contig=<ID=chrUn_JTFH01000838v1_decoy,length=1107>
+##contig=<ID=chrUn_JTFH01000839v1_decoy,length=1107>
+##contig=<ID=chrUn_JTFH01000840v1_decoy,length=1107>
+##contig=<ID=chrUn_JTFH01000841v1_decoy,length=1107>
+##contig=<ID=chrUn_JTFH01000842v1_decoy,length=1106>
+##contig=<ID=chrUn_JTFH01000843v1_decoy,length=1103>
+##contig=<ID=chrUn_JTFH01000844v1_decoy,length=1103>
+##contig=<ID=chrUn_JTFH01000845v1_decoy,length=1103>
+##contig=<ID=chrUn_JTFH01000846v1_decoy,length=1100>
+##contig=<ID=chrUn_JTFH01000847v1_decoy,length=1099>
+##contig=<ID=chrUn_JTFH01000848v1_decoy,length=1098>
+##contig=<ID=chrUn_JTFH01000849v1_decoy,length=1097>
+##contig=<ID=chrUn_JTFH01000850v1_decoy,length=1096>
+##contig=<ID=chrUn_JTFH01000851v1_decoy,length=1096>
+##contig=<ID=chrUn_JTFH01000852v1_decoy,length=1094>
+##contig=<ID=chrUn_JTFH01000853v1_decoy,length=1093>
+##contig=<ID=chrUn_JTFH01000854v1_decoy,length=1090>
+##contig=<ID=chrUn_JTFH01000855v1_decoy,length=1088>
+##contig=<ID=chrUn_JTFH01000856v1_decoy,length=1087>
+##contig=<ID=chrUn_JTFH01000857v1_decoy,length=1086>
+##contig=<ID=chrUn_JTFH01000858v1_decoy,length=1085>
+##contig=<ID=chrUn_JTFH01000859v1_decoy,length=1084>
+##contig=<ID=chrUn_JTFH01000860v1_decoy,length=1084>
+##contig=<ID=chrUn_JTFH01000861v1_decoy,length=1084>
+##contig=<ID=chrUn_JTFH01000862v1_decoy,length=1084>
+##contig=<ID=chrUn_JTFH01000863v1_decoy,length=1083>
+##contig=<ID=chrUn_JTFH01000864v1_decoy,length=1083>
+##contig=<ID=chrUn_JTFH01000865v1_decoy,length=1082>
+##contig=<ID=chrUn_JTFH01000866v1_decoy,length=1082>
+##contig=<ID=chrUn_JTFH01000867v1_decoy,length=1081>
+##contig=<ID=chrUn_JTFH01000868v1_decoy,length=1081>
+##contig=<ID=chrUn_JTFH01000869v1_decoy,length=1079>
+##contig=<ID=chrUn_JTFH01000870v1_decoy,length=1076>
+##contig=<ID=chrUn_JTFH01000871v1_decoy,length=1074>
+##contig=<ID=chrUn_JTFH01000872v1_decoy,length=1073>
+##contig=<ID=chrUn_JTFH01000873v1_decoy,length=1073>
+##contig=<ID=chrUn_JTFH01000874v1_decoy,length=1071>
+##contig=<ID=chrUn_JTFH01000875v1_decoy,length=1069>
+##contig=<ID=chrUn_JTFH01000876v1_decoy,length=1067>
+##contig=<ID=chrUn_JTFH01000877v1_decoy,length=1067>
+##contig=<ID=chrUn_JTFH01000878v1_decoy,length=1067>
+##contig=<ID=chrUn_JTFH01000879v1_decoy,length=1066>
+##contig=<ID=chrUn_JTFH01000880v1_decoy,length=1065>
+##contig=<ID=chrUn_JTFH01000881v1_decoy,length=1065>
+##contig=<ID=chrUn_JTFH01000882v1_decoy,length=1065>
+##contig=<ID=chrUn_JTFH01000883v1_decoy,length=1065>
+##contig=<ID=chrUn_JTFH01000884v1_decoy,length=1065>
+##contig=<ID=chrUn_JTFH01000885v1_decoy,length=1064>
+##contig=<ID=chrUn_JTFH01000886v1_decoy,length=1064>
+##contig=<ID=chrUn_JTFH01000887v1_decoy,length=1064>
+##contig=<ID=chrUn_JTFH01000888v1_decoy,length=1063>
+##contig=<ID=chrUn_JTFH01000889v1_decoy,length=1062>
+##contig=<ID=chrUn_JTFH01000890v1_decoy,length=1062>
+##contig=<ID=chrUn_JTFH01000891v1_decoy,length=1062>
+##contig=<ID=chrUn_JTFH01000892v1_decoy,length=1061>
+##contig=<ID=chrUn_JTFH01000893v1_decoy,length=1060>
+##contig=<ID=chrUn_JTFH01000894v1_decoy,length=1057>
+##contig=<ID=chrUn_JTFH01000895v1_decoy,length=1057>
+##contig=<ID=chrUn_JTFH01000896v1_decoy,length=1056>
+##contig=<ID=chrUn_JTFH01000897v1_decoy,length=1055>
+##contig=<ID=chrUn_JTFH01000898v1_decoy,length=1055>
+##contig=<ID=chrUn_JTFH01000899v1_decoy,length=1055>
+##contig=<ID=chrUn_JTFH01000900v1_decoy,length=1055>
+##contig=<ID=chrUn_JTFH01000901v1_decoy,length=1054>
+##contig=<ID=chrUn_JTFH01000902v1_decoy,length=1051>
+##contig=<ID=chrUn_JTFH01000903v1_decoy,length=1050>
+##contig=<ID=chrUn_JTFH01000904v1_decoy,length=1050>
+##contig=<ID=chrUn_JTFH01000905v1_decoy,length=1049>
+##contig=<ID=chrUn_JTFH01000906v1_decoy,length=1048>
+##contig=<ID=chrUn_JTFH01000907v1_decoy,length=1047>
+##contig=<ID=chrUn_JTFH01000908v1_decoy,length=1046>
+##contig=<ID=chrUn_JTFH01000909v1_decoy,length=1046>
+##contig=<ID=chrUn_JTFH01000910v1_decoy,length=1046>
+##contig=<ID=chrUn_JTFH01000911v1_decoy,length=1045>
+##contig=<ID=chrUn_JTFH01000912v1_decoy,length=1045>
+##contig=<ID=chrUn_JTFH01000913v1_decoy,length=1045>
+##contig=<ID=chrUn_JTFH01000914v1_decoy,length=1044>
+##contig=<ID=chrUn_JTFH01000915v1_decoy,length=1042>
+##contig=<ID=chrUn_JTFH01000916v1_decoy,length=1041>
+##contig=<ID=chrUn_JTFH01000917v1_decoy,length=1039>
+##contig=<ID=chrUn_JTFH01000918v1_decoy,length=1039>
+##contig=<ID=chrUn_JTFH01000919v1_decoy,length=1038>
+##contig=<ID=chrUn_JTFH01000920v1_decoy,length=1036>
+##contig=<ID=chrUn_JTFH01000921v1_decoy,length=1036>
+##contig=<ID=chrUn_JTFH01000922v1_decoy,length=1035>
+##contig=<ID=chrUn_JTFH01000923v1_decoy,length=1035>
+##contig=<ID=chrUn_JTFH01000924v1_decoy,length=1033>
+##contig=<ID=chrUn_JTFH01000925v1_decoy,length=1032>
+##contig=<ID=chrUn_JTFH01000926v1_decoy,length=1031>
+##contig=<ID=chrUn_JTFH01000927v1_decoy,length=1031>
+##contig=<ID=chrUn_JTFH01000928v1_decoy,length=1031>
+##contig=<ID=chrUn_JTFH01000929v1_decoy,length=1027>
+##contig=<ID=chrUn_JTFH01000930v1_decoy,length=1027>
+##contig=<ID=chrUn_JTFH01000931v1_decoy,length=1026>
+##contig=<ID=chrUn_JTFH01000932v1_decoy,length=1026>
+##contig=<ID=chrUn_JTFH01000933v1_decoy,length=1024>
+##contig=<ID=chrUn_JTFH01000934v1_decoy,length=1024>
+##contig=<ID=chrUn_JTFH01000935v1_decoy,length=1022>
+##contig=<ID=chrUn_JTFH01000936v1_decoy,length=1022>
+##contig=<ID=chrUn_JTFH01000937v1_decoy,length=1021>
+##contig=<ID=chrUn_JTFH01000938v1_decoy,length=1020>
+##contig=<ID=chrUn_JTFH01000939v1_decoy,length=1019>
+##contig=<ID=chrUn_JTFH01000940v1_decoy,length=1018>
+##contig=<ID=chrUn_JTFH01000941v1_decoy,length=1018>
+##contig=<ID=chrUn_JTFH01000942v1_decoy,length=1018>
+##contig=<ID=chrUn_JTFH01000943v1_decoy,length=1016>
+##contig=<ID=chrUn_JTFH01000944v1_decoy,length=1010>
+##contig=<ID=chrUn_JTFH01000945v1_decoy,length=1010>
+##contig=<ID=chrUn_JTFH01000946v1_decoy,length=1009>
+##contig=<ID=chrUn_JTFH01000947v1_decoy,length=1008>
+##contig=<ID=chrUn_JTFH01000948v1_decoy,length=1007>
+##contig=<ID=chrUn_JTFH01000949v1_decoy,length=1006>
+##contig=<ID=chrUn_JTFH01000950v1_decoy,length=1005>
+##contig=<ID=chrUn_JTFH01000951v1_decoy,length=1005>
+##contig=<ID=chrUn_JTFH01000952v1_decoy,length=1004>
+##contig=<ID=chrUn_JTFH01000953v1_decoy,length=1004>
+##contig=<ID=chrUn_JTFH01000954v1_decoy,length=1003>
+##contig=<ID=chrUn_JTFH01000955v1_decoy,length=1003>
+##contig=<ID=chrUn_JTFH01000956v1_decoy,length=1003>
+##contig=<ID=chrUn_JTFH01000957v1_decoy,length=1003>
+##contig=<ID=chrUn_JTFH01000958v1_decoy,length=1002>
+##contig=<ID=chrUn_JTFH01000959v1_decoy,length=1002>
+##contig=<ID=chrUn_JTFH01000960v1_decoy,length=1000>
+##contig=<ID=chrUn_JTFH01000961v1_decoy,length=1000>
+##contig=<ID=chrUn_JTFH01000962v1_decoy,length=8358>
+##contig=<ID=chrUn_JTFH01000963v1_decoy,length=7932>
+##contig=<ID=chrUn_JTFH01000964v1_decoy,length=6846>
+##contig=<ID=chrUn_JTFH01000965v1_decoy,length=4591>
+##contig=<ID=chrUn_JTFH01000966v1_decoy,length=4041>
+##contig=<ID=chrUn_JTFH01000967v1_decoy,length=3841>
+##contig=<ID=chrUn_JTFH01000968v1_decoy,length=3754>
+##contig=<ID=chrUn_JTFH01000969v1_decoy,length=3743>
+##contig=<ID=chrUn_JTFH01000970v1_decoy,length=3702>
+##contig=<ID=chrUn_JTFH01000971v1_decoy,length=3625>
+##contig=<ID=chrUn_JTFH01000972v1_decoy,length=3529>
+##contig=<ID=chrUn_JTFH01000973v1_decoy,length=3508>
+##contig=<ID=chrUn_JTFH01000974v1_decoy,length=3359>
+##contig=<ID=chrUn_JTFH01000975v1_decoy,length=3320>
+##contig=<ID=chrUn_JTFH01000976v1_decoy,length=3231>
+##contig=<ID=chrUn_JTFH01000977v1_decoy,length=3220>
+##contig=<ID=chrUn_JTFH01000978v1_decoy,length=3212>
+##contig=<ID=chrUn_JTFH01000979v1_decoy,length=3192>
+##contig=<ID=chrUn_JTFH01000980v1_decoy,length=3092>
+##contig=<ID=chrUn_JTFH01000981v1_decoy,length=3087>
+##contig=<ID=chrUn_JTFH01000982v1_decoy,length=3048>
+##contig=<ID=chrUn_JTFH01000983v1_decoy,length=3005>
+##contig=<ID=chrUn_JTFH01000984v1_decoy,length=3004>
+##contig=<ID=chrUn_JTFH01000985v1_decoy,length=2959>
+##contig=<ID=chrUn_JTFH01000986v1_decoy,length=2934>
+##contig=<ID=chrUn_JTFH01000987v1_decoy,length=2933>
+##contig=<ID=chrUn_JTFH01000988v1_decoy,length=2827>
+##contig=<ID=chrUn_JTFH01000989v1_decoy,length=2794>
+##contig=<ID=chrUn_JTFH01000990v1_decoy,length=2749>
+##contig=<ID=chrUn_JTFH01000991v1_decoy,length=2745>
+##contig=<ID=chrUn_JTFH01000992v1_decoy,length=2733>
+##contig=<ID=chrUn_JTFH01000993v1_decoy,length=2698>
+##contig=<ID=chrUn_JTFH01000994v1_decoy,length=2665>
+##contig=<ID=chrUn_JTFH01000995v1_decoy,length=2634>
+##contig=<ID=chrUn_JTFH01000996v1_decoy,length=2492>
+##contig=<ID=chrUn_JTFH01000997v1_decoy,length=2489>
+##contig=<ID=chrUn_JTFH01000998v1_decoy,length=2468>
+##contig=<ID=chrUn_JTFH01000999v1_decoy,length=2414>
+##contig=<ID=chrUn_JTFH01001000v1_decoy,length=2395>
+##contig=<ID=chrUn_JTFH01001001v1_decoy,length=2356>
+##contig=<ID=chrUn_JTFH01001002v1_decoy,length=2339>
+##contig=<ID=chrUn_JTFH01001003v1_decoy,length=2310>
+##contig=<ID=chrUn_JTFH01001004v1_decoy,length=2288>
+##contig=<ID=chrUn_JTFH01001005v1_decoy,length=2285>
+##contig=<ID=chrUn_JTFH01001006v1_decoy,length=2269>
+##contig=<ID=chrUn_JTFH01001007v1_decoy,length=2253>
+##contig=<ID=chrUn_JTFH01001008v1_decoy,length=2203>
+##contig=<ID=chrUn_JTFH01001009v1_decoy,length=2176>
+##contig=<ID=chrUn_JTFH01001010v1_decoy,length=2159>
+##contig=<ID=chrUn_JTFH01001011v1_decoy,length=2155>
+##contig=<ID=chrUn_JTFH01001012v1_decoy,length=2149>
+##contig=<ID=chrUn_JTFH01001013v1_decoy,length=2129>
+##contig=<ID=chrUn_JTFH01001014v1_decoy,length=2116>
+##contig=<ID=chrUn_JTFH01001015v1_decoy,length=2113>
+##contig=<ID=chrUn_JTFH01001016v1_decoy,length=2098>
+##contig=<ID=chrUn_JTFH01001017v1_decoy,length=2066>
+##contig=<ID=chrUn_JTFH01001018v1_decoy,length=2066>
+##contig=<ID=chrUn_JTFH01001019v1_decoy,length=2059>
+##contig=<ID=chrUn_JTFH01001020v1_decoy,length=2047>
+##contig=<ID=chrUn_JTFH01001021v1_decoy,length=2040>
+##contig=<ID=chrUn_JTFH01001022v1_decoy,length=2030>
+##contig=<ID=chrUn_JTFH01001023v1_decoy,length=2024>
+##contig=<ID=chrUn_JTFH01001024v1_decoy,length=2001>
+##contig=<ID=chrUn_JTFH01001025v1_decoy,length=1992>
+##contig=<ID=chrUn_JTFH01001026v1_decoy,length=1981>
+##contig=<ID=chrUn_JTFH01001027v1_decoy,length=1979>
+##contig=<ID=chrUn_JTFH01001028v1_decoy,length=1957>
+##contig=<ID=chrUn_JTFH01001029v1_decoy,length=1953>
+##contig=<ID=chrUn_JTFH01001030v1_decoy,length=1944>
+##contig=<ID=chrUn_JTFH01001031v1_decoy,length=1936>
+##contig=<ID=chrUn_JTFH01001032v1_decoy,length=1932>
+##contig=<ID=chrUn_JTFH01001033v1_decoy,length=1882>
+##contig=<ID=chrUn_JTFH01001034v1_decoy,length=1878>
+##contig=<ID=chrUn_JTFH01001035v1_decoy,length=1870>
+##contig=<ID=chrUn_JTFH01001036v1_decoy,length=1821>
+##contig=<ID=chrUn_JTFH01001037v1_decoy,length=1813>
+##contig=<ID=chrUn_JTFH01001038v1_decoy,length=1809>
+##contig=<ID=chrUn_JTFH01001039v1_decoy,length=1804>
+##contig=<ID=chrUn_JTFH01001040v1_decoy,length=1797>
+##contig=<ID=chrUn_JTFH01001041v1_decoy,length=1791>
+##contig=<ID=chrUn_JTFH01001042v1_decoy,length=1781>
+##contig=<ID=chrUn_JTFH01001043v1_decoy,length=1766>
+##contig=<ID=chrUn_JTFH01001044v1_decoy,length=1764>
+##contig=<ID=chrUn_JTFH01001045v1_decoy,length=1743>
+##contig=<ID=chrUn_JTFH01001046v1_decoy,length=1741>
+##contig=<ID=chrUn_JTFH01001047v1_decoy,length=1709>
+##contig=<ID=chrUn_JTFH01001048v1_decoy,length=1706>
+##contig=<ID=chrUn_JTFH01001049v1_decoy,length=1701>
+##contig=<ID=chrUn_JTFH01001050v1_decoy,length=1689>
+##contig=<ID=chrUn_JTFH01001051v1_decoy,length=1646>
+##contig=<ID=chrUn_JTFH01001052v1_decoy,length=1641>
+##contig=<ID=chrUn_JTFH01001053v1_decoy,length=1639>
+##contig=<ID=chrUn_JTFH01001054v1_decoy,length=1636>
+##contig=<ID=chrUn_JTFH01001055v1_decoy,length=1632>
+##contig=<ID=chrUn_JTFH01001056v1_decoy,length=1629>
+##contig=<ID=chrUn_JTFH01001057v1_decoy,length=1623>
+##contig=<ID=chrUn_JTFH01001058v1_decoy,length=1622>
+##contig=<ID=chrUn_JTFH01001059v1_decoy,length=1622>
+##contig=<ID=chrUn_JTFH01001060v1_decoy,length=1619>
+##contig=<ID=chrUn_JTFH01001061v1_decoy,length=1606>
+##contig=<ID=chrUn_JTFH01001062v1_decoy,length=1593>
+##contig=<ID=chrUn_JTFH01001063v1_decoy,length=1592>
+##contig=<ID=chrUn_JTFH01001064v1_decoy,length=1558>
+##contig=<ID=chrUn_JTFH01001065v1_decoy,length=1545>
+##contig=<ID=chrUn_JTFH01001066v1_decoy,length=1542>
+##contig=<ID=chrUn_JTFH01001067v1_decoy,length=1540>
+##contig=<ID=chrUn_JTFH01001068v1_decoy,length=1529>
+##contig=<ID=chrUn_JTFH01001069v1_decoy,length=1518>
+##contig=<ID=chrUn_JTFH01001070v1_decoy,length=1515>
+##contig=<ID=chrUn_JTFH01001071v1_decoy,length=1513>
+##contig=<ID=chrUn_JTFH01001072v1_decoy,length=1507>
+##contig=<ID=chrUn_JTFH01001073v1_decoy,length=1504>
+##contig=<ID=chrUn_JTFH01001074v1_decoy,length=1499>
+##contig=<ID=chrUn_JTFH01001075v1_decoy,length=1495>
+##contig=<ID=chrUn_JTFH01001076v1_decoy,length=1495>
+##contig=<ID=chrUn_JTFH01001077v1_decoy,length=1492>
+##contig=<ID=chrUn_JTFH01001078v1_decoy,length=1492>
+##contig=<ID=chrUn_JTFH01001079v1_decoy,length=1489>
+##contig=<ID=chrUn_JTFH01001080v1_decoy,length=1485>
+##contig=<ID=chrUn_JTFH01001081v1_decoy,length=1483>
+##contig=<ID=chrUn_JTFH01001082v1_decoy,length=1473>
+##contig=<ID=chrUn_JTFH01001083v1_decoy,length=1470>
+##contig=<ID=chrUn_JTFH01001084v1_decoy,length=1463>
+##contig=<ID=chrUn_JTFH01001085v1_decoy,length=1460>
+##contig=<ID=chrUn_JTFH01001086v1_decoy,length=1458>
+##contig=<ID=chrUn_JTFH01001087v1_decoy,length=1456>
+##contig=<ID=chrUn_JTFH01001088v1_decoy,length=1453>
+##contig=<ID=chrUn_JTFH01001089v1_decoy,length=1443>
+##contig=<ID=chrUn_JTFH01001090v1_decoy,length=1441>
+##contig=<ID=chrUn_JTFH01001091v1_decoy,length=1426>
+##contig=<ID=chrUn_JTFH01001092v1_decoy,length=1425>
+##contig=<ID=chrUn_JTFH01001093v1_decoy,length=1418>
+##contig=<ID=chrUn_JTFH01001094v1_decoy,length=1413>
+##contig=<ID=chrUn_JTFH01001095v1_decoy,length=1413>
+##contig=<ID=chrUn_JTFH01001096v1_decoy,length=1412>
+##contig=<ID=chrUn_JTFH01001097v1_decoy,length=1407>
+##contig=<ID=chrUn_JTFH01001098v1_decoy,length=1406>
+##contig=<ID=chrUn_JTFH01001099v1_decoy,length=1396>
+##contig=<ID=chrUn_JTFH01001100v1_decoy,length=1390>
+##contig=<ID=chrUn_JTFH01001101v1_decoy,length=1382>
+##contig=<ID=chrUn_JTFH01001102v1_decoy,length=1376>
+##contig=<ID=chrUn_JTFH01001103v1_decoy,length=1375>
+##contig=<ID=chrUn_JTFH01001104v1_decoy,length=1371>
+##contig=<ID=chrUn_JTFH01001105v1_decoy,length=1367>
+##contig=<ID=chrUn_JTFH01001106v1_decoy,length=1364>
+##contig=<ID=chrUn_JTFH01001107v1_decoy,length=1356>
+##contig=<ID=chrUn_JTFH01001108v1_decoy,length=1355>
+##contig=<ID=chrUn_JTFH01001109v1_decoy,length=1352>
+##contig=<ID=chrUn_JTFH01001110v1_decoy,length=1350>
+##contig=<ID=chrUn_JTFH01001111v1_decoy,length=1346>
+##contig=<ID=chrUn_JTFH01001112v1_decoy,length=1345>
+##contig=<ID=chrUn_JTFH01001113v1_decoy,length=1340>
+##contig=<ID=chrUn_JTFH01001114v1_decoy,length=1330>
+##contig=<ID=chrUn_JTFH01001115v1_decoy,length=1329>
+##contig=<ID=chrUn_JTFH01001116v1_decoy,length=1324>
+##contig=<ID=chrUn_JTFH01001117v1_decoy,length=1316>
+##contig=<ID=chrUn_JTFH01001118v1_decoy,length=1307>
+##contig=<ID=chrUn_JTFH01001119v1_decoy,length=1304>
+##contig=<ID=chrUn_JTFH01001120v1_decoy,length=1304>
+##contig=<ID=chrUn_JTFH01001121v1_decoy,length=1303>
+##contig=<ID=chrUn_JTFH01001122v1_decoy,length=1301>
+##contig=<ID=chrUn_JTFH01001123v1_decoy,length=1300>
+##contig=<ID=chrUn_JTFH01001124v1_decoy,length=1297>
+##contig=<ID=chrUn_JTFH01001125v1_decoy,length=1296>
+##contig=<ID=chrUn_JTFH01001126v1_decoy,length=1290>
+##contig=<ID=chrUn_JTFH01001127v1_decoy,length=1284>
+##contig=<ID=chrUn_JTFH01001128v1_decoy,length=1282>
+##contig=<ID=chrUn_JTFH01001129v1_decoy,length=1281>
+##contig=<ID=chrUn_JTFH01001130v1_decoy,length=1280>
+##contig=<ID=chrUn_JTFH01001131v1_decoy,length=1279>
+##contig=<ID=chrUn_JTFH01001132v1_decoy,length=1272>
+##contig=<ID=chrUn_JTFH01001133v1_decoy,length=1267>
+##contig=<ID=chrUn_JTFH01001134v1_decoy,length=1267>
+##contig=<ID=chrUn_JTFH01001135v1_decoy,length=1266>
+##contig=<ID=chrUn_JTFH01001136v1_decoy,length=1264>
+##contig=<ID=chrUn_JTFH01001137v1_decoy,length=1264>
+##contig=<ID=chrUn_JTFH01001138v1_decoy,length=1264>
+##contig=<ID=chrUn_JTFH01001139v1_decoy,length=1263>
+##contig=<ID=chrUn_JTFH01001140v1_decoy,length=1249>
+##contig=<ID=chrUn_JTFH01001141v1_decoy,length=1240>
+##contig=<ID=chrUn_JTFH01001142v1_decoy,length=1239>
+##contig=<ID=chrUn_JTFH01001143v1_decoy,length=1235>
+##contig=<ID=chrUn_JTFH01001144v1_decoy,length=1235>
+##contig=<ID=chrUn_JTFH01001145v1_decoy,length=1233>
+##contig=<ID=chrUn_JTFH01001146v1_decoy,length=1232>
+##contig=<ID=chrUn_JTFH01001147v1_decoy,length=1230>
+##contig=<ID=chrUn_JTFH01001148v1_decoy,length=1226>
+##contig=<ID=chrUn_JTFH01001149v1_decoy,length=1223>
+##contig=<ID=chrUn_JTFH01001150v1_decoy,length=1214>
+##contig=<ID=chrUn_JTFH01001151v1_decoy,length=1213>
+##contig=<ID=chrUn_JTFH01001152v1_decoy,length=1211>
+##contig=<ID=chrUn_JTFH01001153v1_decoy,length=1209>
+##contig=<ID=chrUn_JTFH01001154v1_decoy,length=1202>
+##contig=<ID=chrUn_JTFH01001155v1_decoy,length=1199>
+##contig=<ID=chrUn_JTFH01001156v1_decoy,length=1197>
+##contig=<ID=chrUn_JTFH01001157v1_decoy,length=1193>
+##contig=<ID=chrUn_JTFH01001158v1_decoy,length=1191>
+##contig=<ID=chrUn_JTFH01001159v1_decoy,length=1187>
+##contig=<ID=chrUn_JTFH01001160v1_decoy,length=1186>
+##contig=<ID=chrUn_JTFH01001161v1_decoy,length=1184>
+##contig=<ID=chrUn_JTFH01001162v1_decoy,length=1184>
+##contig=<ID=chrUn_JTFH01001163v1_decoy,length=1182>
+##contig=<ID=chrUn_JTFH01001164v1_decoy,length=1179>
+##contig=<ID=chrUn_JTFH01001165v1_decoy,length=1173>
+##contig=<ID=chrUn_JTFH01001166v1_decoy,length=1169>
+##contig=<ID=chrUn_JTFH01001167v1_decoy,length=1167>
+##contig=<ID=chrUn_JTFH01001168v1_decoy,length=1166>
+##contig=<ID=chrUn_JTFH01001169v1_decoy,length=1165>
+##contig=<ID=chrUn_JTFH01001170v1_decoy,length=1164>
+##contig=<ID=chrUn_JTFH01001171v1_decoy,length=1163>
+##contig=<ID=chrUn_JTFH01001172v1_decoy,length=1158>
+##contig=<ID=chrUn_JTFH01001173v1_decoy,length=1158>
+##contig=<ID=chrUn_JTFH01001174v1_decoy,length=1157>
+##contig=<ID=chrUn_JTFH01001175v1_decoy,length=1157>
+##contig=<ID=chrUn_JTFH01001176v1_decoy,length=1157>
+##contig=<ID=chrUn_JTFH01001177v1_decoy,length=1155>
+##contig=<ID=chrUn_JTFH01001178v1_decoy,length=1154>
+##contig=<ID=chrUn_JTFH01001179v1_decoy,length=1149>
+##contig=<ID=chrUn_JTFH01001180v1_decoy,length=1148>
+##contig=<ID=chrUn_JTFH01001181v1_decoy,length=1148>
+##contig=<ID=chrUn_JTFH01001182v1_decoy,length=1146>
+##contig=<ID=chrUn_JTFH01001183v1_decoy,length=1144>
+##contig=<ID=chrUn_JTFH01001184v1_decoy,length=1140>
+##contig=<ID=chrUn_JTFH01001185v1_decoy,length=1136>
+##contig=<ID=chrUn_JTFH01001186v1_decoy,length=1134>
+##contig=<ID=chrUn_JTFH01001187v1_decoy,length=1133>
+##contig=<ID=chrUn_JTFH01001188v1_decoy,length=1129>
+##contig=<ID=chrUn_JTFH01001189v1_decoy,length=1127>
+##contig=<ID=chrUn_JTFH01001190v1_decoy,length=1127>
+##contig=<ID=chrUn_JTFH01001191v1_decoy,length=1118>
+##contig=<ID=chrUn_JTFH01001192v1_decoy,length=1110>
+##contig=<ID=chrUn_JTFH01001193v1_decoy,length=1104>
+##contig=<ID=chrUn_JTFH01001194v1_decoy,length=1104>
+##contig=<ID=chrUn_JTFH01001195v1_decoy,length=1101>
+##contig=<ID=chrUn_JTFH01001196v1_decoy,length=1098>
+##contig=<ID=chrUn_JTFH01001197v1_decoy,length=1096>
+##contig=<ID=chrUn_JTFH01001198v1_decoy,length=1094>
+##contig=<ID=chrUn_JTFH01001199v1_decoy,length=1091>
+##contig=<ID=chrUn_JTFH01001200v1_decoy,length=1089>
+##contig=<ID=chrUn_JTFH01001201v1_decoy,length=1086>
+##contig=<ID=chrUn_JTFH01001202v1_decoy,length=1085>
+##contig=<ID=chrUn_JTFH01001203v1_decoy,length=1084>
+##contig=<ID=chrUn_JTFH01001204v1_decoy,length=1083>
+##contig=<ID=chrUn_JTFH01001205v1_decoy,length=1083>
+##contig=<ID=chrUn_JTFH01001206v1_decoy,length=1079>
+##contig=<ID=chrUn_JTFH01001207v1_decoy,length=1076>
+##contig=<ID=chrUn_JTFH01001208v1_decoy,length=1069>
+##contig=<ID=chrUn_JTFH01001209v1_decoy,length=1068>
+##contig=<ID=chrUn_JTFH01001210v1_decoy,length=1067>
+##contig=<ID=chrUn_JTFH01001211v1_decoy,length=1067>
+##contig=<ID=chrUn_JTFH01001212v1_decoy,length=1067>
+##contig=<ID=chrUn_JTFH01001213v1_decoy,length=1063>
+##contig=<ID=chrUn_JTFH01001214v1_decoy,length=1062>
+##contig=<ID=chrUn_JTFH01001215v1_decoy,length=1059>
+##contig=<ID=chrUn_JTFH01001216v1_decoy,length=1058>
+##contig=<ID=chrUn_JTFH01001217v1_decoy,length=1058>
+##contig=<ID=chrUn_JTFH01001218v1_decoy,length=1055>
+##contig=<ID=chrUn_JTFH01001219v1_decoy,length=1054>
+##contig=<ID=chrUn_JTFH01001220v1_decoy,length=1054>
+##contig=<ID=chrUn_JTFH01001221v1_decoy,length=1053>
+##contig=<ID=chrUn_JTFH01001222v1_decoy,length=1053>
+##contig=<ID=chrUn_JTFH01001223v1_decoy,length=1052>
+##contig=<ID=chrUn_JTFH01001224v1_decoy,length=1051>
+##contig=<ID=chrUn_JTFH01001225v1_decoy,length=1049>
+##contig=<ID=chrUn_JTFH01001226v1_decoy,length=1047>
+##contig=<ID=chrUn_JTFH01001227v1_decoy,length=1044>
+##contig=<ID=chrUn_JTFH01001228v1_decoy,length=1043>
+##contig=<ID=chrUn_JTFH01001229v1_decoy,length=1043>
+##contig=<ID=chrUn_JTFH01001230v1_decoy,length=1042>
+##contig=<ID=chrUn_JTFH01001231v1_decoy,length=1042>
+##contig=<ID=chrUn_JTFH01001232v1_decoy,length=1041>
+##contig=<ID=chrUn_JTFH01001233v1_decoy,length=1040>
+##contig=<ID=chrUn_JTFH01001234v1_decoy,length=1039>
+##contig=<ID=chrUn_JTFH01001235v1_decoy,length=1038>
+##contig=<ID=chrUn_JTFH01001236v1_decoy,length=1037>
+##contig=<ID=chrUn_JTFH01001237v1_decoy,length=1037>
+##contig=<ID=chrUn_JTFH01001238v1_decoy,length=1035>
+##contig=<ID=chrUn_JTFH01001239v1_decoy,length=1027>
+##contig=<ID=chrUn_JTFH01001240v1_decoy,length=1021>
+##contig=<ID=chrUn_JTFH01001241v1_decoy,length=1021>
+##contig=<ID=chrUn_JTFH01001242v1_decoy,length=1019>
+##contig=<ID=chrUn_JTFH01001243v1_decoy,length=1019>
+##contig=<ID=chrUn_JTFH01001244v1_decoy,length=1016>
+##contig=<ID=chrUn_JTFH01001245v1_decoy,length=1014>
+##contig=<ID=chrUn_JTFH01001246v1_decoy,length=1013>
+##contig=<ID=chrUn_JTFH01001247v1_decoy,length=1009>
+##contig=<ID=chrUn_JTFH01001248v1_decoy,length=1008>
+##contig=<ID=chrUn_JTFH01001249v1_decoy,length=1007>
+##contig=<ID=chrUn_JTFH01001250v1_decoy,length=1004>
+##contig=<ID=chrUn_JTFH01001251v1_decoy,length=1004>
+##contig=<ID=chrUn_JTFH01001252v1_decoy,length=1003>
+##contig=<ID=chrUn_JTFH01001253v1_decoy,length=1001>
+##contig=<ID=chrUn_JTFH01001254v1_decoy,length=1000>
+##contig=<ID=chrUn_JTFH01001255v1_decoy,length=1000>
+##contig=<ID=chrUn_JTFH01001256v1_decoy,length=1000>
+##contig=<ID=chrUn_JTFH01001257v1_decoy,length=17929>
+##contig=<ID=chrUn_JTFH01001258v1_decoy,length=9749>
+##contig=<ID=chrUn_JTFH01001259v1_decoy,length=8053>
+##contig=<ID=chrUn_JTFH01001260v1_decoy,length=7826>
+##contig=<ID=chrUn_JTFH01001261v1_decoy,length=7768>
+##contig=<ID=chrUn_JTFH01001262v1_decoy,length=5691>
+##contig=<ID=chrUn_JTFH01001263v1_decoy,length=5444>
+##contig=<ID=chrUn_JTFH01001264v1_decoy,length=5077>
+##contig=<ID=chrUn_JTFH01001265v1_decoy,length=4990>
+##contig=<ID=chrUn_JTFH01001266v1_decoy,length=4545>
+##contig=<ID=chrUn_JTFH01001267v1_decoy,length=4544>
+##contig=<ID=chrUn_JTFH01001268v1_decoy,length=4202>
+##contig=<ID=chrUn_JTFH01001269v1_decoy,length=4195>
+##contig=<ID=chrUn_JTFH01001270v1_decoy,length=3807>
+##contig=<ID=chrUn_JTFH01001271v1_decoy,length=3741>
+##contig=<ID=chrUn_JTFH01001272v1_decoy,length=3699>
+##contig=<ID=chrUn_JTFH01001273v1_decoy,length=3640>
+##contig=<ID=chrUn_JTFH01001274v1_decoy,length=3531>
+##contig=<ID=chrUn_JTFH01001275v1_decoy,length=3455>
+##contig=<ID=chrUn_JTFH01001276v1_decoy,length=3411>
+##contig=<ID=chrUn_JTFH01001277v1_decoy,length=3387>
+##contig=<ID=chrUn_JTFH01001278v1_decoy,length=3358>
+##contig=<ID=chrUn_JTFH01001279v1_decoy,length=3285>
+##contig=<ID=chrUn_JTFH01001280v1_decoy,length=3273>
+##contig=<ID=chrUn_JTFH01001281v1_decoy,length=3262>
+##contig=<ID=chrUn_JTFH01001282v1_decoy,length=3259>
+##contig=<ID=chrUn_JTFH01001283v1_decoy,length=3222>
+##contig=<ID=chrUn_JTFH01001284v1_decoy,length=3127>
+##contig=<ID=chrUn_JTFH01001285v1_decoy,length=3110>
+##contig=<ID=chrUn_JTFH01001286v1_decoy,length=3104>
+##contig=<ID=chrUn_JTFH01001287v1_decoy,length=3071>
+##contig=<ID=chrUn_JTFH01001288v1_decoy,length=3063>
+##contig=<ID=chrUn_JTFH01001289v1_decoy,length=3059>
+##contig=<ID=chrUn_JTFH01001290v1_decoy,length=2990>
+##contig=<ID=chrUn_JTFH01001291v1_decoy,length=2986>
+##contig=<ID=chrUn_JTFH01001292v1_decoy,length=2928>
+##contig=<ID=chrUn_JTFH01001293v1_decoy,length=2922>
+##contig=<ID=chrUn_JTFH01001294v1_decoy,length=2875>
+##contig=<ID=chrUn_JTFH01001295v1_decoy,length=2859>
+##contig=<ID=chrUn_JTFH01001296v1_decoy,length=2850>
+##contig=<ID=chrUn_JTFH01001297v1_decoy,length=2813>
+##contig=<ID=chrUn_JTFH01001298v1_decoy,length=2785>
+##contig=<ID=chrUn_JTFH01001299v1_decoy,length=2736>
+##contig=<ID=chrUn_JTFH01001300v1_decoy,length=2688>
+##contig=<ID=chrUn_JTFH01001301v1_decoy,length=2658>
+##contig=<ID=chrUn_JTFH01001302v1_decoy,length=2643>
+##contig=<ID=chrUn_JTFH01001303v1_decoy,length=2618>
+##contig=<ID=chrUn_JTFH01001304v1_decoy,length=2605>
+##contig=<ID=chrUn_JTFH01001305v1_decoy,length=2583>
+##contig=<ID=chrUn_JTFH01001306v1_decoy,length=2534>
+##contig=<ID=chrUn_JTFH01001307v1_decoy,length=2512>
+##contig=<ID=chrUn_JTFH01001308v1_decoy,length=2500>
+##contig=<ID=chrUn_JTFH01001309v1_decoy,length=2481>
+##contig=<ID=chrUn_JTFH01001310v1_decoy,length=2478>
+##contig=<ID=chrUn_JTFH01001311v1_decoy,length=2473>
+##contig=<ID=chrUn_JTFH01001312v1_decoy,length=2467>
+##contig=<ID=chrUn_JTFH01001313v1_decoy,length=2442>
+##contig=<ID=chrUn_JTFH01001314v1_decoy,length=2430>
+##contig=<ID=chrUn_JTFH01001315v1_decoy,length=2417>
+##contig=<ID=chrUn_JTFH01001316v1_decoy,length=2408>
+##contig=<ID=chrUn_JTFH01001317v1_decoy,length=2395>
+##contig=<ID=chrUn_JTFH01001318v1_decoy,length=2352>
+##contig=<ID=chrUn_JTFH01001319v1_decoy,length=2337>
+##contig=<ID=chrUn_JTFH01001320v1_decoy,length=2322>
+##contig=<ID=chrUn_JTFH01001321v1_decoy,length=2307>
+##contig=<ID=chrUn_JTFH01001322v1_decoy,length=2306>
+##contig=<ID=chrUn_JTFH01001323v1_decoy,length=2292>
+##contig=<ID=chrUn_JTFH01001324v1_decoy,length=2271>
+##contig=<ID=chrUn_JTFH01001325v1_decoy,length=2265>
+##contig=<ID=chrUn_JTFH01001326v1_decoy,length=2260>
+##contig=<ID=chrUn_JTFH01001327v1_decoy,length=2240>
+##contig=<ID=chrUn_JTFH01001328v1_decoy,length=2238>
+##contig=<ID=chrUn_JTFH01001329v1_decoy,length=2228>
+##contig=<ID=chrUn_JTFH01001330v1_decoy,length=2215>
+##contig=<ID=chrUn_JTFH01001331v1_decoy,length=2205>
+##contig=<ID=chrUn_JTFH01001332v1_decoy,length=2191>
+##contig=<ID=chrUn_JTFH01001333v1_decoy,length=2191>
+##contig=<ID=chrUn_JTFH01001334v1_decoy,length=2190>
+##contig=<ID=chrUn_JTFH01001335v1_decoy,length=2184>
+##contig=<ID=chrUn_JTFH01001336v1_decoy,length=2166>
+##contig=<ID=chrUn_JTFH01001337v1_decoy,length=2165>
+##contig=<ID=chrUn_JTFH01001338v1_decoy,length=2162>
+##contig=<ID=chrUn_JTFH01001339v1_decoy,length=2146>
+##contig=<ID=chrUn_JTFH01001340v1_decoy,length=2116>
+##contig=<ID=chrUn_JTFH01001341v1_decoy,length=2112>
+##contig=<ID=chrUn_JTFH01001342v1_decoy,length=2108>
+##contig=<ID=chrUn_JTFH01001343v1_decoy,length=2106>
+##contig=<ID=chrUn_JTFH01001344v1_decoy,length=2106>
+##contig=<ID=chrUn_JTFH01001345v1_decoy,length=2106>
+##contig=<ID=chrUn_JTFH01001346v1_decoy,length=2097>
+##contig=<ID=chrUn_JTFH01001347v1_decoy,length=2081>
+##contig=<ID=chrUn_JTFH01001348v1_decoy,length=2058>
+##contig=<ID=chrUn_JTFH01001349v1_decoy,length=2055>
+##contig=<ID=chrUn_JTFH01001350v1_decoy,length=2054>
+##contig=<ID=chrUn_JTFH01001351v1_decoy,length=2037>
+##contig=<ID=chrUn_JTFH01001352v1_decoy,length=2032>
+##contig=<ID=chrUn_JTFH01001353v1_decoy,length=2032>
+##contig=<ID=chrUn_JTFH01001354v1_decoy,length=2020>
+##contig=<ID=chrUn_JTFH01001355v1_decoy,length=2018>
+##contig=<ID=chrUn_JTFH01001356v1_decoy,length=2014>
+##contig=<ID=chrUn_JTFH01001357v1_decoy,length=2001>
+##contig=<ID=chrUn_JTFH01001358v1_decoy,length=2001>
+##contig=<ID=chrUn_JTFH01001359v1_decoy,length=1991>
+##contig=<ID=chrUn_JTFH01001360v1_decoy,length=1990>
+##contig=<ID=chrUn_JTFH01001361v1_decoy,length=1983>
+##contig=<ID=chrUn_JTFH01001362v1_decoy,length=1981>
+##contig=<ID=chrUn_JTFH01001363v1_decoy,length=1981>
+##contig=<ID=chrUn_JTFH01001364v1_decoy,length=1979>
+##contig=<ID=chrUn_JTFH01001365v1_decoy,length=1963>
+##contig=<ID=chrUn_JTFH01001366v1_decoy,length=1932>
+##contig=<ID=chrUn_JTFH01001367v1_decoy,length=1929>
+##contig=<ID=chrUn_JTFH01001368v1_decoy,length=1881>
+##contig=<ID=chrUn_JTFH01001369v1_decoy,length=1874>
+##contig=<ID=chrUn_JTFH01001370v1_decoy,length=1849>
+##contig=<ID=chrUn_JTFH01001371v1_decoy,length=1849>
+##contig=<ID=chrUn_JTFH01001372v1_decoy,length=1833>
+##contig=<ID=chrUn_JTFH01001373v1_decoy,length=1832>
+##contig=<ID=chrUn_JTFH01001374v1_decoy,length=1826>
+##contig=<ID=chrUn_JTFH01001375v1_decoy,length=1814>
+##contig=<ID=chrUn_JTFH01001376v1_decoy,length=1814>
+##contig=<ID=chrUn_JTFH01001377v1_decoy,length=1791>
+##contig=<ID=chrUn_JTFH01001378v1_decoy,length=1789>
+##contig=<ID=chrUn_JTFH01001379v1_decoy,length=1786>
+##contig=<ID=chrUn_JTFH01001380v1_decoy,length=1778>
+##contig=<ID=chrUn_JTFH01001381v1_decoy,length=1776>
+##contig=<ID=chrUn_JTFH01001382v1_decoy,length=1762>
+##contig=<ID=chrUn_JTFH01001383v1_decoy,length=1758>
+##contig=<ID=chrUn_JTFH01001384v1_decoy,length=1757>
+##contig=<ID=chrUn_JTFH01001385v1_decoy,length=1754>
+##contig=<ID=chrUn_JTFH01001386v1_decoy,length=1752>
+##contig=<ID=chrUn_JTFH01001387v1_decoy,length=1751>
+##contig=<ID=chrUn_JTFH01001388v1_decoy,length=1749>
+##contig=<ID=chrUn_JTFH01001389v1_decoy,length=1738>
+##contig=<ID=chrUn_JTFH01001390v1_decoy,length=1729>
+##contig=<ID=chrUn_JTFH01001391v1_decoy,length=1726>
+##contig=<ID=chrUn_JTFH01001392v1_decoy,length=1716>
+##contig=<ID=chrUn_JTFH01001393v1_decoy,length=1712>
+##contig=<ID=chrUn_JTFH01001394v1_decoy,length=1711>
+##contig=<ID=chrUn_JTFH01001395v1_decoy,length=1703>
+##contig=<ID=chrUn_JTFH01001396v1_decoy,length=1702>
+##contig=<ID=chrUn_JTFH01001397v1_decoy,length=1699>
+##contig=<ID=chrUn_JTFH01001398v1_decoy,length=1686>
+##contig=<ID=chrUn_JTFH01001399v1_decoy,length=1684>
+##contig=<ID=chrUn_JTFH01001400v1_decoy,length=1680>
+##contig=<ID=chrUn_JTFH01001401v1_decoy,length=1678>
+##contig=<ID=chrUn_JTFH01001402v1_decoy,length=1678>
+##contig=<ID=chrUn_JTFH01001403v1_decoy,length=1677>
+##contig=<ID=chrUn_JTFH01001404v1_decoy,length=1676>
+##contig=<ID=chrUn_JTFH01001405v1_decoy,length=1672>
+##contig=<ID=chrUn_JTFH01001406v1_decoy,length=1669>
+##contig=<ID=chrUn_JTFH01001407v1_decoy,length=1668>
+##contig=<ID=chrUn_JTFH01001408v1_decoy,length=1663>
+##contig=<ID=chrUn_JTFH01001409v1_decoy,length=1660>
+##contig=<ID=chrUn_JTFH01001410v1_decoy,length=1660>
+##contig=<ID=chrUn_JTFH01001411v1_decoy,length=1658>
+##contig=<ID=chrUn_JTFH01001412v1_decoy,length=1656>
+##contig=<ID=chrUn_JTFH01001413v1_decoy,length=1656>
+##contig=<ID=chrUn_JTFH01001414v1_decoy,length=1652>
+##contig=<ID=chrUn_JTFH01001415v1_decoy,length=1647>
+##contig=<ID=chrUn_JTFH01001416v1_decoy,length=1645>
+##contig=<ID=chrUn_JTFH01001417v1_decoy,length=1641>
+##contig=<ID=chrUn_JTFH01001418v1_decoy,length=1638>
+##contig=<ID=chrUn_JTFH01001419v1_decoy,length=1633>
+##contig=<ID=chrUn_JTFH01001420v1_decoy,length=1626>
+##contig=<ID=chrUn_JTFH01001421v1_decoy,length=1614>
+##contig=<ID=chrUn_JTFH01001422v1_decoy,length=1612>
+##contig=<ID=chrUn_JTFH01001423v1_decoy,length=1605>
+##contig=<ID=chrUn_JTFH01001424v1_decoy,length=1603>
+##contig=<ID=chrUn_JTFH01001425v1_decoy,length=1599>
+##contig=<ID=chrUn_JTFH01001426v1_decoy,length=1589>
+##contig=<ID=chrUn_JTFH01001427v1_decoy,length=1588>
+##contig=<ID=chrUn_JTFH01001428v1_decoy,length=1585>
+##contig=<ID=chrUn_JTFH01001429v1_decoy,length=1584>
+##contig=<ID=chrUn_JTFH01001430v1_decoy,length=1584>
+##contig=<ID=chrUn_JTFH01001431v1_decoy,length=1580>
+##contig=<ID=chrUn_JTFH01001432v1_decoy,length=1572>
+##contig=<ID=chrUn_JTFH01001433v1_decoy,length=1570>
+##contig=<ID=chrUn_JTFH01001434v1_decoy,length=1569>
+##contig=<ID=chrUn_JTFH01001435v1_decoy,length=1568>
+##contig=<ID=chrUn_JTFH01001436v1_decoy,length=1567>
+##contig=<ID=chrUn_JTFH01001437v1_decoy,length=1565>
+##contig=<ID=chrUn_JTFH01001438v1_decoy,length=1559>
+##contig=<ID=chrUn_JTFH01001439v1_decoy,length=1559>
+##contig=<ID=chrUn_JTFH01001440v1_decoy,length=1556>
+##contig=<ID=chrUn_JTFH01001441v1_decoy,length=1554>
+##contig=<ID=chrUn_JTFH01001442v1_decoy,length=1549>
+##contig=<ID=chrUn_JTFH01001443v1_decoy,length=1542>
+##contig=<ID=chrUn_JTFH01001444v1_decoy,length=1541>
+##contig=<ID=chrUn_JTFH01001445v1_decoy,length=1538>
+##contig=<ID=chrUn_JTFH01001446v1_decoy,length=1537>
+##contig=<ID=chrUn_JTFH01001447v1_decoy,length=1535>
+##contig=<ID=chrUn_JTFH01001448v1_decoy,length=1530>
+##contig=<ID=chrUn_JTFH01001449v1_decoy,length=1528>
+##contig=<ID=chrUn_JTFH01001450v1_decoy,length=1522>
+##contig=<ID=chrUn_JTFH01001451v1_decoy,length=1514>
+##contig=<ID=chrUn_JTFH01001452v1_decoy,length=1509>
+##contig=<ID=chrUn_JTFH01001453v1_decoy,length=1507>
+##contig=<ID=chrUn_JTFH01001454v1_decoy,length=1500>
+##contig=<ID=chrUn_JTFH01001455v1_decoy,length=1499>
+##contig=<ID=chrUn_JTFH01001456v1_decoy,length=1499>
+##contig=<ID=chrUn_JTFH01001457v1_decoy,length=1497>
+##contig=<ID=chrUn_JTFH01001458v1_decoy,length=1496>
+##contig=<ID=chrUn_JTFH01001459v1_decoy,length=1488>
+##contig=<ID=chrUn_JTFH01001460v1_decoy,length=1486>
+##contig=<ID=chrUn_JTFH01001461v1_decoy,length=1485>
+##contig=<ID=chrUn_JTFH01001462v1_decoy,length=1481>
+##contig=<ID=chrUn_JTFH01001463v1_decoy,length=1479>
+##contig=<ID=chrUn_JTFH01001464v1_decoy,length=1472>
+##contig=<ID=chrUn_JTFH01001465v1_decoy,length=1472>
+##contig=<ID=chrUn_JTFH01001466v1_decoy,length=1470>
+##contig=<ID=chrUn_JTFH01001467v1_decoy,length=1466>
+##contig=<ID=chrUn_JTFH01001468v1_decoy,length=1465>
+##contig=<ID=chrUn_JTFH01001469v1_decoy,length=1461>
+##contig=<ID=chrUn_JTFH01001470v1_decoy,length=1458>
+##contig=<ID=chrUn_JTFH01001471v1_decoy,length=1457>
+##contig=<ID=chrUn_JTFH01001472v1_decoy,length=1448>
+##contig=<ID=chrUn_JTFH01001473v1_decoy,length=1447>
+##contig=<ID=chrUn_JTFH01001474v1_decoy,length=1444>
+##contig=<ID=chrUn_JTFH01001475v1_decoy,length=1443>
+##contig=<ID=chrUn_JTFH01001476v1_decoy,length=1443>
+##contig=<ID=chrUn_JTFH01001477v1_decoy,length=1438>
+##contig=<ID=chrUn_JTFH01001478v1_decoy,length=1432>
+##contig=<ID=chrUn_JTFH01001479v1_decoy,length=1430>
+##contig=<ID=chrUn_JTFH01001480v1_decoy,length=1430>
+##contig=<ID=chrUn_JTFH01001481v1_decoy,length=1429>
+##contig=<ID=chrUn_JTFH01001482v1_decoy,length=1429>
+##contig=<ID=chrUn_JTFH01001483v1_decoy,length=1429>
+##contig=<ID=chrUn_JTFH01001484v1_decoy,length=1426>
+##contig=<ID=chrUn_JTFH01001485v1_decoy,length=1426>
+##contig=<ID=chrUn_JTFH01001486v1_decoy,length=1420>
+##contig=<ID=chrUn_JTFH01001487v1_decoy,length=1416>
+##contig=<ID=chrUn_JTFH01001488v1_decoy,length=1416>
+##contig=<ID=chrUn_JTFH01001489v1_decoy,length=1415>
+##contig=<ID=chrUn_JTFH01001490v1_decoy,length=1415>
+##contig=<ID=chrUn_JTFH01001491v1_decoy,length=1414>
+##contig=<ID=chrUn_JTFH01001492v1_decoy,length=1413>
+##contig=<ID=chrUn_JTFH01001493v1_decoy,length=1410>
+##contig=<ID=chrUn_JTFH01001494v1_decoy,length=1405>
+##contig=<ID=chrUn_JTFH01001495v1_decoy,length=1402>
+##contig=<ID=chrUn_JTFH01001496v1_decoy,length=1398>
+##contig=<ID=chrUn_JTFH01001497v1_decoy,length=1397>
+##contig=<ID=chrUn_JTFH01001498v1_decoy,length=1395>
+##contig=<ID=chrUn_JTFH01001499v1_decoy,length=1392>
+##contig=<ID=chrUn_JTFH01001500v1_decoy,length=1388>
+##contig=<ID=chrUn_JTFH01001501v1_decoy,length=1386>
+##contig=<ID=chrUn_JTFH01001502v1_decoy,length=1382>
+##contig=<ID=chrUn_JTFH01001503v1_decoy,length=1381>
+##contig=<ID=chrUn_JTFH01001504v1_decoy,length=1379>
+##contig=<ID=chrUn_JTFH01001505v1_decoy,length=1376>
+##contig=<ID=chrUn_JTFH01001506v1_decoy,length=1374>
+##contig=<ID=chrUn_JTFH01001507v1_decoy,length=1374>
+##contig=<ID=chrUn_JTFH01001508v1_decoy,length=1373>
+##contig=<ID=chrUn_JTFH01001509v1_decoy,length=1373>
+##contig=<ID=chrUn_JTFH01001510v1_decoy,length=1372>
+##contig=<ID=chrUn_JTFH01001511v1_decoy,length=1370>
+##contig=<ID=chrUn_JTFH01001512v1_decoy,length=1367>
+##contig=<ID=chrUn_JTFH01001513v1_decoy,length=1365>
+##contig=<ID=chrUn_JTFH01001514v1_decoy,length=1364>
+##contig=<ID=chrUn_JTFH01001515v1_decoy,length=1361>
+##contig=<ID=chrUn_JTFH01001516v1_decoy,length=1361>
+##contig=<ID=chrUn_JTFH01001517v1_decoy,length=1355>
+##contig=<ID=chrUn_JTFH01001518v1_decoy,length=1355>
+##contig=<ID=chrUn_JTFH01001519v1_decoy,length=1354>
+##contig=<ID=chrUn_JTFH01001520v1_decoy,length=1353>
+##contig=<ID=chrUn_JTFH01001521v1_decoy,length=1349>
+##contig=<ID=chrUn_JTFH01001522v1_decoy,length=1345>
+##contig=<ID=chrUn_JTFH01001523v1_decoy,length=1344>
+##contig=<ID=chrUn_JTFH01001524v1_decoy,length=1343>
+##contig=<ID=chrUn_JTFH01001525v1_decoy,length=1338>
+##contig=<ID=chrUn_JTFH01001526v1_decoy,length=1338>
+##contig=<ID=chrUn_JTFH01001527v1_decoy,length=1338>
+##contig=<ID=chrUn_JTFH01001528v1_decoy,length=1336>
+##contig=<ID=chrUn_JTFH01001529v1_decoy,length=1333>
+##contig=<ID=chrUn_JTFH01001530v1_decoy,length=1333>
+##contig=<ID=chrUn_JTFH01001531v1_decoy,length=1332>
+##contig=<ID=chrUn_JTFH01001532v1_decoy,length=1324>
+##contig=<ID=chrUn_JTFH01001533v1_decoy,length=1323>
+##contig=<ID=chrUn_JTFH01001534v1_decoy,length=1323>
+##contig=<ID=chrUn_JTFH01001535v1_decoy,length=1320>
+##contig=<ID=chrUn_JTFH01001536v1_decoy,length=1320>
+##contig=<ID=chrUn_JTFH01001537v1_decoy,length=1317>
+##contig=<ID=chrUn_JTFH01001538v1_decoy,length=1316>
+##contig=<ID=chrUn_JTFH01001539v1_decoy,length=1304>
+##contig=<ID=chrUn_JTFH01001540v1_decoy,length=1304>
+##contig=<ID=chrUn_JTFH01001541v1_decoy,length=1303>
+##contig=<ID=chrUn_JTFH01001542v1_decoy,length=1302>
+##contig=<ID=chrUn_JTFH01001543v1_decoy,length=1301>
+##contig=<ID=chrUn_JTFH01001544v1_decoy,length=1300>
+##contig=<ID=chrUn_JTFH01001545v1_decoy,length=1298>
+##contig=<ID=chrUn_JTFH01001546v1_decoy,length=1297>
+##contig=<ID=chrUn_JTFH01001547v1_decoy,length=1295>
+##contig=<ID=chrUn_JTFH01001548v1_decoy,length=1284>
+##contig=<ID=chrUn_JTFH01001549v1_decoy,length=1283>
+##contig=<ID=chrUn_JTFH01001550v1_decoy,length=1283>
+##contig=<ID=chrUn_JTFH01001551v1_decoy,length=1279>
+##contig=<ID=chrUn_JTFH01001552v1_decoy,length=1278>
+##contig=<ID=chrUn_JTFH01001553v1_decoy,length=1271>
+##contig=<ID=chrUn_JTFH01001554v1_decoy,length=1271>
+##contig=<ID=chrUn_JTFH01001555v1_decoy,length=1268>
+##contig=<ID=chrUn_JTFH01001556v1_decoy,length=1264>
+##contig=<ID=chrUn_JTFH01001557v1_decoy,length=1263>
+##contig=<ID=chrUn_JTFH01001558v1_decoy,length=1262>
+##contig=<ID=chrUn_JTFH01001559v1_decoy,length=1261>
+##contig=<ID=chrUn_JTFH01001560v1_decoy,length=1260>
+##contig=<ID=chrUn_JTFH01001561v1_decoy,length=1259>
+##contig=<ID=chrUn_JTFH01001562v1_decoy,length=1259>
+##contig=<ID=chrUn_JTFH01001563v1_decoy,length=1258>
+##contig=<ID=chrUn_JTFH01001564v1_decoy,length=1256>
+##contig=<ID=chrUn_JTFH01001565v1_decoy,length=1253>
+##contig=<ID=chrUn_JTFH01001566v1_decoy,length=1248>
+##contig=<ID=chrUn_JTFH01001567v1_decoy,length=1248>
+##contig=<ID=chrUn_JTFH01001568v1_decoy,length=1246>
+##contig=<ID=chrUn_JTFH01001569v1_decoy,length=1246>
+##contig=<ID=chrUn_JTFH01001570v1_decoy,length=1244>
+##contig=<ID=chrUn_JTFH01001571v1_decoy,length=1238>
+##contig=<ID=chrUn_JTFH01001572v1_decoy,length=1238>
+##contig=<ID=chrUn_JTFH01001573v1_decoy,length=1236>
+##contig=<ID=chrUn_JTFH01001574v1_decoy,length=1234>
+##contig=<ID=chrUn_JTFH01001575v1_decoy,length=1234>
+##contig=<ID=chrUn_JTFH01001576v1_decoy,length=1231>
+##contig=<ID=chrUn_JTFH01001577v1_decoy,length=1231>
+##contig=<ID=chrUn_JTFH01001578v1_decoy,length=1230>
+##contig=<ID=chrUn_JTFH01001579v1_decoy,length=1230>
+##contig=<ID=chrUn_JTFH01001580v1_decoy,length=1228>
+##contig=<ID=chrUn_JTFH01001581v1_decoy,length=1227>
+##contig=<ID=chrUn_JTFH01001582v1_decoy,length=1222>
+##contig=<ID=chrUn_JTFH01001583v1_decoy,length=1222>
+##contig=<ID=chrUn_JTFH01001584v1_decoy,length=1221>
+##contig=<ID=chrUn_JTFH01001585v1_decoy,length=1221>
+##contig=<ID=chrUn_JTFH01001586v1_decoy,length=1220>
+##contig=<ID=chrUn_JTFH01001587v1_decoy,length=1218>
+##contig=<ID=chrUn_JTFH01001588v1_decoy,length=1218>
+##contig=<ID=chrUn_JTFH01001589v1_decoy,length=1216>
+##contig=<ID=chrUn_JTFH01001590v1_decoy,length=1216>
+##contig=<ID=chrUn_JTFH01001591v1_decoy,length=1212>
+##contig=<ID=chrUn_JTFH01001592v1_decoy,length=1210>
+##contig=<ID=chrUn_JTFH01001593v1_decoy,length=1209>
+##contig=<ID=chrUn_JTFH01001594v1_decoy,length=1208>
+##contig=<ID=chrUn_JTFH01001595v1_decoy,length=1208>
+##contig=<ID=chrUn_JTFH01001596v1_decoy,length=1206>
+##contig=<ID=chrUn_JTFH01001597v1_decoy,length=1205>
+##contig=<ID=chrUn_JTFH01001598v1_decoy,length=1205>
+##contig=<ID=chrUn_JTFH01001599v1_decoy,length=1202>
+##contig=<ID=chrUn_JTFH01001600v1_decoy,length=1200>
+##contig=<ID=chrUn_JTFH01001601v1_decoy,length=1199>
+##contig=<ID=chrUn_JTFH01001602v1_decoy,length=1198>
+##contig=<ID=chrUn_JTFH01001603v1_decoy,length=1198>
+##contig=<ID=chrUn_JTFH01001604v1_decoy,length=1198>
+##contig=<ID=chrUn_JTFH01001605v1_decoy,length=1195>
+##contig=<ID=chrUn_JTFH01001606v1_decoy,length=1194>
+##contig=<ID=chrUn_JTFH01001607v1_decoy,length=1191>
+##contig=<ID=chrUn_JTFH01001608v1_decoy,length=1189>
+##contig=<ID=chrUn_JTFH01001609v1_decoy,length=1188>
+##contig=<ID=chrUn_JTFH01001610v1_decoy,length=1180>
+##contig=<ID=chrUn_JTFH01001611v1_decoy,length=1180>
+##contig=<ID=chrUn_JTFH01001612v1_decoy,length=1179>
+##contig=<ID=chrUn_JTFH01001613v1_decoy,length=1172>
+##contig=<ID=chrUn_JTFH01001614v1_decoy,length=1168>
+##contig=<ID=chrUn_JTFH01001615v1_decoy,length=1166>
+##contig=<ID=chrUn_JTFH01001616v1_decoy,length=1157>
+##contig=<ID=chrUn_JTFH01001617v1_decoy,length=1156>
+##contig=<ID=chrUn_JTFH01001618v1_decoy,length=1156>
+##contig=<ID=chrUn_JTFH01001619v1_decoy,length=1155>
+##contig=<ID=chrUn_JTFH01001620v1_decoy,length=1154>
+##contig=<ID=chrUn_JTFH01001621v1_decoy,length=1154>
+##contig=<ID=chrUn_JTFH01001622v1_decoy,length=1149>
+##contig=<ID=chrUn_JTFH01001623v1_decoy,length=1143>
+##contig=<ID=chrUn_JTFH01001624v1_decoy,length=1143>
+##contig=<ID=chrUn_JTFH01001625v1_decoy,length=1140>
+##contig=<ID=chrUn_JTFH01001626v1_decoy,length=1137>
+##contig=<ID=chrUn_JTFH01001627v1_decoy,length=1135>
+##contig=<ID=chrUn_JTFH01001628v1_decoy,length=1135>
+##contig=<ID=chrUn_JTFH01001629v1_decoy,length=1135>
+##contig=<ID=chrUn_JTFH01001630v1_decoy,length=1127>
+##contig=<ID=chrUn_JTFH01001631v1_decoy,length=1127>
+##contig=<ID=chrUn_JTFH01001632v1_decoy,length=1126>
+##contig=<ID=chrUn_JTFH01001633v1_decoy,length=1123>
+##contig=<ID=chrUn_JTFH01001634v1_decoy,length=1123>
+##contig=<ID=chrUn_JTFH01001635v1_decoy,length=1123>
+##contig=<ID=chrUn_JTFH01001636v1_decoy,length=1122>
+##contig=<ID=chrUn_JTFH01001637v1_decoy,length=1122>
+##contig=<ID=chrUn_JTFH01001638v1_decoy,length=1121>
+##contig=<ID=chrUn_JTFH01001639v1_decoy,length=1121>
+##contig=<ID=chrUn_JTFH01001640v1_decoy,length=1119>
+##contig=<ID=chrUn_JTFH01001641v1_decoy,length=1119>
+##contig=<ID=chrUn_JTFH01001642v1_decoy,length=1119>
+##contig=<ID=chrUn_JTFH01001643v1_decoy,length=1118>
+##contig=<ID=chrUn_JTFH01001644v1_decoy,length=1115>
+##contig=<ID=chrUn_JTFH01001645v1_decoy,length=1106>
+##contig=<ID=chrUn_JTFH01001646v1_decoy,length=1106>
+##contig=<ID=chrUn_JTFH01001647v1_decoy,length=1104>
+##contig=<ID=chrUn_JTFH01001648v1_decoy,length=1102>
+##contig=<ID=chrUn_JTFH01001649v1_decoy,length=1101>
+##contig=<ID=chrUn_JTFH01001650v1_decoy,length=1098>
+##contig=<ID=chrUn_JTFH01001651v1_decoy,length=1098>
+##contig=<ID=chrUn_JTFH01001652v1_decoy,length=1096>
+##contig=<ID=chrUn_JTFH01001653v1_decoy,length=1096>
+##contig=<ID=chrUn_JTFH01001654v1_decoy,length=1095>
+##contig=<ID=chrUn_JTFH01001655v1_decoy,length=1093>
+##contig=<ID=chrUn_JTFH01001656v1_decoy,length=1090>
+##contig=<ID=chrUn_JTFH01001657v1_decoy,length=1089>
+##contig=<ID=chrUn_JTFH01001658v1_decoy,length=1087>
+##contig=<ID=chrUn_JTFH01001659v1_decoy,length=1087>
+##contig=<ID=chrUn_JTFH01001660v1_decoy,length=1085>
+##contig=<ID=chrUn_JTFH01001661v1_decoy,length=1085>
+##contig=<ID=chrUn_JTFH01001662v1_decoy,length=1085>
+##contig=<ID=chrUn_JTFH01001663v1_decoy,length=1083>
+##contig=<ID=chrUn_JTFH01001664v1_decoy,length=1080>
+##contig=<ID=chrUn_JTFH01001665v1_decoy,length=1080>
+##contig=<ID=chrUn_JTFH01001666v1_decoy,length=1079>
+##contig=<ID=chrUn_JTFH01001667v1_decoy,length=1079>
+##contig=<ID=chrUn_JTFH01001668v1_decoy,length=1079>
+##contig=<ID=chrUn_JTFH01001669v1_decoy,length=1075>
+##contig=<ID=chrUn_JTFH01001670v1_decoy,length=1074>
+##contig=<ID=chrUn_JTFH01001671v1_decoy,length=1073>
+##contig=<ID=chrUn_JTFH01001672v1_decoy,length=1070>
+##contig=<ID=chrUn_JTFH01001673v1_decoy,length=1068>
+##contig=<ID=chrUn_JTFH01001674v1_decoy,length=1067>
+##contig=<ID=chrUn_JTFH01001675v1_decoy,length=1066>
+##contig=<ID=chrUn_JTFH01001676v1_decoy,length=1066>
+##contig=<ID=chrUn_JTFH01001677v1_decoy,length=1066>
+##contig=<ID=chrUn_JTFH01001678v1_decoy,length=1063>
+##contig=<ID=chrUn_JTFH01001679v1_decoy,length=1063>
+##contig=<ID=chrUn_JTFH01001680v1_decoy,length=1063>
+##contig=<ID=chrUn_JTFH01001681v1_decoy,length=1062>
+##contig=<ID=chrUn_JTFH01001682v1_decoy,length=1058>
+##contig=<ID=chrUn_JTFH01001683v1_decoy,length=1056>
+##contig=<ID=chrUn_JTFH01001684v1_decoy,length=1052>
+##contig=<ID=chrUn_JTFH01001685v1_decoy,length=1051>
+##contig=<ID=chrUn_JTFH01001686v1_decoy,length=1051>
+##contig=<ID=chrUn_JTFH01001687v1_decoy,length=1050>
+##contig=<ID=chrUn_JTFH01001688v1_decoy,length=1048>
+##contig=<ID=chrUn_JTFH01001689v1_decoy,length=1046>
+##contig=<ID=chrUn_JTFH01001690v1_decoy,length=1046>
+##contig=<ID=chrUn_JTFH01001691v1_decoy,length=1045>
+##contig=<ID=chrUn_JTFH01001692v1_decoy,length=1043>
+##contig=<ID=chrUn_JTFH01001693v1_decoy,length=1038>
+##contig=<ID=chrUn_JTFH01001694v1_decoy,length=1036>
+##contig=<ID=chrUn_JTFH01001695v1_decoy,length=1035>
+##contig=<ID=chrUn_JTFH01001696v1_decoy,length=1035>
+##contig=<ID=chrUn_JTFH01001697v1_decoy,length=1035>
+##contig=<ID=chrUn_JTFH01001698v1_decoy,length=1033>
+##contig=<ID=chrUn_JTFH01001699v1_decoy,length=1032>
+##contig=<ID=chrUn_JTFH01001700v1_decoy,length=1031>
+##contig=<ID=chrUn_JTFH01001701v1_decoy,length=1026>
+##contig=<ID=chrUn_JTFH01001702v1_decoy,length=1026>
+##contig=<ID=chrUn_JTFH01001703v1_decoy,length=1026>
+##contig=<ID=chrUn_JTFH01001704v1_decoy,length=1023>
+##contig=<ID=chrUn_JTFH01001705v1_decoy,length=1022>
+##contig=<ID=chrUn_JTFH01001706v1_decoy,length=1020>
+##contig=<ID=chrUn_JTFH01001707v1_decoy,length=1020>
+##contig=<ID=chrUn_JTFH01001708v1_decoy,length=1020>
+##contig=<ID=chrUn_JTFH01001709v1_decoy,length=1019>
+##contig=<ID=chrUn_JTFH01001710v1_decoy,length=1018>
+##contig=<ID=chrUn_JTFH01001711v1_decoy,length=1018>
+##contig=<ID=chrUn_JTFH01001712v1_decoy,length=1017>
+##contig=<ID=chrUn_JTFH01001713v1_decoy,length=1015>
+##contig=<ID=chrUn_JTFH01001714v1_decoy,length=1015>
+##contig=<ID=chrUn_JTFH01001715v1_decoy,length=1015>
+##contig=<ID=chrUn_JTFH01001716v1_decoy,length=1014>
+##contig=<ID=chrUn_JTFH01001717v1_decoy,length=1014>
+##contig=<ID=chrUn_JTFH01001718v1_decoy,length=1013>
+##contig=<ID=chrUn_JTFH01001719v1_decoy,length=1013>
+##contig=<ID=chrUn_JTFH01001720v1_decoy,length=1013>
+##contig=<ID=chrUn_JTFH01001721v1_decoy,length=1012>
+##contig=<ID=chrUn_JTFH01001722v1_decoy,length=1011>
+##contig=<ID=chrUn_JTFH01001723v1_decoy,length=1011>
+##contig=<ID=chrUn_JTFH01001724v1_decoy,length=1009>
+##contig=<ID=chrUn_JTFH01001725v1_decoy,length=1008>
+##contig=<ID=chrUn_JTFH01001726v1_decoy,length=1008>
+##contig=<ID=chrUn_JTFH01001727v1_decoy,length=1007>
+##contig=<ID=chrUn_JTFH01001728v1_decoy,length=1007>
+##contig=<ID=chrUn_JTFH01001729v1_decoy,length=1007>
+##contig=<ID=chrUn_JTFH01001730v1_decoy,length=1006>
+##contig=<ID=chrUn_JTFH01001731v1_decoy,length=1005>
+##contig=<ID=chrUn_JTFH01001732v1_decoy,length=1003>
+##contig=<ID=chrUn_JTFH01001733v1_decoy,length=1001>
+##contig=<ID=chrUn_JTFH01001734v1_decoy,length=1000>
+##contig=<ID=chrUn_JTFH01001735v1_decoy,length=19311>
+##contig=<ID=chrUn_JTFH01001736v1_decoy,length=11713>
+##contig=<ID=chrUn_JTFH01001737v1_decoy,length=11263>
+##contig=<ID=chrUn_JTFH01001738v1_decoy,length=9779>
+##contig=<ID=chrUn_JTFH01001739v1_decoy,length=9568>
+##contig=<ID=chrUn_JTFH01001740v1_decoy,length=9344>
+##contig=<ID=chrUn_JTFH01001741v1_decoy,length=9188>
+##contig=<ID=chrUn_JTFH01001742v1_decoy,length=9100>
+##contig=<ID=chrUn_JTFH01001743v1_decoy,length=8771>
+##contig=<ID=chrUn_JTFH01001744v1_decoy,length=8690>
+##contig=<ID=chrUn_JTFH01001745v1_decoy,length=8566>
+##contig=<ID=chrUn_JTFH01001746v1_decoy,length=8058>
+##contig=<ID=chrUn_JTFH01001747v1_decoy,length=7759>
+##contig=<ID=chrUn_JTFH01001748v1_decoy,length=7585>
+##contig=<ID=chrUn_JTFH01001749v1_decoy,length=7471>
+##contig=<ID=chrUn_JTFH01001750v1_decoy,length=7461>
+##contig=<ID=chrUn_JTFH01001751v1_decoy,length=7342>
+##contig=<ID=chrUn_JTFH01001752v1_decoy,length=7223>
+##contig=<ID=chrUn_JTFH01001753v1_decoy,length=7064>
+##contig=<ID=chrUn_JTFH01001754v1_decoy,length=6916>
+##contig=<ID=chrUn_JTFH01001755v1_decoy,length=6897>
+##contig=<ID=chrUn_JTFH01001756v1_decoy,length=6880>
+##contig=<ID=chrUn_JTFH01001757v1_decoy,length=6857>
+##contig=<ID=chrUn_JTFH01001758v1_decoy,length=6840>
+##contig=<ID=chrUn_JTFH01001759v1_decoy,length=6728>
+##contig=<ID=chrUn_JTFH01001760v1_decoy,length=6688>
+##contig=<ID=chrUn_JTFH01001761v1_decoy,length=6553>
+##contig=<ID=chrUn_JTFH01001762v1_decoy,length=6396>
+##contig=<ID=chrUn_JTFH01001763v1_decoy,length=6345>
+##contig=<ID=chrUn_JTFH01001764v1_decoy,length=6295>
+##contig=<ID=chrUn_JTFH01001765v1_decoy,length=6266>
+##contig=<ID=chrUn_JTFH01001766v1_decoy,length=6173>
+##contig=<ID=chrUn_JTFH01001767v1_decoy,length=6171>
+##contig=<ID=chrUn_JTFH01001768v1_decoy,length=6120>
+##contig=<ID=chrUn_JTFH01001769v1_decoy,length=6105>
+##contig=<ID=chrUn_JTFH01001770v1_decoy,length=6099>
+##contig=<ID=chrUn_JTFH01001771v1_decoy,length=5893>
+##contig=<ID=chrUn_JTFH01001772v1_decoy,length=5829>
+##contig=<ID=chrUn_JTFH01001773v1_decoy,length=5793>
+##contig=<ID=chrUn_JTFH01001774v1_decoy,length=5776>
+##contig=<ID=chrUn_JTFH01001775v1_decoy,length=5759>
+##contig=<ID=chrUn_JTFH01001776v1_decoy,length=5716>
+##contig=<ID=chrUn_JTFH01001777v1_decoy,length=5708>
+##contig=<ID=chrUn_JTFH01001778v1_decoy,length=5590>
+##contig=<ID=chrUn_JTFH01001779v1_decoy,length=5566>
+##contig=<ID=chrUn_JTFH01001780v1_decoy,length=5558>
+##contig=<ID=chrUn_JTFH01001781v1_decoy,length=5418>
+##contig=<ID=chrUn_JTFH01001782v1_decoy,length=5375>
+##contig=<ID=chrUn_JTFH01001783v1_decoy,length=5300>
+##contig=<ID=chrUn_JTFH01001784v1_decoy,length=5255>
+##contig=<ID=chrUn_JTFH01001785v1_decoy,length=5157>
+##contig=<ID=chrUn_JTFH01001786v1_decoy,length=5130>
+##contig=<ID=chrUn_JTFH01001787v1_decoy,length=4978>
+##contig=<ID=chrUn_JTFH01001788v1_decoy,length=4957>
+##contig=<ID=chrUn_JTFH01001789v1_decoy,length=4947>
+##contig=<ID=chrUn_JTFH01001790v1_decoy,length=4897>
+##contig=<ID=chrUn_JTFH01001791v1_decoy,length=4867>
+##contig=<ID=chrUn_JTFH01001792v1_decoy,length=4845>
+##contig=<ID=chrUn_JTFH01001793v1_decoy,length=4678>
+##contig=<ID=chrUn_JTFH01001794v1_decoy,length=4641>
+##contig=<ID=chrUn_JTFH01001795v1_decoy,length=4592>
+##contig=<ID=chrUn_JTFH01001796v1_decoy,length=4543>
+##contig=<ID=chrUn_JTFH01001797v1_decoy,length=4532>
+##contig=<ID=chrUn_JTFH01001798v1_decoy,length=4503>
+##contig=<ID=chrUn_JTFH01001799v1_decoy,length=4495>
+##contig=<ID=chrUn_JTFH01001800v1_decoy,length=4444>
+##contig=<ID=chrUn_JTFH01001801v1_decoy,length=4414>
+##contig=<ID=chrUn_JTFH01001802v1_decoy,length=4409>
+##contig=<ID=chrUn_JTFH01001803v1_decoy,length=4302>
+##contig=<ID=chrUn_JTFH01001804v1_decoy,length=4300>
+##contig=<ID=chrUn_JTFH01001805v1_decoy,length=4277>
+##contig=<ID=chrUn_JTFH01001806v1_decoy,length=4173>
+##contig=<ID=chrUn_JTFH01001807v1_decoy,length=4169>
+##contig=<ID=chrUn_JTFH01001808v1_decoy,length=4136>
+##contig=<ID=chrUn_JTFH01001809v1_decoy,length=4101>
+##contig=<ID=chrUn_JTFH01001810v1_decoy,length=4089>
+##contig=<ID=chrUn_JTFH01001811v1_decoy,length=4015>
+##contig=<ID=chrUn_JTFH01001812v1_decoy,length=4000>
+##contig=<ID=chrUn_JTFH01001813v1_decoy,length=3973>
+##contig=<ID=chrUn_JTFH01001814v1_decoy,length=3732>
+##contig=<ID=chrUn_JTFH01001815v1_decoy,length=3709>
+##contig=<ID=chrUn_JTFH01001816v1_decoy,length=3686>
+##contig=<ID=chrUn_JTFH01001817v1_decoy,length=3676>
+##contig=<ID=chrUn_JTFH01001818v1_decoy,length=3673>
+##contig=<ID=chrUn_JTFH01001819v1_decoy,length=3672>
+##contig=<ID=chrUn_JTFH01001820v1_decoy,length=3633>
+##contig=<ID=chrUn_JTFH01001821v1_decoy,length=3633>
+##contig=<ID=chrUn_JTFH01001822v1_decoy,length=3613>
+##contig=<ID=chrUn_JTFH01001823v1_decoy,length=3605>
+##contig=<ID=chrUn_JTFH01001824v1_decoy,length=3592>
+##contig=<ID=chrUn_JTFH01001825v1_decoy,length=3586>
+##contig=<ID=chrUn_JTFH01001826v1_decoy,length=3584>
+##contig=<ID=chrUn_JTFH01001827v1_decoy,length=3577>
+##contig=<ID=chrUn_JTFH01001828v1_decoy,length=3537>
+##contig=<ID=chrUn_JTFH01001829v1_decoy,length=3510>
+##contig=<ID=chrUn_JTFH01001830v1_decoy,length=3509>
+##contig=<ID=chrUn_JTFH01001831v1_decoy,length=3488>
+##contig=<ID=chrUn_JTFH01001832v1_decoy,length=3473>
+##contig=<ID=chrUn_JTFH01001833v1_decoy,length=3445>
+##contig=<ID=chrUn_JTFH01001834v1_decoy,length=3427>
+##contig=<ID=chrUn_JTFH01001835v1_decoy,length=3395>
+##contig=<ID=chrUn_JTFH01001836v1_decoy,length=3367>
+##contig=<ID=chrUn_JTFH01001837v1_decoy,length=3337>
+##contig=<ID=chrUn_JTFH01001838v1_decoy,length=3324>
+##contig=<ID=chrUn_JTFH01001839v1_decoy,length=3315>
+##contig=<ID=chrUn_JTFH01001840v1_decoy,length=3313>
+##contig=<ID=chrUn_JTFH01001841v1_decoy,length=3283>
+##contig=<ID=chrUn_JTFH01001842v1_decoy,length=3250>
+##contig=<ID=chrUn_JTFH01001843v1_decoy,length=3247>
+##contig=<ID=chrUn_JTFH01001844v1_decoy,length=3237>
+##contig=<ID=chrUn_JTFH01001845v1_decoy,length=3235>
+##contig=<ID=chrUn_JTFH01001846v1_decoy,length=3200>
+##contig=<ID=chrUn_JTFH01001847v1_decoy,length=3195>
+##contig=<ID=chrUn_JTFH01001848v1_decoy,length=3175>
+##contig=<ID=chrUn_JTFH01001849v1_decoy,length=3158>
+##contig=<ID=chrUn_JTFH01001850v1_decoy,length=3143>
+##contig=<ID=chrUn_JTFH01001851v1_decoy,length=3139>
+##contig=<ID=chrUn_JTFH01001852v1_decoy,length=3138>
+##contig=<ID=chrUn_JTFH01001853v1_decoy,length=3136>
+##contig=<ID=chrUn_JTFH01001854v1_decoy,length=3132>
+##contig=<ID=chrUn_JTFH01001855v1_decoy,length=3132>
+##contig=<ID=chrUn_JTFH01001856v1_decoy,length=3095>
+##contig=<ID=chrUn_JTFH01001857v1_decoy,length=3094>
+##contig=<ID=chrUn_JTFH01001858v1_decoy,length=3093>
+##contig=<ID=chrUn_JTFH01001859v1_decoy,length=3059>
+##contig=<ID=chrUn_JTFH01001860v1_decoy,length=2985>
+##contig=<ID=chrUn_JTFH01001861v1_decoy,length=2975>
+##contig=<ID=chrUn_JTFH01001862v1_decoy,length=2967>
+##contig=<ID=chrUn_JTFH01001863v1_decoy,length=2961>
+##contig=<ID=chrUn_JTFH01001864v1_decoy,length=2955>
+##contig=<ID=chrUn_JTFH01001865v1_decoy,length=2935>
+##contig=<ID=chrUn_JTFH01001866v1_decoy,length=2933>
+##contig=<ID=chrUn_JTFH01001867v1_decoy,length=2909>
+##contig=<ID=chrUn_JTFH01001868v1_decoy,length=2904>
+##contig=<ID=chrUn_JTFH01001869v1_decoy,length=2892>
+##contig=<ID=chrUn_JTFH01001870v1_decoy,length=2886>
+##contig=<ID=chrUn_JTFH01001871v1_decoy,length=2885>
+##contig=<ID=chrUn_JTFH01001872v1_decoy,length=2878>
+##contig=<ID=chrUn_JTFH01001873v1_decoy,length=2875>
+##contig=<ID=chrUn_JTFH01001874v1_decoy,length=2861>
+##contig=<ID=chrUn_JTFH01001875v1_decoy,length=2856>
+##contig=<ID=chrUn_JTFH01001876v1_decoy,length=2838>
+##contig=<ID=chrUn_JTFH01001877v1_decoy,length=2801>
+##contig=<ID=chrUn_JTFH01001878v1_decoy,length=2797>
+##contig=<ID=chrUn_JTFH01001879v1_decoy,length=2788>
+##contig=<ID=chrUn_JTFH01001880v1_decoy,length=2773>
+##contig=<ID=chrUn_JTFH01001881v1_decoy,length=2755>
+##contig=<ID=chrUn_JTFH01001882v1_decoy,length=2754>
+##contig=<ID=chrUn_JTFH01001883v1_decoy,length=2743>
+##contig=<ID=chrUn_JTFH01001884v1_decoy,length=2725>
+##contig=<ID=chrUn_JTFH01001885v1_decoy,length=2722>
+##contig=<ID=chrUn_JTFH01001886v1_decoy,length=2682>
+##contig=<ID=chrUn_JTFH01001887v1_decoy,length=2669>
+##contig=<ID=chrUn_JTFH01001888v1_decoy,length=2663>
+##contig=<ID=chrUn_JTFH01001889v1_decoy,length=2652>
+##contig=<ID=chrUn_JTFH01001890v1_decoy,length=2647>
+##contig=<ID=chrUn_JTFH01001891v1_decoy,length=2635>
+##contig=<ID=chrUn_JTFH01001892v1_decoy,length=2633>
+##contig=<ID=chrUn_JTFH01001893v1_decoy,length=2629>
+##contig=<ID=chrUn_JTFH01001894v1_decoy,length=2612>
+##contig=<ID=chrUn_JTFH01001895v1_decoy,length=2599>
+##contig=<ID=chrUn_JTFH01001896v1_decoy,length=2566>
+##contig=<ID=chrUn_JTFH01001897v1_decoy,length=2556>
+##contig=<ID=chrUn_JTFH01001898v1_decoy,length=2551>
+##contig=<ID=chrUn_JTFH01001899v1_decoy,length=2551>
+##contig=<ID=chrUn_JTFH01001900v1_decoy,length=2538>
+##contig=<ID=chrUn_JTFH01001901v1_decoy,length=2538>
+##contig=<ID=chrUn_JTFH01001902v1_decoy,length=2525>
+##contig=<ID=chrUn_JTFH01001903v1_decoy,length=2498>
+##contig=<ID=chrUn_JTFH01001904v1_decoy,length=2496>
+##contig=<ID=chrUn_JTFH01001905v1_decoy,length=2483>
+##contig=<ID=chrUn_JTFH01001906v1_decoy,length=2475>
+##contig=<ID=chrUn_JTFH01001907v1_decoy,length=2469>
+##contig=<ID=chrUn_JTFH01001908v1_decoy,length=2455>
+##contig=<ID=chrUn_JTFH01001909v1_decoy,length=2444>
+##contig=<ID=chrUn_JTFH01001910v1_decoy,length=2437>
+##contig=<ID=chrUn_JTFH01001911v1_decoy,length=2435>
+##contig=<ID=chrUn_JTFH01001912v1_decoy,length=2427>
+##contig=<ID=chrUn_JTFH01001913v1_decoy,length=2419>
+##contig=<ID=chrUn_JTFH01001914v1_decoy,length=2413>
+##contig=<ID=chrUn_JTFH01001915v1_decoy,length=2412>
+##contig=<ID=chrUn_JTFH01001916v1_decoy,length=2400>
+##contig=<ID=chrUn_JTFH01001917v1_decoy,length=2399>
+##contig=<ID=chrUn_JTFH01001918v1_decoy,length=2396>
+##contig=<ID=chrUn_JTFH01001919v1_decoy,length=2393>
+##contig=<ID=chrUn_JTFH01001920v1_decoy,length=2386>
+##contig=<ID=chrUn_JTFH01001921v1_decoy,length=2384>
+##contig=<ID=chrUn_JTFH01001922v1_decoy,length=2382>
+##contig=<ID=chrUn_JTFH01001923v1_decoy,length=2382>
+##contig=<ID=chrUn_JTFH01001924v1_decoy,length=2367>
+##contig=<ID=chrUn_JTFH01001925v1_decoy,length=2366>
+##contig=<ID=chrUn_JTFH01001926v1_decoy,length=2362>
+##contig=<ID=chrUn_JTFH01001927v1_decoy,length=2361>
+##contig=<ID=chrUn_JTFH01001928v1_decoy,length=2353>
+##contig=<ID=chrUn_JTFH01001929v1_decoy,length=2349>
+##contig=<ID=chrUn_JTFH01001930v1_decoy,length=2348>
+##contig=<ID=chrUn_JTFH01001931v1_decoy,length=2340>
+##contig=<ID=chrUn_JTFH01001932v1_decoy,length=2339>
+##contig=<ID=chrUn_JTFH01001933v1_decoy,length=2336>
+##contig=<ID=chrUn_JTFH01001934v1_decoy,length=2333>
+##contig=<ID=chrUn_JTFH01001935v1_decoy,length=2330>
+##contig=<ID=chrUn_JTFH01001936v1_decoy,length=2327>
+##contig=<ID=chrUn_JTFH01001937v1_decoy,length=2318>
+##contig=<ID=chrUn_JTFH01001938v1_decoy,length=2293>
+##contig=<ID=chrUn_JTFH01001939v1_decoy,length=2292>
+##contig=<ID=chrUn_JTFH01001940v1_decoy,length=2287>
+##contig=<ID=chrUn_JTFH01001941v1_decoy,length=2274>
+##contig=<ID=chrUn_JTFH01001942v1_decoy,length=2274>
+##contig=<ID=chrUn_JTFH01001943v1_decoy,length=2267>
+##contig=<ID=chrUn_JTFH01001944v1_decoy,length=2260>
+##contig=<ID=chrUn_JTFH01001945v1_decoy,length=2257>
+##contig=<ID=chrUn_JTFH01001946v1_decoy,length=2240>
+##contig=<ID=chrUn_JTFH01001947v1_decoy,length=2239>
+##contig=<ID=chrUn_JTFH01001948v1_decoy,length=2232>
+##contig=<ID=chrUn_JTFH01001949v1_decoy,length=2230>
+##contig=<ID=chrUn_JTFH01001950v1_decoy,length=2230>
+##contig=<ID=chrUn_JTFH01001951v1_decoy,length=2222>
+##contig=<ID=chrUn_JTFH01001952v1_decoy,length=2216>
+##contig=<ID=chrUn_JTFH01001953v1_decoy,length=2214>
+##contig=<ID=chrUn_JTFH01001954v1_decoy,length=2210>
+##contig=<ID=chrUn_JTFH01001955v1_decoy,length=2203>
+##contig=<ID=chrUn_JTFH01001956v1_decoy,length=2197>
+##contig=<ID=chrUn_JTFH01001957v1_decoy,length=2196>
+##contig=<ID=chrUn_JTFH01001958v1_decoy,length=2196>
+##contig=<ID=chrUn_JTFH01001959v1_decoy,length=2179>
+##contig=<ID=chrUn_JTFH01001960v1_decoy,length=2178>
+##contig=<ID=chrUn_JTFH01001961v1_decoy,length=2178>
+##contig=<ID=chrUn_JTFH01001962v1_decoy,length=2172>
+##contig=<ID=chrUn_JTFH01001963v1_decoy,length=2170>
+##contig=<ID=chrUn_JTFH01001964v1_decoy,length=2167>
+##contig=<ID=chrUn_JTFH01001965v1_decoy,length=2167>
+##contig=<ID=chrUn_JTFH01001966v1_decoy,length=2157>
+##contig=<ID=chrUn_JTFH01001967v1_decoy,length=2153>
+##contig=<ID=chrUn_JTFH01001968v1_decoy,length=2151>
+##contig=<ID=chrUn_JTFH01001969v1_decoy,length=2147>
+##contig=<ID=chrUn_JTFH01001970v1_decoy,length=2145>
+##contig=<ID=chrUn_JTFH01001971v1_decoy,length=2142>
+##contig=<ID=chrUn_JTFH01001972v1_decoy,length=2142>
+##contig=<ID=chrUn_JTFH01001973v1_decoy,length=2136>
+##contig=<ID=chrUn_JTFH01001974v1_decoy,length=2130>
+##contig=<ID=chrUn_JTFH01001975v1_decoy,length=2128>
+##contig=<ID=chrUn_JTFH01001976v1_decoy,length=2126>
+##contig=<ID=chrUn_JTFH01001977v1_decoy,length=2126>
+##contig=<ID=chrUn_JTFH01001978v1_decoy,length=2119>
+##contig=<ID=chrUn_JTFH01001979v1_decoy,length=2107>
+##contig=<ID=chrUn_JTFH01001980v1_decoy,length=2091>
+##contig=<ID=chrUn_JTFH01001981v1_decoy,length=2087>
+##contig=<ID=chrUn_JTFH01001982v1_decoy,length=2086>
+##contig=<ID=chrUn_JTFH01001983v1_decoy,length=2083>
+##contig=<ID=chrUn_JTFH01001984v1_decoy,length=2075>
+##contig=<ID=chrUn_JTFH01001985v1_decoy,length=2075>
+##contig=<ID=chrUn_JTFH01001986v1_decoy,length=2072>
+##contig=<ID=chrUn_JTFH01001987v1_decoy,length=2068>
+##contig=<ID=chrUn_JTFH01001988v1_decoy,length=2067>
+##contig=<ID=chrUn_JTFH01001989v1_decoy,length=2055>
+##contig=<ID=chrUn_JTFH01001990v1_decoy,length=2051>
+##contig=<ID=chrUn_JTFH01001991v1_decoy,length=2050>
+##contig=<ID=chrUn_JTFH01001992v1_decoy,length=2033>
+##contig=<ID=chrUn_JTFH01001993v1_decoy,length=2024>
+##contig=<ID=chrUn_JTFH01001994v1_decoy,length=2016>
+##contig=<ID=chrUn_JTFH01001995v1_decoy,length=2011>
+##contig=<ID=chrUn_JTFH01001996v1_decoy,length=2009>
+##contig=<ID=chrUn_JTFH01001997v1_decoy,length=2003>
+##contig=<ID=chrUn_JTFH01001998v1_decoy,length=2001>
+##contig=<ID=HLA-A*01:01:01:01,length=3503>
+##contig=<ID=HLA-A*01:01:01:02N,length=3291>
+##contig=<ID=HLA-A*01:01:38L,length=3374>
+##contig=<ID=HLA-A*01:02,length=3374>
+##contig=<ID=HLA-A*01:03,length=3503>
+##contig=<ID=HLA-A*01:04N,length=3136>
+##contig=<ID=HLA-A*01:09,length=3105>
+##contig=<ID=HLA-A*01:11N,length=3374>
+##contig=<ID=HLA-A*01:14,length=3095>
+##contig=<ID=HLA-A*01:16N,length=2985>
+##contig=<ID=HLA-A*01:20,length=3105>
+##contig=<ID=HLA-A*02:01:01:01,length=3517>
+##contig=<ID=HLA-A*02:01:01:02L,length=3287>
+##contig=<ID=HLA-A*02:01:01:03,length=3023>
+##contig=<ID=HLA-A*02:01:01:04,length=3516>
+##contig=<ID=HLA-A*02:02:01,length=2917>
+##contig=<ID=HLA-A*02:03:01,length=3517>
+##contig=<ID=HLA-A*02:03:03,length=3148>
+##contig=<ID=HLA-A*02:05:01,length=3517>
+##contig=<ID=HLA-A*02:06:01,length=3517>
+##contig=<ID=HLA-A*02:07:01,length=3517>
+##contig=<ID=HLA-A*02:10,length=3517>
+##contig=<ID=HLA-A*02:251,length=3517>
+##contig=<ID=HLA-A*02:259,length=2978>
+##contig=<ID=HLA-A*02:264,length=3002>
+##contig=<ID=HLA-A*02:265,length=3148>
+##contig=<ID=HLA-A*02:266,length=3084>
+##contig=<ID=HLA-A*02:269,length=3101>
+##contig=<ID=HLA-A*02:279,length=3103>
+##contig=<ID=HLA-A*02:32N,length=3517>
+##contig=<ID=HLA-A*02:376,length=3104>
+##contig=<ID=HLA-A*02:43N,length=3218>
+##contig=<ID=HLA-A*02:455,length=3118>
+##contig=<ID=HLA-A*02:48,length=3517>
+##contig=<ID=HLA-A*02:51,length=3109>
+##contig=<ID=HLA-A*02:533,length=3217>
+##contig=<ID=HLA-A*02:53N,length=3305>
+##contig=<ID=HLA-A*02:57,length=3054>
+##contig=<ID=HLA-A*02:60:01,length=3112>
+##contig=<ID=HLA-A*02:65,length=3387>
+##contig=<ID=HLA-A*02:68,length=3109>
+##contig=<ID=HLA-A*02:77,length=3371>
+##contig=<ID=HLA-A*02:81,length=3309>
+##contig=<ID=HLA-A*02:89,length=3371>
+##contig=<ID=HLA-A*02:95,length=3388>
+##contig=<ID=HLA-A*03:01:01:01,length=3502>
+##contig=<ID=HLA-A*03:01:01:02N,length=3373>
+##contig=<ID=HLA-A*03:01:01:03,length=3094>
+##contig=<ID=HLA-A*03:02:01,length=3502>
+##contig=<ID=HLA-A*03:11N,length=3404>
+##contig=<ID=HLA-A*03:21N,length=3095>
+##contig=<ID=HLA-A*03:36N,length=3142>
+##contig=<ID=HLA-A*11:01:01,length=3503>
+##contig=<ID=HLA-A*11:01:18,length=3503>
+##contig=<ID=HLA-A*11:02:01,length=3503>
+##contig=<ID=HLA-A*11:05,length=3373>
+##contig=<ID=HLA-A*11:110,length=2903>
+##contig=<ID=HLA-A*11:25,length=3073>
+##contig=<ID=HLA-A*11:50Q,length=3362>
+##contig=<ID=HLA-A*11:60,length=3241>
+##contig=<ID=HLA-A*11:69N,length=3500>
+##contig=<ID=HLA-A*11:74,length=3227>
+##contig=<ID=HLA-A*11:75,length=3184>
+##contig=<ID=HLA-A*11:77,length=3233>
+##contig=<ID=HLA-A*23:01:01,length=3502>
+##contig=<ID=HLA-A*23:09,length=3104>
+##contig=<ID=HLA-A*23:38N,length=3020>
+##contig=<ID=HLA-A*24:02:01:01,length=3502>
+##contig=<ID=HLA-A*24:02:01:02L,length=3502>
+##contig=<ID=HLA-A*24:02:01:03,length=3075>
+##contig=<ID=HLA-A*24:02:03Q,length=3247>
+##contig=<ID=HLA-A*24:02:10,length=3356>
+##contig=<ID=HLA-A*24:03:01,length=3502>
+##contig=<ID=HLA-A*24:07:01,length=3502>
+##contig=<ID=HLA-A*24:08,length=3502>
+##contig=<ID=HLA-A*24:09N,length=3502>
+##contig=<ID=HLA-A*24:10:01,length=3502>
+##contig=<ID=HLA-A*24:11N,length=3503>
+##contig=<ID=HLA-A*24:152,length=3176>
+##contig=<ID=HLA-A*24:20,length=3502>
+##contig=<ID=HLA-A*24:215,length=3116>
+##contig=<ID=HLA-A*24:61,length=3043>
+##contig=<ID=HLA-A*24:86N,length=3415>
+##contig=<ID=HLA-A*25:01:01,length=2917>
+##contig=<ID=HLA-A*26:01:01,length=3517>
+##contig=<ID=HLA-A*26:11N,length=3091>
+##contig=<ID=HLA-A*26:15,length=3217>
+##contig=<ID=HLA-A*26:50,length=3141>
+##contig=<ID=HLA-A*29:01:01:01,length=3518>
+##contig=<ID=HLA-A*29:01:01:02N,length=3303>
+##contig=<ID=HLA-A*29:02:01:01,length=3518>
+##contig=<ID=HLA-A*29:02:01:02,length=3518>
+##contig=<ID=HLA-A*29:46,length=3310>
+##contig=<ID=HLA-A*30:01:01,length=3503>
+##contig=<ID=HLA-A*30:02:01:01,length=2903>
+##contig=<ID=HLA-A*30:02:01:02,length=3374>
+##contig=<ID=HLA-A*30:04:01,length=3503>
+##contig=<ID=HLA-A*30:89,length=2903>
+##contig=<ID=HLA-A*31:01:02,length=3518>
+##contig=<ID=HLA-A*31:01:23,length=2918>
+##contig=<ID=HLA-A*31:04,length=2918>
+##contig=<ID=HLA-A*31:14N,length=3090>
+##contig=<ID=HLA-A*31:46,length=3075>
+##contig=<ID=HLA-A*32:01:01,length=3518>
+##contig=<ID=HLA-A*32:06,length=3389>
+##contig=<ID=HLA-A*33:01:01,length=3518>
+##contig=<ID=HLA-A*33:03:01,length=3518>
+##contig=<ID=HLA-A*33:07,length=3389>
+##contig=<ID=HLA-A*34:01:01,length=3517>
+##contig=<ID=HLA-A*34:02:01,length=3096>
+##contig=<ID=HLA-A*36:01,length=2903>
+##contig=<ID=HLA-A*43:01,length=3388>
+##contig=<ID=HLA-A*66:01:01,length=3517>
+##contig=<ID=HLA-A*66:17,length=3075>
+##contig=<ID=HLA-A*68:01:01:01,length=2930>
+##contig=<ID=HLA-A*68:01:01:02,length=3517>
+##contig=<ID=HLA-A*68:01:02:01,length=3517>
+##contig=<ID=HLA-A*68:01:02:02,length=3388>
+##contig=<ID=HLA-A*68:02:01:01,length=3517>
+##contig=<ID=HLA-A*68:02:01:02,length=3506>
+##contig=<ID=HLA-A*68:02:01:03,length=2909>
+##contig=<ID=HLA-A*68:02:02,length=2916>
+##contig=<ID=HLA-A*68:03:01,length=2917>
+##contig=<ID=HLA-A*68:08:01,length=3120>
+##contig=<ID=HLA-A*68:113,length=3070>
+##contig=<ID=HLA-A*68:17,length=3134>
+##contig=<ID=HLA-A*68:18N,length=3237>
+##contig=<ID=HLA-A*68:22,length=3119>
+##contig=<ID=HLA-A*68:71,length=3198>
+##contig=<ID=HLA-A*69:01,length=2917>
+##contig=<ID=HLA-A*74:01,length=2918>
+##contig=<ID=HLA-A*74:02:01:01,length=2918>
+##contig=<ID=HLA-A*74:02:01:02,length=3518>
+##contig=<ID=HLA-A*80:01:01:01,length=3263>
+##contig=<ID=HLA-A*80:01:01:02,length=3055>
+##contig=<ID=HLA-B*07:02:01,length=3323>
+##contig=<ID=HLA-B*07:05:01,length=2676>
+##contig=<ID=HLA-B*07:06,length=2676>
+##contig=<ID=HLA-B*07:156,length=2967>
+##contig=<ID=HLA-B*07:33:01,length=3239>
+##contig=<ID=HLA-B*07:41,length=3266>
+##contig=<ID=HLA-B*07:44,length=3270>
+##contig=<ID=HLA-B*07:50,length=3323>
+##contig=<ID=HLA-B*08:01:01,length=3322>
+##contig=<ID=HLA-B*08:08N,length=3035>
+##contig=<ID=HLA-B*08:132,length=2675>
+##contig=<ID=HLA-B*08:134,length=2959>
+##contig=<ID=HLA-B*08:19N,length=3322>
+##contig=<ID=HLA-B*08:20,length=3322>
+##contig=<ID=HLA-B*08:33,length=3322>
+##contig=<ID=HLA-B*08:79,length=2676>
+##contig=<ID=HLA-B*13:01:01,length=3324>
+##contig=<ID=HLA-B*13:02:01,length=3324>
+##contig=<ID=HLA-B*13:02:03,length=3323>
+##contig=<ID=HLA-B*13:02:09,length=2919>
+##contig=<ID=HLA-B*13:08,length=3324>
+##contig=<ID=HLA-B*13:15,length=3323>
+##contig=<ID=HLA-B*13:25,length=2689>
+##contig=<ID=HLA-B*14:01:01,length=3312>
+##contig=<ID=HLA-B*14:02:01,length=3312>
+##contig=<ID=HLA-B*14:07N,length=3255>
+##contig=<ID=HLA-B*15:01:01:01,length=3336>
+##contig=<ID=HLA-B*15:01:01:02N,length=1208>
+##contig=<ID=HLA-B*15:01:01:03,length=3026>
+##contig=<ID=HLA-B*15:02:01,length=3335>
+##contig=<ID=HLA-B*15:03:01,length=2689>
+##contig=<ID=HLA-B*15:04:01,length=3052>
+##contig=<ID=HLA-B*15:07:01,length=3336>
+##contig=<ID=HLA-B*15:108,length=3283>
+##contig=<ID=HLA-B*15:10:01,length=2689>
+##contig=<ID=HLA-B*15:11:01,length=3336>
+##contig=<ID=HLA-B*15:13:01,length=2688>
+##contig=<ID=HLA-B*15:16:01,length=2688>
+##contig=<ID=HLA-B*15:17:01:01,length=3051>
+##contig=<ID=HLA-B*15:17:01:02,length=3051>
+##contig=<ID=HLA-B*15:18:01,length=3336>
+##contig=<ID=HLA-B*15:220,length=2878>
+##contig=<ID=HLA-B*15:25:01,length=3335>
+##contig=<ID=HLA-B*15:27:01,length=2689>
+##contig=<ID=HLA-B*15:32:01,length=3336>
+##contig=<ID=HLA-B*15:42,length=3333>
+##contig=<ID=HLA-B*15:58,length=3336>
+##contig=<ID=HLA-B*15:66,length=2902>
+##contig=<ID=HLA-B*15:77,length=3336>
+##contig=<ID=HLA-B*15:83,length=3337>
+##contig=<ID=HLA-B*18:01:01:01,length=3323>
+##contig=<ID=HLA-B*18:01:01:02,length=3323>
+##contig=<ID=HLA-B*18:02,length=2686>
+##contig=<ID=HLA-B*18:03,length=3323>
+##contig=<ID=HLA-B*18:17N,length=2979>
+##contig=<ID=HLA-B*18:26,length=3323>
+##contig=<ID=HLA-B*18:94N,length=2970>
+##contig=<ID=HLA-B*27:04:01,length=3325>
+##contig=<ID=HLA-B*27:05:02,length=3325>
+##contig=<ID=HLA-B*27:05:18,length=3321>
+##contig=<ID=HLA-B*27:06,length=3325>
+##contig=<ID=HLA-B*27:07:01,length=2677>
+##contig=<ID=HLA-B*27:131,length=3325>
+##contig=<ID=HLA-B*27:24,length=2677>
+##contig=<ID=HLA-B*27:25,length=2677>
+##contig=<ID=HLA-B*27:32,length=3325>
+##contig=<ID=HLA-B*35:01:01:01,length=3327>
+##contig=<ID=HLA-B*35:01:01:02,length=3327>
+##contig=<ID=HLA-B*35:01:22,length=2806>
+##contig=<ID=HLA-B*35:02:01,length=3327>
+##contig=<ID=HLA-B*35:03:01,length=2689>
+##contig=<ID=HLA-B*35:05:01,length=2690>
+##contig=<ID=HLA-B*35:08:01,length=2689>
+##contig=<ID=HLA-B*35:14:02,length=3327>
+##contig=<ID=HLA-B*35:241,length=3042>
+##contig=<ID=HLA-B*35:41,length=3327>
+##contig=<ID=HLA-B*37:01:01,length=3324>
+##contig=<ID=HLA-B*37:01:05,length=2687>
+##contig=<ID=HLA-B*38:01:01,length=3312>
+##contig=<ID=HLA-B*38:02:01,length=3312>
+##contig=<ID=HLA-B*38:14,length=2738>
+##contig=<ID=HLA-B*39:01:01:01,length=3155>
+##contig=<ID=HLA-B*39:01:01:02L,length=3153>
+##contig=<ID=HLA-B*39:01:01:03,length=3312>
+##contig=<ID=HLA-B*39:01:03,length=3155>
+##contig=<ID=HLA-B*39:01:16,length=3155>
+##contig=<ID=HLA-B*39:01:21,length=3312>
+##contig=<ID=HLA-B*39:05:01,length=2675>
+##contig=<ID=HLA-B*39:06:02,length=2674>
+##contig=<ID=HLA-B*39:10:01,length=3027>
+##contig=<ID=HLA-B*39:13:02,length=3255>
+##contig=<ID=HLA-B*39:14,length=2765>
+##contig=<ID=HLA-B*39:34,length=3254>
+##contig=<ID=HLA-B*39:38Q,length=2675>
+##contig=<ID=HLA-B*40:01:01,length=2676>
+##contig=<ID=HLA-B*40:01:02,length=3323>
+##contig=<ID=HLA-B*40:02:01,length=3258>
+##contig=<ID=HLA-B*40:03,length=2677>
+##contig=<ID=HLA-B*40:06:01:01,length=3325>
+##contig=<ID=HLA-B*40:06:01:02,length=3299>
+##contig=<ID=HLA-B*40:10:01,length=3304>
+##contig=<ID=HLA-B*40:150,length=2800>
+##contig=<ID=HLA-B*40:40,length=2677>
+##contig=<ID=HLA-B*40:72:01,length=3283>
+##contig=<ID=HLA-B*40:79,length=3257>
+##contig=<ID=HLA-B*41:01:01,length=3322>
+##contig=<ID=HLA-B*41:02:01,length=3322>
+##contig=<ID=HLA-B*42:01:01,length=3322>
+##contig=<ID=HLA-B*42:02,length=2675>
+##contig=<ID=HLA-B*42:08,length=3165>
+##contig=<ID=HLA-B*44:02:01:01,length=3323>
+##contig=<ID=HLA-B*44:02:01:02S,length=3152>
+##contig=<ID=HLA-B*44:02:01:03,length=3152>
+##contig=<ID=HLA-B*44:02:17,length=3323>
+##contig=<ID=HLA-B*44:02:27,length=2872>
+##contig=<ID=HLA-B*44:03:01,length=3323>
+##contig=<ID=HLA-B*44:03:02,length=2676>
+##contig=<ID=HLA-B*44:04,length=3239>
+##contig=<ID=HLA-B*44:09,length=3317>
+##contig=<ID=HLA-B*44:138Q,length=3043>
+##contig=<ID=HLA-B*44:150,length=2676>
+##contig=<ID=HLA-B*44:23N,length=3323>
+##contig=<ID=HLA-B*44:26,length=2804>
+##contig=<ID=HLA-B*44:46,length=3323>
+##contig=<ID=HLA-B*44:49,length=3039>
+##contig=<ID=HLA-B*44:56N,length=2676>
+##contig=<ID=HLA-B*45:01:01,length=3338>
+##contig=<ID=HLA-B*45:04,length=3339>
+##contig=<ID=HLA-B*46:01:01,length=3336>
+##contig=<ID=HLA-B*46:01:05,length=2891>
+##contig=<ID=HLA-B*47:01:01:01,length=3041>
+##contig=<ID=HLA-B*47:01:01:02,length=3041>
+##contig=<ID=HLA-B*48:01:01,length=3323>
+##contig=<ID=HLA-B*48:03:01,length=2676>
+##contig=<ID=HLA-B*48:04,length=2676>
+##contig=<ID=HLA-B*48:08,length=3323>
+##contig=<ID=HLA-B*49:01:01,length=3340>
+##contig=<ID=HLA-B*49:32,length=3340>
+##contig=<ID=HLA-B*50:01:01,length=3340>
+##contig=<ID=HLA-B*51:01:01,length=3327>
+##contig=<ID=HLA-B*51:01:02,length=3043>
+##contig=<ID=HLA-B*51:02:01,length=3327>
+##contig=<ID=HLA-B*51:07:01,length=3327>
+##contig=<ID=HLA-B*51:42,length=2962>
+##contig=<ID=HLA-B*52:01:01:01,length=3327>
+##contig=<ID=HLA-B*52:01:01:02,length=3327>
+##contig=<ID=HLA-B*52:01:01:03,length=3327>
+##contig=<ID=HLA-B*52:01:02,length=3327>
+##contig=<ID=HLA-B*53:01:01,length=3327>
+##contig=<ID=HLA-B*53:11,length=3274>
+##contig=<ID=HLA-B*54:01:01,length=3332>
+##contig=<ID=HLA-B*54:18,length=2813>
+##contig=<ID=HLA-B*55:01:01,length=3332>
+##contig=<ID=HLA-B*55:01:03,length=3332>
+##contig=<ID=HLA-B*55:02:01,length=3333>
+##contig=<ID=HLA-B*55:12,length=3332>
+##contig=<ID=HLA-B*55:24,length=3332>
+##contig=<ID=HLA-B*55:48,length=2980>
+##contig=<ID=HLA-B*56:01:01,length=2688>
+##contig=<ID=HLA-B*56:03,length=2688>
+##contig=<ID=HLA-B*56:04,length=2688>
+##contig=<ID=HLA-B*57:01:01,length=3337>
+##contig=<ID=HLA-B*57:03:01,length=2689>
+##contig=<ID=HLA-B*57:06,length=3284>
+##contig=<ID=HLA-B*57:11,length=3306>
+##contig=<ID=HLA-B*57:29,length=3337>
+##contig=<ID=HLA-B*58:01:01,length=3336>
+##contig=<ID=HLA-B*58:31N,length=3004>
+##contig=<ID=HLA-B*59:01:01:01,length=3333>
+##contig=<ID=HLA-B*59:01:01:02,length=3332>
+##contig=<ID=HLA-B*67:01:01,length=3312>
+##contig=<ID=HLA-B*67:01:02,length=2675>
+##contig=<ID=HLA-B*67:02,length=3307>
+##contig=<ID=HLA-B*73:01,length=3323>
+##contig=<ID=HLA-B*78:01:01,length=3327>
+##contig=<ID=HLA-B*81:01,length=2676>
+##contig=<ID=HLA-B*82:02:01,length=3050>
+##contig=<ID=HLA-C*01:02:01,length=3349>
+##contig=<ID=HLA-C*01:02:11,length=3057>
+##contig=<ID=HLA-C*01:02:29,length=3349>
+##contig=<ID=HLA-C*01:02:30,length=3333>
+##contig=<ID=HLA-C*01:03,length=3349>
+##contig=<ID=HLA-C*01:06,length=2895>
+##contig=<ID=HLA-C*01:08,length=3349>
+##contig=<ID=HLA-C*01:14,length=2895>
+##contig=<ID=HLA-C*01:21,length=2895>
+##contig=<ID=HLA-C*01:30,length=3349>
+##contig=<ID=HLA-C*01:40,length=2968>
+##contig=<ID=HLA-C*02:02:02:01,length=3347>
+##contig=<ID=HLA-C*02:02:02:02,length=3347>
+##contig=<ID=HLA-C*02:10,length=2893>
+##contig=<ID=HLA-C*02:11,length=3320>
+##contig=<ID=HLA-C*02:16:02,length=3029>
+##contig=<ID=HLA-C*02:69,length=2933>
+##contig=<ID=HLA-C*02:85,length=3347>
+##contig=<ID=HLA-C*02:86,length=3347>
+##contig=<ID=HLA-C*02:87,length=3064>
+##contig=<ID=HLA-C*03:02:01,length=2894>
+##contig=<ID=HLA-C*03:02:02:01,length=3348>
+##contig=<ID=HLA-C*03:02:02:02,length=2896>
+##contig=<ID=HLA-C*03:02:02:03,length=3348>
+##contig=<ID=HLA-C*03:03:01,length=3348>
+##contig=<ID=HLA-C*03:04:01:01,length=3348>
+##contig=<ID=HLA-C*03:04:01:02,length=3348>
+##contig=<ID=HLA-C*03:04:02,length=2877>
+##contig=<ID=HLA-C*03:04:04,length=2966>
+##contig=<ID=HLA-C*03:05,length=2894>
+##contig=<ID=HLA-C*03:06,length=2894>
+##contig=<ID=HLA-C*03:100,length=3034>
+##contig=<ID=HLA-C*03:13:01,length=3065>
+##contig=<ID=HLA-C*03:20N,length=3321>
+##contig=<ID=HLA-C*03:219,length=3070>
+##contig=<ID=HLA-C*03:261,length=3348>
+##contig=<ID=HLA-C*03:40:01,length=2894>
+##contig=<ID=HLA-C*03:41:02,length=3328>
+##contig=<ID=HLA-C*03:46,length=2997>
+##contig=<ID=HLA-C*03:61,length=2894>
+##contig=<ID=HLA-C*04:01:01:01,length=3349>
+##contig=<ID=HLA-C*04:01:01:02,length=3349>
+##contig=<ID=HLA-C*04:01:01:03,length=3349>
+##contig=<ID=HLA-C*04:01:01:04,length=3012>
+##contig=<ID=HLA-C*04:01:01:05,length=2931>
+##contig=<ID=HLA-C*04:01:62,length=3329>
+##contig=<ID=HLA-C*04:03:01,length=3349>
+##contig=<ID=HLA-C*04:06,length=3349>
+##contig=<ID=HLA-C*04:09N,length=2991>
+##contig=<ID=HLA-C*04:128,length=3086>
+##contig=<ID=HLA-C*04:161,length=3237>
+##contig=<ID=HLA-C*04:177,length=3349>
+##contig=<ID=HLA-C*04:70,length=3058>
+##contig=<ID=HLA-C*04:71,length=3086>
+##contig=<ID=HLA-C*05:01:01:01,length=3349>
+##contig=<ID=HLA-C*05:01:01:02,length=3349>
+##contig=<ID=HLA-C*05:08,length=3059>
+##contig=<ID=HLA-C*05:09:01,length=3322>
+##contig=<ID=HLA-C*05:93,length=2946>
+##contig=<ID=HLA-C*06:02:01:01,length=3349>
+##contig=<ID=HLA-C*06:02:01:02,length=3349>
+##contig=<ID=HLA-C*06:02:01:03,length=3349>
+##contig=<ID=HLA-C*06:23,length=3349>
+##contig=<ID=HLA-C*06:24,length=3349>
+##contig=<ID=HLA-C*06:46N,length=2987>
+##contig=<ID=HLA-C*07:01:01:01,length=3354>
+##contig=<ID=HLA-C*07:01:01:02,length=3093>
+##contig=<ID=HLA-C*07:01:02,length=3352>
+##contig=<ID=HLA-C*07:01:19,length=3354>
+##contig=<ID=HLA-C*07:01:27,length=3195>
+##contig=<ID=HLA-C*07:01:45,length=3354>
+##contig=<ID=HLA-C*07:02:01:01,length=3354>
+##contig=<ID=HLA-C*07:02:01:02,length=3074>
+##contig=<ID=HLA-C*07:02:01:03,length=3354>
+##contig=<ID=HLA-C*07:02:01:04,length=3353>
+##contig=<ID=HLA-C*07:02:01:05,length=3354>
+##contig=<ID=HLA-C*07:02:05,length=2903>
+##contig=<ID=HLA-C*07:02:06,length=3354>
+##contig=<ID=HLA-C*07:02:64,length=3354>
+##contig=<ID=HLA-C*07:04:01,length=3354>
+##contig=<ID=HLA-C*07:04:02,length=3343>
+##contig=<ID=HLA-C*07:06,length=3354>
+##contig=<ID=HLA-C*07:149,length=3098>
+##contig=<ID=HLA-C*07:18,length=3353>
+##contig=<ID=HLA-C*07:19,length=3222>
+##contig=<ID=HLA-C*07:26,length=3069>
+##contig=<ID=HLA-C*07:30,length=2903>
+##contig=<ID=HLA-C*07:32N,length=3334>
+##contig=<ID=HLA-C*07:384,length=3349>
+##contig=<ID=HLA-C*07:385,length=3354>
+##contig=<ID=HLA-C*07:386,length=3183>
+##contig=<ID=HLA-C*07:391,length=3354>
+##contig=<ID=HLA-C*07:392,length=3354>
+##contig=<ID=HLA-C*07:49,length=2935>
+##contig=<ID=HLA-C*07:56:02,length=3354>
+##contig=<ID=HLA-C*07:66,length=3354>
+##contig=<ID=HLA-C*07:67,length=3354>
+##contig=<ID=HLA-C*08:01:01,length=3349>
+##contig=<ID=HLA-C*08:01:03,length=2998>
+##contig=<ID=HLA-C*08:02:01:01,length=3349>
+##contig=<ID=HLA-C*08:02:01:02,length=3349>
+##contig=<ID=HLA-C*08:03:01,length=3349>
+##contig=<ID=HLA-C*08:04:01,length=2895>
+##contig=<ID=HLA-C*08:112,length=3178>
+##contig=<ID=HLA-C*08:20,length=3349>
+##contig=<ID=HLA-C*08:21,length=3349>
+##contig=<ID=HLA-C*08:22,length=3349>
+##contig=<ID=HLA-C*08:24,length=2895>
+##contig=<ID=HLA-C*08:27,length=3349>
+##contig=<ID=HLA-C*08:36N,length=3097>
+##contig=<ID=HLA-C*08:40,length=2978>
+##contig=<ID=HLA-C*08:41,length=3019>
+##contig=<ID=HLA-C*08:62,length=3086>
+##contig=<ID=HLA-C*12:02:02,length=3349>
+##contig=<ID=HLA-C*12:03:01:01,length=3349>
+##contig=<ID=HLA-C*12:03:01:02,length=3348>
+##contig=<ID=HLA-C*12:08,length=3066>
+##contig=<ID=HLA-C*12:13,length=3058>
+##contig=<ID=HLA-C*12:19,length=3349>
+##contig=<ID=HLA-C*12:22,length=2895>
+##contig=<ID=HLA-C*12:99,length=3349>
+##contig=<ID=HLA-C*14:02:01,length=3349>
+##contig=<ID=HLA-C*14:03,length=3349>
+##contig=<ID=HLA-C*14:21N,length=3099>
+##contig=<ID=HLA-C*14:23,length=2976>
+##contig=<ID=HLA-C*15:02:01,length=3349>
+##contig=<ID=HLA-C*15:05:01,length=3349>
+##contig=<ID=HLA-C*15:05:02,length=3349>
+##contig=<ID=HLA-C*15:13,length=2895>
+##contig=<ID=HLA-C*15:16,length=3066>
+##contig=<ID=HLA-C*15:17,length=3349>
+##contig=<ID=HLA-C*15:96Q,length=3349>
+##contig=<ID=HLA-C*16:01:01,length=3349>
+##contig=<ID=HLA-C*16:02:01,length=2895>
+##contig=<ID=HLA-C*16:04:01,length=3349>
+##contig=<ID=HLA-C*17:01:01:01,length=3368>
+##contig=<ID=HLA-C*17:01:01:02,length=3368>
+##contig=<ID=HLA-C*17:01:01:03,length=3368>
+##contig=<ID=HLA-C*17:03,length=3197>
+##contig=<ID=HLA-C*18:01,length=3346>
+##contig=<ID=HLA-DQA1*01:01:02,length=6489>
+##contig=<ID=HLA-DQA1*01:02:01:01,length=6484>
+##contig=<ID=HLA-DQA1*01:02:01:02,length=6485>
+##contig=<ID=HLA-DQA1*01:02:01:03,length=6485>
+##contig=<ID=HLA-DQA1*01:02:01:04,length=6492>
+##contig=<ID=HLA-DQA1*01:03:01:01,length=6485>
+##contig=<ID=HLA-DQA1*01:03:01:02,length=6492>
+##contig=<ID=HLA-DQA1*01:04:01:01,length=6484>
+##contig=<ID=HLA-DQA1*01:04:01:02,length=6485>
+##contig=<ID=HLA-DQA1*01:05:01,length=6485>
+##contig=<ID=HLA-DQA1*01:07,length=5959>
+##contig=<ID=HLA-DQA1*01:10,length=5790>
+##contig=<ID=HLA-DQA1*01:11,length=5926>
+##contig=<ID=HLA-DQA1*02:01,length=6403>
+##contig=<ID=HLA-DQA1*03:01:01,length=6437>
+##contig=<ID=HLA-DQA1*03:02,length=6437>
+##contig=<ID=HLA-DQA1*03:03:01,length=6437>
+##contig=<ID=HLA-DQA1*04:01:02:01,length=5853>
+##contig=<ID=HLA-DQA1*04:01:02:02,length=5666>
+##contig=<ID=HLA-DQA1*04:02,length=6210>
+##contig=<ID=HLA-DQA1*05:01:01:01,length=5806>
+##contig=<ID=HLA-DQA1*05:01:01:02,length=6529>
+##contig=<ID=HLA-DQA1*05:03,length=6121>
+##contig=<ID=HLA-DQA1*05:05:01:01,length=6593>
+##contig=<ID=HLA-DQA1*05:05:01:02,length=6597>
+##contig=<ID=HLA-DQA1*05:05:01:03,length=6393>
+##contig=<ID=HLA-DQA1*05:11,length=6589>
+##contig=<ID=HLA-DQA1*06:01:01,length=5878>
+##contig=<ID=HLA-DQB1*02:01:01,length=7480>
+##contig=<ID=HLA-DQB1*02:02:01,length=7471>
+##contig=<ID=HLA-DQB1*03:01:01:01,length=7231>
+##contig=<ID=HLA-DQB1*03:01:01:02,length=7230>
+##contig=<ID=HLA-DQB1*03:01:01:03,length=7231>
+##contig=<ID=HLA-DQB1*03:02:01,length=7126>
+##contig=<ID=HLA-DQB1*03:03:02:01,length=7126>
+##contig=<ID=HLA-DQB1*03:03:02:02,length=7126>
+##contig=<ID=HLA-DQB1*03:03:02:03,length=6800>
+##contig=<ID=HLA-DQB1*03:05:01,length=6934>
+##contig=<ID=HLA-DQB1*05:01:01:01,length=7090>
+##contig=<ID=HLA-DQB1*05:01:01:02,length=7090>
+##contig=<ID=HLA-DQB1*05:03:01:01,length=7089>
+##contig=<ID=HLA-DQB1*05:03:01:02,length=7089>
+##contig=<ID=HLA-DQB1*06:01:01,length=7111>
+##contig=<ID=HLA-DQB1*06:02:01,length=7102>
+##contig=<ID=HLA-DQB1*06:03:01,length=7103>
+##contig=<ID=HLA-DQB1*06:09:01,length=7102>
+##contig=<ID=HLA-DRB1*01:01:01,length=10741>
+##contig=<ID=HLA-DRB1*01:02:01,length=11229>
+##contig=<ID=HLA-DRB1*03:01:01:01,length=13908>
+##contig=<ID=HLA-DRB1*03:01:01:02,length=13426>
+##contig=<ID=HLA-DRB1*04:03:01,length=15246>
+##contig=<ID=HLA-DRB1*07:01:01:01,length=16110>
+##contig=<ID=HLA-DRB1*07:01:01:02,length=16120>
+##contig=<ID=HLA-DRB1*08:03:02,length=13562>
+##contig=<ID=HLA-DRB1*09:21,length=16039>
+##contig=<ID=HLA-DRB1*10:01:01,length=13501>
+##contig=<ID=HLA-DRB1*11:01:01,length=13921>
+##contig=<ID=HLA-DRB1*11:01:02,length=13931>
+##contig=<ID=HLA-DRB1*11:04:01,length=13919>
+##contig=<ID=HLA-DRB1*12:01:01,length=13404>
+##contig=<ID=HLA-DRB1*12:17,length=11260>
+##contig=<ID=HLA-DRB1*13:01:01,length=13935>
+##contig=<ID=HLA-DRB1*13:02:01,length=13941>
+##contig=<ID=HLA-DRB1*14:05:01,length=13933>
+##contig=<ID=HLA-DRB1*14:54:01,length=13936>
+##contig=<ID=HLA-DRB1*15:01:01:01,length=11080>
+##contig=<ID=HLA-DRB1*15:01:01:02,length=11571>
+##contig=<ID=HLA-DRB1*15:01:01:03,length=11056>
+##contig=<ID=HLA-DRB1*15:01:01:04,length=11056>
+##contig=<ID=HLA-DRB1*15:02:01,length=10313>
+##contig=<ID=HLA-DRB1*15:03:01:01,length=11567>
+##contig=<ID=HLA-DRB1*15:03:01:02,length=11569>
+##contig=<ID=HLA-DRB1*16:02:01,length=11005>
+##reference=file:///Users/shlee/Documents/ref/hg38/Homo_sapiens_assembly38.fasta
+#CHROM	POS	ID	REF	ALT	QUAL	FILTER	INFO	FORMAT	TUMOR	NORMAL
+chr6	33414233	.	GT	G	.	PASS	ECNT=1;HCNT=1;MAX_ED=.;MIN_ED=.;NLOD=28.24;RPA=5,4;RU=T;STR;TLOD=154.53	GT:AD:AF:ALT_F1R2:ALT_F2R1:FOXOG:QSS:REF_F1R2:REF_F2R1	0/1:66,70:0.534:25:41:.:2209,2350:26:40	0/0:112,0:0.00:0:0:.:3730,0:62:50
+chr6	33442919	.	A	C	.	alt_allele_in_normal	ECNT=1;HCNT=32;MAX_ED=.;MIN_ED=.;NLOD=2.94;TLOD=6.35	GT:AD:AF:ALT_F1R2:ALT_F2R1:FOXOG:QSS:REF_F1R2:REF_F2R1	0/1:123,29:0.156:13:16:0.552:3124,283:60:60	0/0:88,17:0.193:4:13:0.765:2231,128:38:50
+chr6	71886972	.	TC	T	.	PASS	ECNT=1;HCNT=1;MAX_ED=.;MIN_ED=.;NLOD=2.41;RPA=2,1;RU=C;STR;TLOD=14.12	GT:AD:AF:ALT_F1R2:ALT_F2R1:FOXOG:QSS:REF_F1R2:REF_F2R1	0/1:14,5:0.278:0:4:.:384,148:6:8	0/0:10,0:0.00:0:0:.:282,0:3:7
+chr6	118314029	.	TTTCAGGA	T	.	PASS	ECNT=1;HCNT=16;MAX_ED=.;MIN_ED=.;NLOD=20.42;TLOD=80.46	GT:AD:AF:ALT_F1R2:ALT_F2R1:FOXOG:QSS:REF_F1R2:REF_F2R1	0/1:68,26:0.261:13:10:.:2100,793:37:29	0/0:69,0:0.00:0:0:.:2115,0:35:34
diff --git a/src/test/resources/htsjdk/tribble/tabix/4featuresHG38Header.vcf.gz b/src/test/resources/htsjdk/tribble/tabix/4featuresHG38Header.vcf.gz
new file mode 100644
index 0000000..c2abb26
Binary files /dev/null and b/src/test/resources/htsjdk/tribble/tabix/4featuresHG38Header.vcf.gz differ
diff --git a/src/test/resources/htsjdk/tribble/tabix/4featuresHG38Header.vcf.gz.tbi b/src/test/resources/htsjdk/tribble/tabix/4featuresHG38Header.vcf.gz.tbi
new file mode 100644
index 0000000..64e435c
Binary files /dev/null and b/src/test/resources/htsjdk/tribble/tabix/4featuresHG38Header.vcf.gz.tbi differ
diff --git a/src/test/scala/htsjdk/UnitSpec.scala b/src/test/scala/htsjdk/UnitSpec.scala
new file mode 100644
index 0000000..a2995d5
--- /dev/null
+++ b/src/test/scala/htsjdk/UnitSpec.scala
@@ -0,0 +1,25 @@
+package htsjdk
+
+import java.nio.file.{Files, Path}
+
+import org.scalatest.{FlatSpec, Matchers}
+
+/** Base class for all Scala tests. */
+class UnitSpec extends FlatSpec with Matchers {
+  /** Make a temporary file that will get cleaned up at the end of testing. */
+  protected def makeTempFile(prefix: String, suffix: String): Path = {
+    val path = Files.createTempFile(prefix, suffix)
+    path.toFile.deleteOnExit()
+    path
+  }
+
+  /** Implicit conversion from Java to Scala iterator. */
+  implicit def javaIteratorAsScalaIterator[A](iter: java.util.Iterator[A]): Iterator[A] = {
+    scala.collection.JavaConverters.asScalaIterator(iter)
+  }
+
+  /** Implicit conversion from Java to Scala iterable. */
+  implicit def javaIterableAsScalaIterable[A](iterable: java.lang.Iterable[A]): Iterable[A] = {
+    scala.collection.JavaConverters.iterableAsScalaIterable(iterable)
+  }
+}
diff --git a/src/test/scala/htsjdk/samtools/fastq/FastqReaderWriterTest.scala b/src/test/scala/htsjdk/samtools/fastq/FastqReaderWriterTest.scala
new file mode 100644
index 0000000..00f62e9
--- /dev/null
+++ b/src/test/scala/htsjdk/samtools/fastq/FastqReaderWriterTest.scala
@@ -0,0 +1,180 @@
+package htsjdk.samtools.fastq
+
+import java.io.{BufferedReader, File, StringReader}
+
+import htsjdk.UnitSpec
+import htsjdk.samtools.{SAMException, SAMUtils}
+import htsjdk.samtools.util.IOUtil
+
+import scala.util.Random
+
+class FastqReaderWriterTest extends UnitSpec {
+  private val rng = new Random()
+  private val Bases = Array('A', 'C', 'G', 'T')
+
+  /** Generates a random string of bases of the desired length. */
+  def bases(length: Int): String = {
+    val chs = new Array[Char](length)
+    chs.indices.foreach(i => chs(i) = Bases(rng.nextInt(Bases.length)))
+    new String(chs)
+  }
+
+  /** Generates a FastqRecord with random bases at a given length. */
+  def fq(name: String, length: Int, qual: Int = 30): FastqRecord = {
+    new FastqRecord(name, bases(length), "", SAMUtils.phredToFastq(qual).toString * length)
+  }
+
+  "FastqWriter" should "write four lines per record to file" in {
+    val path = makeTempFile("test.", ".fastq")
+    val out = new FastqWriterFactory().newWriter(path.toFile)
+    val recs = Seq(fq("q1", 50), fq("q2", 48), fq("q3", 55))
+    val Seq(q1, q2, q3) = recs
+
+    recs.foreach(rec => out.write(rec))
+    out.close()
+
+    val lines = IOUtil.slurpLines(path.toFile)
+    lines should have size 12
+
+    lines.get(0) shouldBe "@q1"
+    lines.get(1) shouldBe q1.getReadString
+    lines.get(4) shouldBe "@q2"
+    lines.get(5) shouldBe q2.getReadString
+    lines.get(8) shouldBe "@q3"
+    lines.get(9) shouldBe q3.getReadString
+  }
+
+  it should "write a record with only a single base" in {
+    val path = makeTempFile("test.", ".fastq")
+    val out = new FastqWriterFactory().newWriter(path.toFile)
+    out.write(fq("q1", 1))
+    out.close()
+    val lines = IOUtil.slurpLines(path.toFile)
+    lines.get(1) should have length 1
+    lines.get(3) should have length 1
+  }
+
+  it should "write a record with zero-length bases and quals" in {
+    val path = makeTempFile("test.", ".fastq")
+    val out = new FastqWriterFactory().newWriter(path.toFile)
+    out.write(fq("q1", 0))
+    out.close()
+    val lines = IOUtil.slurpLines(path.toFile)
+    lines.get(1) should have length 0
+    lines.get(3) should have length 0
+  }
+
+
+  "FastqReader" should "read back a fastq file written by FastqWriter" in {
+    val path = makeTempFile("test.", ".fastq")
+    val out = new FastqWriterFactory().newWriter(path.toFile)
+    val recs = Seq(fq("q1", 50), fq("q2", 100), fq("q3", 150))
+    recs.foreach(rec => out.write(rec))
+    out.close()
+
+    val in = new FastqReader(path.toFile)
+    val recs2 = in.iterator().toList
+    in.close()
+    recs2 should contain theSameElementsInOrderAs recs
+  }
+
+  it should "throw an exception if the input fastq is garbled" in {
+    val fastq =
+      """
+        |@q1
+        |AACCGGTT
+        |+
+        |########
+        |@q2
+        |ACGT
+        |####
+      """.stripMargin.trim
+
+    val in = new FastqReader(null, new BufferedReader(new StringReader(fastq)))
+    an[Exception] shouldBe thrownBy { in.next() }
+  }
+
+  it should "throw an exception if the input file doesn't exist" in {
+    an[Exception] shouldBe thrownBy { new FastqReader(new File("/some/path/that/shouldnt/exist.fq"))}
+  }
+
+  it should "read an empty file just fine" in {
+    val path = makeTempFile("empty.", ".fastq")
+    val in = new FastqReader(path.toFile)
+    while (in.hasNext) in.next()
+    an[Exception] shouldBe thrownBy { in.next() }
+    in.close()
+  }
+
+  it should "honor skipBlankLines when requested" in {
+    val fastq =
+      """
+        |
+        |@SL-XBG:1:1:4:1663#0/2
+        |NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+        |+SL-XBG:1:1:4:1663#0/2
+        |BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
+      """.stripMargin
+    val reader = new BufferedReader(new StringReader(fastq))
+    val in = new FastqReader(null, reader, true)
+    while (in.hasNext) in.next()
+  }
+
+  it should "fail on blank lines when skipBlankLines is false" in {
+    val fastq =
+      """
+        |
+        |@SL-XBG:1:1:4:1663#0/2
+        |NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+        |+SL-XBG:1:1:4:1663#0/2
+        |BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
+      """.stripMargin
+    val reader = new BufferedReader(new StringReader(fastq))
+    an[SAMException] shouldBe thrownBy { val in = new FastqReader(null, reader, false) }
+  }
+
+  it should "fail on a truncated file" in {
+    val fastq =
+      """
+        |@q1
+        |AACCGGTT
+        |+
+        |########
+      """.stripMargin.trim
+
+    Range.inclusive(1, 3).foreach { n =>
+      val text   = fastq.lines.take(n).mkString("\n")
+      val reader = new BufferedReader(new StringReader(text))
+      an[Exception] shouldBe thrownBy { new FastqReader(null, reader).iterator().toSeq }
+    }
+  }
+
+  it should "fail if the seq and qual lines are different lengths" in {
+    val fastq =
+      """
+        |@q1
+        |AACC
+        |+
+        |########
+      """.stripMargin.trim
+
+    val reader = new BufferedReader(new StringReader(fastq))
+    an[Exception] shouldBe thrownBy { new FastqReader(null, reader).iterator().toSeq }
+  }
+
+  it should "fail if either header line is empty" in {
+    val fastq =
+      """
+        |@q1
+        |AACC
+        |+q1
+        |########
+      """.stripMargin.trim
+
+    val noSeqHeader  = new BufferedReader(new StringReader(fastq.replace("@q1", "")))
+    val noQualHeader = new BufferedReader(new StringReader(fastq.replace("+q1", "")))
+    an[Exception] shouldBe thrownBy { new FastqReader(noSeqHeader).iterator().toSeq }
+    an[Exception] shouldBe thrownBy { new FastqReader(noQualHeader).iterator().toSeq }
+  }
+
+}
diff --git a/src/test/scala/htsjdk/samtools/util/StringUtilTest.scala b/src/test/scala/htsjdk/samtools/util/StringUtilTest.scala
new file mode 100644
index 0000000..35957d6
--- /dev/null
+++ b/src/test/scala/htsjdk/samtools/util/StringUtilTest.scala
@@ -0,0 +1,134 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2017 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+package htsjdk.samtools.util
+
+import htsjdk.UnitSpec
+
+class StringUtilTest extends UnitSpec {
+  "StringUtil.split" should "behave like String.split(char)" in {
+    Seq("A:BB:C", "A:BB", "A:BB:", "A:BB:C:DDD", "A:", "A", "A:BB:C").foreach { s =>
+      val arr = new Array[String](10)
+      val count = StringUtil.split(s, arr, ':')
+      arr.take(count) shouldBe s.split(':')
+    }
+  }
+
+  "StringUtil.splitConcatenateExcessTokens" should "behave like String.split(regex, limit)" in {
+    Seq("A:BB:C", "A:BB", "A:BB:", "A:BB:C:DDD", "A:", "A", "A:BB:C:").foreach { s =>
+      val arr = new Array[String](3)
+      val count = StringUtil.splitConcatenateExcessTokens(s, arr, ':')
+      arr.take(count) shouldBe s.split(":", 3).filter(_.nonEmpty)
+    }
+  }
+
+  "StringUtil.join" should "join tokens with a separator" in {
+    StringUtil.join(",", 1, "hello", 'T') shouldBe "1,hello,T"
+    StringUtil.join(",") shouldBe ""
+  }
+
+  "StringUtil.hammingDistance" should "return zero for two empty sequences" in {
+      StringUtil.hammingDistance("", "") shouldBe 0
+  }
+
+  Seq(("ATAC", "GCAT", 3), ("ATAGC", "ATAGC", 0)).foreach { case (s1, s2, distance) =>
+      it should s"return distance $distance between $s1 and $s2" in {
+        StringUtil.hammingDistance(s1, s2) shouldBe distance
+      }
+  }
+
+  it should "be case sensitive" in {
+    StringUtil.hammingDistance("ATAC", "atac") shouldBe 4
+  }
+
+  it should "count Ns as matching when computing distance" in {
+    StringUtil.hammingDistance("nAGTN", "nAGTN") shouldBe 0
+  }
+
+  it should "throw an exception if two strings of different lengths are provided" in {
+    an[Exception] shouldBe thrownBy { StringUtil.hammingDistance("", "ABC")}
+    an[Exception] shouldBe thrownBy { StringUtil.hammingDistance("Abc", "wxyz")}
+  }
+
+  "StringUtil.isWithinHammingDistance" should "agree with StringUtil.hammingDistance" in {
+    Seq(("ATAC", "GCAT", 3), ("ATAC", "GCAT", 2), ("ATAC", "GCAT", 1), ("ATAC", "GCAT", 0)).foreach { case (s1, s2, within) =>
+        StringUtil.isWithinHammingDistance(s1, s2, within) shouldBe (StringUtil.hammingDistance(s1, s2) <= within)
+    }
+  }
+
+  it should "throw an exception if the two strings are of different lengths" in {
+    an[Exception] shouldBe thrownBy { StringUtil.isWithinHammingDistance("", "ABC", 2)}
+    an[Exception] shouldBe thrownBy { StringUtil.isWithinHammingDistance("Abc", "wxyz", 2)}
+  }
+
+  "StringUtil.toLowerCase(byte)" should "work just like Character.toLowerCase" in {
+    0 to 127 foreach {i => StringUtil.toLowerCase(i.toByte) shouldBe i.toChar.toLower.toByte }
+  }
+
+  "StringUtil.toUpperCase(byte)" should "work just like Character.toUpperCase" in {
+    0 to 127 foreach {i => StringUtil.toUpperCase(i.toByte) shouldBe i.toChar.toUpper.toByte }
+  }
+
+  "StringUtil.toUpperCase(byte[])" should "do upper case characters" in {
+    val seq = "atACgtaCGTgatcCAtATATgATtatgacNryuAN"
+    val bytes = seq.getBytes
+    StringUtil.toUpperCase(bytes)
+    bytes shouldBe seq.toUpperCase.getBytes
+  }
+
+  "StringUtil.assertCharactersNotInString" should "catch illegal characters" in {
+    an[Exception] shouldBe thrownBy {
+      StringUtil.assertCharactersNotInString("Hello World!", ' ', '!', '_')
+    }
+  }
+
+  it should "not fail when there are no illegal characters present" in {
+    StringUtil.assertCharactersNotInString("HelloWorld", ' ', '!', '_')
+  }
+
+  val textForWrapping: String =
+    """This is a little bit
+      |of text with nice short
+      |lines.
+    """.stripMargin.trim
+
+  "StringUtil.wordWrap" should "not wrap when lines are shorter than the given length" in {
+    StringUtil.wordWrap(textForWrapping, 50) shouldBe textForWrapping
+  }
+
+  it should "wrap text when lines are longer than length give" in {
+    val result = StringUtil.wordWrap(textForWrapping, 15)
+    result.lines.size shouldBe 5
+    result.lines.foreach(line => line.length should be <= 15)
+  }
+
+  "StringUtil.intValuesToString(int[])" should "generate a CSV string of ints" in {
+    val ints = Array[Int](1, 2, 3, 11, 22, 33, Int.MinValue, 0, Int.MaxValue)
+    StringUtil.intValuesToString(ints) shouldBe ints.mkString(", ")
+  }
+
+  "StringUtil.intValuesToString(short[])" should "generate a CSV string of ints" in {
+    val ints = Array[Short](1, 2, 3, 11, 22, 33, Short.MinValue, 0, Short.MaxValue)
+    StringUtil.intValuesToString(ints) shouldBe ints.mkString(", ")
+  }
+}

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/htsjdk.git



More information about the debian-med-commit mailing list